diff --git a/.dockerignore b/.dockerignore index f229611505534..a6274ffa46194 100644 --- a/.dockerignore +++ b/.dockerignore @@ -31,7 +31,7 @@ !api/doc/ !crates/indexer/migrations/**/*.sql !ecosystem/indexer-grpc/indexer-grpc-parser/migrations/**/*.sql -!ecosystem/nft-metadata-crawler-parser/migrations/**/*.sql +!ecosystem/nft-metadata-crawler/migrations/**/*.sql !rust-toolchain.toml !scripts/ !terraform/helm/aptos-node/ diff --git a/.github/actions/fullnode-sync/action.yaml b/.github/actions/fullnode-sync/action.yaml index 27bd18ae67eef..213f3e7ca594a 100644 --- a/.github/actions/fullnode-sync/action.yaml +++ b/.github/actions/fullnode-sync/action.yaml @@ -27,11 +27,6 @@ inputs: runs: using: composite steps: - - name: Install Dependencies - shell: bash - run: | - echo "${HOME}/bin/" >> $GITHUB_PATH # default INSTALL_DIR to path - scripts/dev_setup.sh -b # Install dependencies - name: Run fullnode sync shell: bash run: | diff --git a/.github/workflows/copy-images-to-dockerhub.yaml b/.github/workflows/copy-images-to-dockerhub.yaml index a0063eef1c5bc..edb72cc8c715a 100644 --- a/.github/workflows/copy-images-to-dockerhub.yaml +++ b/.github/workflows/copy-images-to-dockerhub.yaml @@ -10,6 +10,11 @@ on: required: false type: string description: the git sha to use for the image tag. If not provided, the git sha of the triggering branch will be used + dry_run: + required: false + type: boolean + default: false + description: If true, run the workflow without actually pushing images workflow_dispatch: inputs: image_tag_prefix: @@ -21,6 +26,11 @@ on: required: false type: string description: the git sha to use for the image tag. 
If not provided, the git sha of the triggering branch will be used + dry_run: + required: false + type: boolean + default: false + description: If true, run the workflow without actually pushing images permissions: contents: read @@ -29,7 +39,7 @@ permissions: jobs: copy-images: # Run on a machine with more local storage for large docker images - runs-on: medium-perf-docker-with-local-ssd + runs-on: runs-on,cpu=16,family=m6id,hdd=500,image=aptos-ubuntu-x64,run-id=${{ github.run_id }} steps: - uses: actions/checkout@v4 @@ -61,4 +71,5 @@ jobs: AWS_ACCOUNT_ID: ${{ secrets.AWS_ECR_ACCOUNT_NUM }} GCP_DOCKER_ARTIFACT_REPO: ${{ vars.GCP_DOCKER_ARTIFACT_REPO }} IMAGE_TAG_PREFIX: ${{ inputs.image_tag_prefix }} - run: ./docker/release-images.mjs --wait-for-image-seconds=3600 + DRY_RUN: ${{ inputs.dry_run }} + run: ./docker/release-images.mjs --wait-for-image-seconds=3600 ${{ inputs.dry_run && '--dry-run' || '' }} diff --git a/.github/workflows/docker-build-test.yaml b/.github/workflows/docker-build-test.yaml index df603a423e051..10efb527cc79f 100644 --- a/.github/workflows/docker-build-test.yaml +++ b/.github/workflows/docker-build-test.yaml @@ -245,17 +245,6 @@ jobs: GIT_SHA: ${{ needs.determine-docker-build-metadata.outputs.gitSha }} SKIP_JOB: ${{ needs.file_change_determinator.outputs.only_docs_changed == 'true' }} - indexer-grpc-e2e-tests: - needs: [permission-check, rust-images, determine-docker-build-metadata] # runs with the default release docker build variant "rust-images" - if: | - github.event_name == 'workflow_dispatch' || - contains(github.event.pull_request.labels.*.name, 'CICD:run-e2e-tests') || - contains(github.event.pull_request.body, '#e2e') - uses: aptos-labs/aptos-core/.github/workflows/docker-indexer-grpc-test.yaml@main - secrets: inherit - with: - GIT_SHA: ${{ needs.determine-docker-build-metadata.outputs.gitSha }} - # This is a PR required job. 
forge-e2e-test: needs: diff --git a/.github/workflows/docker-indexer-grpc-test.yaml b/.github/workflows/docker-indexer-grpc-test.yaml index 8cfb8cf1b1b09..94319399ceb97 100644 --- a/.github/workflows/docker-indexer-grpc-test.yaml +++ b/.github/workflows/docker-indexer-grpc-test.yaml @@ -1,3 +1,5 @@ +# THIS WORKFLOW IS DEPRECATED. Keep it around for branches that still reference it on the main branch. +# This file should eventually be deleted. name: "Docker Indexer gRPC test" on: pull_request: diff --git a/.github/workflows/execution-performance.yaml b/.github/workflows/execution-performance.yaml index 37064dfba35ca..1597d9ecbc863 100644 --- a/.github/workflows/execution-performance.yaml +++ b/.github/workflows/execution-performance.yaml @@ -23,6 +23,6 @@ jobs: GIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }} RUNNER_NAME: executor-benchmark-runner # Run all tests only on the scheduled cadence, or explicitly requested - IS_FULL_RUN: ${{ github.event_name == 'schedule' || contains(github.event.pull_request.labels.*.name, 'CICD:run-execution-performance-full-test') }} + FLOW: ${{ (github.event_name == 'schedule' || contains(github.event.pull_request.labels.*.name, 'CICD:run-execution-performance-full-test')) && 'CONTINUOUS' || 'LAND_BLOCKING' }} # Ignore target determination if on the scheduled cadence, or explicitly requested IGNORE_TARGET_DETERMINATION: ${{ github.event_name == 'schedule' || contains(github.event.pull_request.labels.*.name, 'CICD:run-execution-performance-test') || contains(github.event.pull_request.labels.*.name, 'CICD:run-execution-performance-full-test') }} diff --git a/.github/workflows/forge-pfn.yaml b/.github/workflows/forge-pfn.yaml deleted file mode 100644 index 660ed535217b7..0000000000000 --- a/.github/workflows/forge-pfn.yaml +++ /dev/null @@ -1,170 +0,0 @@ -# Continuously run PFN forge tests against the latest main branch -name: Continuous Forge Tests - Public Fullnodes - -permissions: - issues: write - pull-requests: 
write - contents: read - id-token: write - actions: write # Required for workflow cancellation via check-aptos-core - -on: - # Allow triggering manually - workflow_dispatch: - inputs: - IMAGE_TAG: - required: false - type: string - description: The docker image tag to test. This may be a git SHA1, or a tag like "_". If not specified, Forge will find the latest build based on the git history (starting from GIT_SHA input) - GIT_SHA: - required: false - type: string - description: The git SHA1 to checkout. This affects the Forge test runner that is used. If not specified, the latest main will be used - pull_request: - paths: - - ".github/workflows/forge-pfn.yaml" - -env: - AWS_ACCOUNT_NUM: ${{ secrets.ENV_ECR_AWS_ACCOUNT_NUM }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - IMAGE_TAG: ${{ inputs.IMAGE_TAG }} # This is only used for workflow_dispatch, otherwise defaults to empty - AWS_REGION: us-west-2 - -jobs: - # This job determines the image tag and branch to test, and passes them to the other jobs. - # NOTE: this may be better as a separate workflow as the logic is quite complex but generalizable. 
- determine-test-metadata: - runs-on: ubuntu-latest - outputs: - IMAGE_TAG: ${{ steps.get-docker-image-tag.outputs.IMAGE_TAG }} - BRANCH: ${{ steps.determine-test-branch.outputs.BRANCH }} - steps: - - uses: actions/checkout@v4 - - - name: Determine branch based on cadence - id: determine-test-branch - run: | - if [[ "${{ github.event_name }}" == "schedule" ]]; then - echo "Unknown schedule: ${{ github.event.schedule }}" - exit 1 - elif [[ "${{ github.event_name }}" == "push" ]]; then - echo "Branch: ${{ github.ref_name }}" - echo "BRANCH=${{ github.ref_name }}" >> $GITHUB_OUTPUT - else - echo "Using GIT_SHA" - # on workflow_dispatch, this will simply use the inputs.GIT_SHA given (or the default) - # on pull_request, this will default to null and the following "checkout" step will use the PR's base branch - echo "BRANCH=${{ inputs.GIT_SHA }}" >> $GITHUB_OUTPUT - fi - - - uses: aptos-labs/aptos-core/.github/actions/check-aptos-core@main - with: - cancel-workflow: ${{ github.event_name == 'schedule' }} # Cancel the workflow if it is scheduled on a fork - - # actions/get-latest-docker-image-tag requires docker utilities and having authenticated to internal docker image registries - - uses: aptos-labs/aptos-core/.github/actions/docker-setup@main - id: docker-setup - with: - GCP_WORKLOAD_IDENTITY_PROVIDER: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }} - GCP_SERVICE_ACCOUNT_EMAIL: ${{ secrets.GCP_SERVICE_ACCOUNT_EMAIL }} - EXPORT_GCP_PROJECT_VARIABLES: "false" - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DOCKER_ARTIFACT_REPO: ${{ secrets.AWS_DOCKER_ARTIFACT_REPO }} - GIT_CREDENTIALS: ${{ secrets.GIT_CREDENTIALS }} - - - uses: aptos-labs/aptos-core/.github/actions/get-latest-docker-image-tag@main - id: get-docker-image-tag - with: - branch: ${{ steps.determine-test-branch.outputs.BRANCH }} - variants: "failpoints performance" - - - name: Write summary - run: | - IMAGE_TAG=${{ 
steps.get-docker-image-tag.outputs.IMAGE_TAG }} - BRANCH=${{ steps.determine-test-branch.outputs.BRANCH }} - if [ -n "${BRANCH}" ]; then - echo "BRANCH: [${BRANCH}](https://github.com/${{ github.repository }}/tree/${BRANCH})" >> $GITHUB_STEP_SUMMARY - fi - echo "IMAGE_TAG: [${IMAGE_TAG}](https://github.com/${{ github.repository }}/commit/${IMAGE_TAG})" >> $GITHUB_STEP_SUMMARY - - ### Public fullnode tests - - # Measures PFN latencies with a constant TPS - run-forge-pfn-const-tps: - if: ${{ github.event_name != 'pull_request' }} - needs: determine-test-metadata - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-pfn-const-tps-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 1800 # Run for 30 minutes - FORGE_TEST_SUITE: pfn_const_tps - POST_TO_SLACK: true - - # Measures PFN latencies with a constant TPS (with network chaos) - run-forge-pfn-const-tps-network-chaos: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [determine-test-metadata, run-forge-pfn-const-tps] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-pfn-const-tps-with-network-chaos-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 1800 # Run for 30 minutes - FORGE_TEST_SUITE: pfn_const_tps_with_network_chaos - POST_TO_SLACK: true - - # Measures PFN latencies with a constant TPS (with a realistic environment) - run-forge-pfn-const-tps-realistic-env: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [determine-test-metadata, run-forge-pfn-const-tps-network-chaos] # Only run after the previous job completes - uses: 
aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-pfn-const-tps-with-realistic-env-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 1800 # Run for 30 minutes - FORGE_TEST_SUITE: pfn_const_tps_with_realistic_env - POST_TO_SLACK: true - - # Measures max PFN throughput and latencies under load - run-forge-pfn-performance: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [determine-test-metadata, run-forge-pfn-const-tps-realistic-env] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-pfn-performance-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 1800 # Run for 30 minutes - FORGE_TEST_SUITE: pfn_performance - POST_TO_SLACK: true - - # Measures max PFN throughput and latencies under load (with network chaos) - run-forge-pfn-performance-network-chaos: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [determine-test-metadata, run-forge-pfn-performance] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-pfn-performance-with-network-chaos-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 1800 # Run for 30 minutes - FORGE_TEST_SUITE: pfn_performance_with_network_chaos - POST_TO_SLACK: true - - # Measures max PFN throughput and latencies under load (with a realistic environment) - run-forge-pfn-performance-realistic-env: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [determine-test-metadata, 
run-forge-pfn-performance-network-chaos] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-pfn-performance-with-realistic-env-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 1800 # Run for 30 minutes - FORGE_TEST_SUITE: pfn_performance_with_realistic_env - POST_TO_SLACK: true diff --git a/.github/workflows/forge-stable.yaml b/.github/workflows/forge-stable.yaml index 82d9aef76eaf5..897b752d50948 100644 --- a/.github/workflows/forge-stable.yaml +++ b/.github/workflows/forge-stable.yaml @@ -68,26 +68,19 @@ jobs: elif [[ "${{ github.event_name }}" == "push" ]]; then echo "Branch: ${{ github.ref_name }}" echo "BRANCH=${{ github.ref_name }}" >> $GITHUB_OUTPUT - else - echo "Using GIT_SHA" - # on workflow_dispatch, this will simply use the inputs.GIT_SHA given (or the default) - # on pull_request, this will default to null and the following "checkout" step will use the PR's base branch + # on workflow_dispatch, this will simply use the inputs.GIT_SHA given (or the default) + elif [[ -n "${{ inputs.GIT_SHA }}" ]]; then echo "BRANCH=${{ inputs.GIT_SHA }}" >> $GITHUB_OUTPUT + # if GIT_SHA not provided, use the branch where workflow runs on + else + echo "BRANCH=${{ github.head_ref }}" >> $GITHUB_OUTPUT fi - # Use the branch hash instead of the full branch name to stay under kubernetes namespace length limit - name: Hash the branch id: hash-branch run: | - # If BRANCH is empty, default to "main" - if [ -z "${{ steps.determine-test-branch.outputs.BRANCH }}" ]; then - BRANCH="main" - else - BRANCH="${{ steps.determine-test-branch.outputs.BRANCH }}" - fi - # Hashing the branch name - echo "BRANCH_HASH=$(echo -n "$BRANCH" | sha256sum | cut -c1-10)" >> $GITHUB_OUTPUT + echo "BRANCH_HASH=$(echo -n "${{ steps.determine-test-branch.outputs.BRANCH }}" | 
sha256sum | cut -c1-10)" >> $GITHUB_OUTPUT - uses: aptos-labs/aptos-core/.github/actions/check-aptos-core@main with: @@ -111,7 +104,7 @@ jobs: branch: ${{ steps.determine-test-branch.outputs.BRANCH }} variants: "failpoints performance" - - uses: ./.github/actions/determine-or-use-target-branch-and-get-last-released-image + - uses: aptos-labs/aptos-core/.github/actions/determine-or-use-target-branch-and-get-last-released-image@main id: get-last-released-image-tag-for-compat-test with: base-branch: ${{ steps.determine-test-branch.outputs.BRANCH }} diff --git a/.github/workflows/forge-state-sync.yaml b/.github/workflows/forge-state-sync.yaml deleted file mode 100644 index c9b6e2afc72b5..0000000000000 --- a/.github/workflows/forge-state-sync.yaml +++ /dev/null @@ -1,142 +0,0 @@ -# Continuously run state sync forge tests against the latest main branch -name: Continuous Forge Tests - State Sync - -permissions: - issues: write - pull-requests: write - contents: read - id-token: write - actions: write # Required for workflow cancellation via check-aptos-core - -on: - # Allow triggering manually - workflow_dispatch: - inputs: - IMAGE_TAG: - required: false - type: string - description: The docker image tag to test. This may be a git SHA1, or a tag like "_". If not specified, Forge will find the latest build based on the git history (starting from GIT_SHA input) - GIT_SHA: - required: false - type: string - description: The git SHA1 to checkout. This affects the Forge test runner that is used. 
If not specified, the latest main will be used - pull_request: - paths: - - ".github/workflows/forge-state-sync.yaml" - -env: - AWS_ACCOUNT_NUM: ${{ secrets.ENV_ECR_AWS_ACCOUNT_NUM }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - IMAGE_TAG: ${{ inputs.IMAGE_TAG }} # This is only used for workflow_dispatch, otherwise defaults to empty - AWS_REGION: us-west-2 - -jobs: - # This job determines the image tag and branch to test, and passes them to the other jobs. - # NOTE: this may be better as a separate workflow as the logic is quite complex but generalizable. - determine-test-metadata: - runs-on: ubuntu-latest - outputs: - IMAGE_TAG: ${{ steps.get-docker-image-tag.outputs.IMAGE_TAG }} - BRANCH: ${{ steps.determine-test-branch.outputs.BRANCH }} - steps: - - name: Determine branch based on cadence - id: determine-test-branch - run: | - if [[ "${{ github.event_name }}" == "schedule" ]]; then - echo "Unknown schedule: ${{ github.event.schedule }}" - exit 1 - elif [[ "${{ github.event_name }}" == "push" ]]; then - echo "Branch: ${{ github.ref_name }}" - echo "BRANCH=${{ github.ref_name }}" >> $GITHUB_OUTPUT - else - echo "Using GIT_SHA" - # on workflow_dispatch, this will simply use the inputs.GIT_SHA given (or the default) - # on pull_request, this will default to null and the following "checkout" step will use the PR's base branch - echo "BRANCH=${{ inputs.GIT_SHA }}" >> $GITHUB_OUTPUT - fi - - - uses: aptos-labs/aptos-core/.github/actions/check-aptos-core@main - with: - cancel-workflow: ${{ github.event_name == 'schedule' }} # Cancel the workflow if it is scheduled on a fork - - # actions/get-latest-docker-image-tag requires docker utilities and having authenticated to internal docker image registries - - uses: aptos-labs/aptos-core/.github/actions/docker-setup@main - id: docker-setup - with: - GCP_WORKLOAD_IDENTITY_PROVIDER: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }} - GCP_SERVICE_ACCOUNT_EMAIL: 
${{ secrets.GCP_SERVICE_ACCOUNT_EMAIL }} - EXPORT_GCP_PROJECT_VARIABLES: "false" - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DOCKER_ARTIFACT_REPO: ${{ secrets.AWS_DOCKER_ARTIFACT_REPO }} - GIT_CREDENTIALS: ${{ secrets.GIT_CREDENTIALS }} - - - uses: aptos-labs/aptos-core/.github/actions/get-latest-docker-image-tag@main - id: get-docker-image-tag - with: - branch: ${{ steps.determine-test-branch.outputs.BRANCH }} - variants: "failpoints performance" - - - name: Write summary - run: | - IMAGE_TAG=${{ steps.get-docker-image-tag.outputs.IMAGE_TAG }} - BRANCH=${{ steps.determine-test-branch.outputs.BRANCH }} - if [ -n "${BRANCH}" ]; then - echo "BRANCH: [${BRANCH}](https://github.com/${{ github.repository }}/tree/${BRANCH})" >> $GITHUB_STEP_SUMMARY - fi - echo "IMAGE_TAG: [${IMAGE_TAG}](https://github.com/${{ github.repository }}/commit/${IMAGE_TAG})" >> $GITHUB_STEP_SUMMARY - - ### State sync tests - - # Measures state sync performance for validators (output syncing) - run-forge-state-sync-perf-validator-test: - if: ${{ github.event_name != 'pull_request' }} - needs: determine-test-metadata - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-state-sync-perf-validator-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 1800 # Run for 30 minutes - FORGE_TEST_SUITE: state_sync_perf_validators - POST_TO_SLACK: true - - # Measures state sync performance for validator fullnodes (execution syncing) - run-forge-state-sync-perf-fullnode-execute-test: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [determine-test-metadata, run-forge-state-sync-perf-validator-test] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - 
with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-state-sync-perf-fullnode-execute-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 1800 # Run for 30 minutes - FORGE_TEST_SUITE: state_sync_perf_fullnodes_execute_transactions - POST_TO_SLACK: true - - # Measures state sync performance for validator fullnodes (fast syncing) - run-forge-state-sync-perf-fullnode-fast-sync-test: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [determine-test-metadata, run-forge-state-sync-perf-fullnode-execute-test] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-state-sync-perf-fullnode-fast-sync-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 1800 # Run for 30 minutes - FORGE_TEST_SUITE: state_sync_perf_fullnodes_fast_sync - POST_TO_SLACK: true - - # Measures state sync performance for validator fullnodes (output syncing) - run-forge-state-sync-perf-fullnode-apply-test: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [determine-test-metadata, run-forge-state-sync-perf-fullnode-fast-sync-test] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-state-sync-perf-fullnode-apply-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 1800 # Run for 30 minutes - FORGE_TEST_SUITE: state_sync_perf_fullnodes_apply_outputs - POST_TO_SLACK: true diff --git a/.github/workflows/forge-unstable.yaml b/.github/workflows/forge-unstable.yaml index 5ff54030cf475..d7616db23141f 100644 --- a/.github/workflows/forge-unstable.yaml +++ 
b/.github/workflows/forge-unstable.yaml @@ -8,6 +8,10 @@ permissions: id-token: write actions: write #required for workflow cancellation via check-aptos-core +concurrency: + group: forge-unstable-${{ github.ref_name }} + cancel-in-progress: true + on: # Allow triggering manually workflow_dispatch: @@ -20,9 +24,14 @@ on: required: false type: string description: The git SHA1 to checkout. This affects the Forge test runner that is used. If not specified, the latest main will be used + # NOTE: to support testing different branches on different schedules, you need to specify the cron schedule in the 'determine-test-branch' step as well below + # Reference: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule + schedule: + - cron: "30 */2 * * *" # Run this on an aggressive cadence to get signal on the tests. They should be fully evaluated (moved to forge stable or removed entirely) after baking pull_request: paths: - ".github/workflows/forge-unstable.yaml" + - "testsuite/find_latest_image.py" env: AWS_ACCOUNT_NUM: ${{ secrets.ENV_ECR_AWS_ACCOUNT_NUM }} @@ -38,16 +47,22 @@ jobs: runs-on: ubuntu-latest outputs: IMAGE_TAG: ${{ steps.get-docker-image-tag.outputs.IMAGE_TAG }} + IMAGE_TAG_FOR_COMPAT_TEST: ${{ steps.get-last-released-image-tag-for-compat-test.outputs.IMAGE_TAG }} BRANCH: ${{ steps.determine-test-branch.outputs.BRANCH }} + BRANCH_HASH: ${{ steps.hash-branch.outputs.BRANCH_HASH }} steps: - uses: actions/checkout@v4 - name: Determine branch based on cadence id: determine-test-branch + # NOTE: the schedule cron MUST match the one in the 'on.schedule.cron' section above run: | if [[ "${{ github.event_name }}" == "schedule" ]]; then - echo "Unknown schedule: ${{ github.event.schedule }}" - exit 1 + echo "Branch: main" + echo "BRANCH=main" >> $GITHUB_OUTPUT + elif [[ "${{ github.event_name }}" == "push" ]]; then + echo "Branch: ${{ github.ref_name }}" + echo "BRANCH=${{ github.ref_name }}" >> $GITHUB_OUTPUT else echo "Using 
GIT_SHA" # on workflow_dispatch, this will simply use the inputs.GIT_SHA given (or the default) @@ -55,6 +70,20 @@ jobs: echo "BRANCH=${{ inputs.GIT_SHA }}" >> $GITHUB_OUTPUT fi + # Use the branch hash instead of the full branch name to stay under kubernetes namespace length limit + - name: Hash the branch + id: hash-branch + run: | + # If BRANCH is empty, default to "main" + if [[ -z "${{ steps.determine-test-branch.outputs.BRANCH }}" ]]; then + BRANCH="main" + else + BRANCH="${{ steps.determine-test-branch.outputs.BRANCH }}" + fi + + # Hashing the branch name + echo "BRANCH_HASH=$(echo -n "$BRANCH" | sha256sum | cut -c1-10)" >> $GITHUB_OUTPUT + - uses: aptos-labs/aptos-core/.github/actions/check-aptos-core@main with: cancel-workflow: ${{ github.event_name == 'schedule' }} # Cancel the workflow if it is scheduled on a fork @@ -86,88 +115,19 @@ jobs: fi echo "IMAGE_TAG: [${IMAGE_TAG}](https://github.com/${{ github.repository }}/commit/${IMAGE_TAG})" >> $GITHUB_STEP_SUMMARY - forge-continuous: +# Test definitions start below +# To add a new Forge test, add a new job definition below. Copy all fields and change only the: +# * job name +# * "needs" dependency. 
You need "determine-test-metadata", and the previous job (jobs are run sequentially) +# * with.FORGE_TEST_SUITE, change this to your test s uite +# * with.FORGE_*, any features or customizations you need + forge-indexer: if: ${{ github.event_name != 'pull_request' }} needs: determine-test-metadata uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main secrets: inherit with: IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - # GCP cluster - FORGE_CLUSTER_NAME: aptos-forge-1 - COMMENT_HEADER: forge-continuous - # This test suite is configured using the forge.py config test command - FORGE_TEST_SUITE: continuous - - run-forge-state-sync-slow-processing-catching-up-test: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [ determine-test-metadata, forge-continuous ] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - # GCP cluster - FORGE_CLUSTER_NAME: aptos-forge-1 - FORGE_NAMESPACE: forge-state-sync-slow-processing-catching-up-test-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 900 # Run for 15 minutes - FORGE_TEST_SUITE: state_sync_slow_processing_catching_up - POST_TO_SLACK: true - FORGE_ENABLE_FAILPOINTS: true - - run-forge-twin-validator-test: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [ determine-test-metadata, run-forge-state-sync-slow-processing-catching-up-test ] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - # GCP cluster - FORGE_CLUSTER_NAME: aptos-forge-1 - FORGE_NAMESPACE: forge-twin-validator-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 900 # Run for 15 minutes - 
FORGE_TEST_SUITE: twin_validator_test - POST_TO_SLACK: true - - run-forge-state-sync-failures-catching-up-test: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [ determine-test-metadata, run-forge-twin-validator-test ] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_NAMESPACE: forge-state-sync-failures-catching-up-test-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - # GCP cluster - FORGE_CLUSTER_NAME: aptos-forge-1 - FORGE_RUNNER_DURATION_SECS: 900 # Run for 15 minutes - FORGE_TEST_SUITE: state_sync_failures_catching_up - FORGE_ENABLE_FAILPOINTS: true - POST_TO_SLACK: ${{ needs.determine-test-metadata.outputs.BRANCH == 'main' }} # only post to slack on main branch - - run-forge-validator-reboot-stress-test: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [ determine-test-metadata, run-forge-state-sync-failures-catching-up-test ] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - # GCP cluster - FORGE_CLUSTER_NAME: aptos-forge-1 - FORGE_NAMESPACE: forge-validator-reboot-stress-${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - FORGE_RUNNER_DURATION_SECS: 2400 # Run for 40 minutes - FORGE_TEST_SUITE: validator_reboot_stress_test - POST_TO_SLACK: true - - run-forge-haproxy: - if: ${{ github.event_name != 'pull_request' && always() }} - needs: [ determine-test-metadata, run-forge-validator-reboot-stress-test ] # Only run after the previous job completes - uses: aptos-labs/aptos-core/.github/workflows/workflow-run-forge.yaml@main - secrets: inherit - with: - IMAGE_TAG: ${{ needs.determine-test-metadata.outputs.IMAGE_TAG }} - # GCP cluster - FORGE_CLUSTER_NAME: aptos-forge-1 - 
FORGE_NAMESPACE: forge-haproxy-${{ needs.determine-test-metadata.outputs.BRANCH_HASH }} - FORGE_RUNNER_DURATION_SECS: 600 # Run for 10 minutes - FORGE_ENABLE_HAPROXY: true - FORGE_TEST_SUITE: realistic_env_max_load - POST_TO_SLACK: true + COMMENT_HEADER: forge-indexer + FORGE_TEST_SUITE: indexer_test + FORGE_ENABLE_INDEXER: true diff --git a/.github/workflows/indexer-grpc-integration-tests.yaml b/.github/workflows/indexer-grpc-integration-tests.yaml deleted file mode 100644 index e9d733718312b..0000000000000 --- a/.github/workflows/indexer-grpc-integration-tests.yaml +++ /dev/null @@ -1,84 +0,0 @@ -name: "Indexer gRPC Integration Tests" -on: - pull_request_target: - types: [labeled, opened, synchronize, reopened, auto_merge_enabled] - push: - branches: - - main - -permissions: - contents: read - id-token: write # Required for GCP Workload Identity federation which we use to login into Google Artifact Registry - -# cancel redundant builds -concurrency: - # for push events we use `github.sha` in the concurrency group and don't really cancel each other out/limit concurrency - # for pull_request events newer jobs cancel earlier jobs to save on CI etc. - group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'push' && github.sha || github.head_ref || github.ref }} - cancel-in-progress: true - -jobs: - permission-check: - runs-on: ubuntu-latest - steps: - - name: Check repository permission for user which triggered workflow - uses: sushichop/action-repository-permission@13d208f5ae7a6a3fc0e5a7c2502c214983f0241c - with: - required-permission: write - comment-not-permitted: Sorry, you don't have permission to trigger this workflow. 
- - run-tests-local-testnet: - if: contains(github.event.pull_request.labels.*.name, 'CICD:non-required-tests') - needs: [permission-check] - runs-on: runs-on,cpu=64,family=c7,hdd=500,image=aptos-ubuntu-x64,run-id=${{ github.run_id }} - env: - # spin up the local testnet using the latest devnet image - VALIDATOR_IMAGE_REPO: ${{ vars.GCP_DOCKER_ARTIFACT_REPO }}/validator - FAUCET_IMAGE_REPO: ${{ vars.GCP_DOCKER_ARTIFACT_REPO }}/faucet - INDEXER_GRPC_IMAGE_REPO: ${{ vars.GCP_DOCKER_ARTIFACT_REPO }}/indexer-grpc - IMAGE_TAG: devnet - - steps: - - uses: actions/checkout@v4 - - - name: Install grpcurl - run: curl -sSL "https://github.com/fullstorydev/grpcurl/releases/download/v1.8.7/grpcurl_1.8.7_linux_x86_64.tar.gz" | sudo tar -xz -C /usr/local/bin - - - name: Set up Rust - uses: aptos-labs/aptos-core/.github/actions/rust-setup@main - with: - GIT_CREDENTIALS: ${{ secrets.GIT_CREDENTIALS }} - - - uses: aptos-labs/aptos-core/.github/actions/docker-setup@main - with: - GCP_WORKLOAD_IDENTITY_PROVIDER: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }} - GCP_SERVICE_ACCOUNT_EMAIL: ${{ secrets.GCP_SERVICE_ACCOUNT_EMAIL }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_DOCKER_ARTIFACT_REPO: ${{ secrets.AWS_DOCKER_ARTIFACT_REPO }} - GIT_CREDENTIALS: ${{ secrets.GIT_CREDENTIALS }} - - - uses: ./.github/actions/python-setup - with: - pyproject_directory: ./testsuite - - - name: Run indexer gRPC dependencies locally (devnet) - shell: bash - working-directory: ./testsuite - run: poetry run python indexer_grpc_local.py --verbose start --no-indexer-grpc - - - name: Run indexer gRPC integration tests - shell: bash - run: cargo nextest run --features integration-tests --package aptos-indexer-grpc-integration-tests - - - name: Print docker-compose indexer-grpc deps logs on failure - if: ${{ failure() }} - working-directory: docker/compose/indexer-grpc - run: docker-compose logs - - - name: Print docker-compose 
validator-testnet logs on failure - if: ${{ failure() }} - working-directory: docker/compose/validator-testnet - run: docker-compose logs - -# validator-testnet-validator-1 diff --git a/.github/workflows/indexer-processor-testing.yaml b/.github/workflows/indexer-processor-testing.yaml new file mode 100644 index 0000000000000..091ef7982b017 --- /dev/null +++ b/.github/workflows/indexer-processor-testing.yaml @@ -0,0 +1,150 @@ + +name: Trigger Processor Tests on JSON Change + +on: + workflow_dispatch: + pull_request: # Trigger on PR-level events + branches: + - main + paths: + - 'ecosystem/indexer-grpc/indexer-test-transactions/**' # Only trigger if files under this path change + +# the required permissions to request the ID token +permissions: + id-token: write # This is required for GCP authentication + contents: read # Ensure the workflow has access to repository contents + +jobs: + dispatch_event: + runs-on: runs-on,cpu=16,family=c7,hdd=500,image=aptos-ubuntu-x64,run-id=${{ github.run_id }} + + steps: + - name: Checkout the repository + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.ref }} + + - name: Set up Rust + uses: aptos-labs/aptos-core/.github/actions/rust-setup@main + with: + GIT_CREDENTIALS: ${{ secrets.GIT_CREDENTIALS }} + + # Install necessary system dependencies + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get install build-essential libssl-dev pkg-config + + # Ensure Rust is updated + - name: Update Rust toolchain + run: rustup update + + - name: Run CLI to Generate JSON Files + run: | + cd ecosystem/indexer-grpc/indexer-transaction-generator + cargo run -- --testing-folder ./example_tests --output-folder ../indexer-test-transactions/new_json_transactions + + - name: Install jq + run: sudo apt-get install jq # Ensure jq is installed for JSON processing + + # TODO: improve this step to be easily maintainable and extensible + # Prepare Original and New JSON Files + - name: Prepare and 
Clean JSON Files + run: | + cd ecosystem/indexer-grpc/indexer-test-transactions + + for folder in json_transactions/scripted_transactions new_json_transactions/scripted_transactions; do + for file in $folder/*.json; do + echo "Processing $file..." + base_file=$(basename "$file") + + jq 'del(.timestamp, + .version, + .info.hash, + .info.stateChangeHash, + .info.accumulatorRootHash, + .info.changes[].writeResource.stateKeyHash, + .info.changes[].writeResource.type.address, + .info.changes[].writeResource.address, + .info.changes[].writeTableItem.stateKeyHash, + .info.changes[].writeTableItem.data.key, + .info.changes[].writeTableItem.data.value, + .epoch, + .blockHeight, + .sizeInfo, + .user.request.sender, + .user.request.expirationTimestampSecs.seconds, + .user.request.signature.ed25519.publicKey, + .user.request.signature.ed25519.signature) + | (.info.changes[].writeResource.data |= + if type == "string" then + (fromjson + | del(.authentication_key) + | walk(if type == "object" and has("addr") then del(.addr) else . end) + | tostring) + else . end)' "$file" > "$folder/cleaned_$base_file" + done + done + + - name: Compare JSON Files Across Multiple Folders + id: diff_check + run: | + . scripts/indexer_test_txns_compare_and_diff.sh + + - name: Handle New Files and Differences + run: | + echo "Checking outputs from diff_check step..." + echo "New file found: ${{ steps.diff_check.outputs.new_file_found }}" + echo "Diff found: ${{ steps.diff_check.outputs.diff_found }}" + + if [ "${{ steps.diff_check.outputs.new_file_found }}" == "true" ]; then + echo "New JSON files detected:" + echo "${{ steps.diff_check.outputs.new_files }}" # Print all new files with paths + exit 0 # NOTE(review): this exits 0 (success) although the intent was to fail and enforce manual review — confirm whether `exit 1` was meant here + elif [ "${{ steps.diff_check.outputs.diff_found }}" == "true" ]; then + echo "Differences detected. Proceeding with dispatch event."
+ echo "Modified files:" + echo "${{ steps.diff_check.outputs.modified_files }}" # Print modified files with paths + else + echo "No differences or new files detected." + exit 0 # Proceed successfully only if no new files or differences are found + fi + + - id: auth + if: steps.diff_check.outputs.diff_found == 'true' && steps.diff_check.outputs.new_file_found == 'false' + uses: "google-github-actions/auth@v2" + with: + workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }} + service_account: ${{ secrets.GCP_SERVICE_ACCOUNT_EMAIL }} + + - name: Log active service account email + if: steps.diff_check.outputs.diff_found == 'true' && steps.diff_check.outputs.new_file_found == 'false' + run: | + gcloud auth list --filter=status:ACTIVE --format="value(account)" + + - id: 'secrets' + if: steps.diff_check.outputs.diff_found == 'true' && steps.diff_check.outputs.new_file_found == 'false' + uses: 'google-github-actions/get-secretmanager-secrets@v2' + with: + secrets: |- + token:aptos-ci/github-actions-repository-dispatch + + # Conditionally Dispatch Event to Processor Repo if Differences Found + - name: Dispatch Event to Processor Repo + if: steps.diff_check.outputs.diff_found == 'true' && steps.diff_check.outputs.new_file_found == 'false' + uses: peter-evans/repository-dispatch@v3.0.0 + with: + TOKEN: '${{ steps.secrets.outputs.token }}' + repository: 'aptos-labs/aptos-indexer-processors' + event-type: 'test-txn-json-change-detected' + client-payload: '{"commit_hash": "${{ github.sha }}"}' + + # Poll Processor Repo for Workflow Run Status and Memorize Run ID to check the job status + - name: Poll for Workflow Run and Wait for Job Completion + if: steps.diff_check.outputs.diff_found == 'true' && steps.diff_check.outputs.new_file_found == 'false' + id: poll_status + run: | + . 
scripts/indexer_processor_tests_status_poll.sh + env: + GITHUB_TOKEN: ${{ steps.secrets.outputs.token }} # Pass the correct GitHub token + GITHUB_SHA: ${{ github.sha }} \ No newline at end of file diff --git a/.github/workflows/replay-verify.yaml b/.github/workflows/replay-verify.yaml index ae44009412ac8..ac498ad123fc4 100644 --- a/.github/workflows/replay-verify.yaml +++ b/.github/workflows/replay-verify.yaml @@ -32,7 +32,6 @@ on: paths: - ".github/workflows/replay-verify.yaml" - ".github/workflows/workflow-run-replay-verify.yaml" - - "testsuite/replay_verify.py" schedule: - cron: "0 22 * * 0,2,4" # The main branch cadence. This runs every Sun,Tues,Thurs diff --git a/.github/workflows/run-fullnode-sync.yaml b/.github/workflows/run-fullnode-sync.yaml index 82295e882bc77..c4c2ce1e2d087 100644 --- a/.github/workflows/run-fullnode-sync.yaml +++ b/.github/workflows/run-fullnode-sync.yaml @@ -57,7 +57,7 @@ on: jobs: fullnode-sync: - runs-on: medium-perf-docker-with-local-ssd + runs-on: runs-on,cpu=16,ram=64,family=m5ad,image=aptos-ubuntu-x64,run-id=${{ github.run_id }} timeout-minutes: ${{ inputs.TIMEOUT_MINUTES || 300 }} # the default run is 300 minutes (5 hours). Specified here because workflow_dispatch uses string rather than number steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/workflow-run-execution-performance.yaml b/.github/workflows/workflow-run-execution-performance.yaml index 086b59b700d3d..d57b0ff678bd0 100644 --- a/.github/workflows/workflow-run-execution-performance.yaml +++ b/.github/workflows/workflow-run-execution-performance.yaml @@ -12,16 +12,29 @@ on: required: false default: executor-benchmark-runner type: string - IS_FULL_RUN: + FLOW: required: false - default: false - type: boolean - description: Run complete version of the tests + default: CONTINUOUS + type: string + description: Which set of tests to run. 
IGNORE_TARGET_DETERMINATION: required: false default: false type: boolean description: Ignore target determination and run the tests + SKIP_MOVE_E2E: + required: false + default: false + type: boolean + description: Whether to run or skip move-only e2e tests at the beginning. + SOURCE: + required: false + default: CI + type: string + NUMBER_OF_EXECUTION_THREADS: + required: false + default: "32" + type: string # This allows the workflow to be triggered manually from the Github UI or CLI # NOTE: because the "number" type is not supported, we default to 720 minute timeout workflow_dispatch: @@ -36,18 +49,47 @@ on: type: choice options: - executor-benchmark-runner - description: The name of the runner to use for the test. - IS_FULL_RUN: + - benchmark-t2d-32 + - benchmark-t2d-60 + - benchmark-c3d-30 + - benchmark-c3d-60 + - benchmark-c3d-180 + - benchmark-n4-32 + - benchmark-c4-32 + - benchmark-c4-48 + - benchmark-c4-96 + description: The name of the runner to use for the test. (which decides machine specs) + NUMBER_OF_EXECUTION_THREADS: + required: false + default: "32" + type: string + FLOW: + required: false + default: LAND_BLOCKING + options: + - LAND_BLOCKING + - CONTINUOUS + - MAINNET + - MAINNET_LARGE_DB + type: choice + description: Which set of tests to run. MAINNET/MAINNET_LARGE_DB are for performance validation of mainnet nodes. + SKIP_MOVE_E2E: required: false default: false type: boolean - description: Run complete version of the tests + description: Whether to skip move-only e2e tests at the beginning. 
IGNORE_TARGET_DETERMINATION: required: false - default: false + default: true type: boolean description: Ignore target determination and run the tests - + SOURCE: + required: false + default: ADHOC + options: + - ADHOC + type: choice + description: Test source (always adhoc from here) jobs: # This job determines which tests to run test-target-determinator: @@ -63,7 +105,7 @@ jobs: # Run single node execution performance tests single-node-performance: needs: test-target-determinator - timeout-minutes: 60 + timeout-minutes: 120 runs-on: ${{ inputs.RUNNER_NAME }} steps: - uses: actions/checkout@v4 @@ -78,13 +120,8 @@ jobs: - name: Run single node execution benchmark in performance build mode shell: bash - run: TABULATE_INSTALL=lib-only pip install tabulate && testsuite/single_node_performance.py - if: ${{ !inputs.IS_FULL_RUN && (inputs.IGNORE_TARGET_DETERMINATION || needs.test-target-determinator.outputs.run_execution_performance_test == 'true') }} - - - name: Run full version of the single node execution benchmark in performance build mode - shell: bash - run: TABULATE_INSTALL=lib-only pip install tabulate && FLOW=CONTINUOUS testsuite/single_node_performance.py - if: ${{ inputs.IS_FULL_RUN && (inputs.IGNORE_TARGET_DETERMINATION || needs.test-target-determinator.outputs.run_execution_performance_test == 'true') }} + run: TABULATE_INSTALL=lib-only pip install tabulate && FLOW="${{ inputs.FLOW }}" SOURCE="${{ inputs.SOURCE }}" RUNNER_NAME="${{ inputs.RUNNER_NAME }}" SKIP_MOVE_E2E="${{ inputs.SKIP_MOVE_E2E && '1' || '' }}" NUMBER_OF_EXECUTION_THREADS="${{ inputs.NUMBER_OF_EXECUTION_THREADS }}" testsuite/single_node_performance.py + if: ${{ (inputs.IGNORE_TARGET_DETERMINATION || needs.test-target-determinator.outputs.run_execution_performance_test == 'true') }} - run: echo "Skipping single node execution performance! Unrelated changes detected." 
if: ${{ !inputs.IGNORE_TARGET_DETERMINATION && needs.test-target-determinator.outputs.run_execution_performance_test != 'true' }} diff --git a/.github/workflows/workflow-run-replay-verify.yaml b/.github/workflows/workflow-run-replay-verify.yaml index cb7af2922a65d..40788a9fa0e28 100644 --- a/.github/workflows/workflow-run-replay-verify.yaml +++ b/.github/workflows/workflow-run-replay-verify.yaml @@ -110,7 +110,6 @@ jobs: # which cleans up the target directory in its post action path: | aptos-debugger - testsuite/replay_verify.py key: aptos-debugger-${{ inputs.GIT_SHA || github.sha }} - name: Prepare for build if not cached @@ -185,12 +184,11 @@ jobs: matrix: job_id: ${{ fromJson(needs.prepare.outputs.job_ids) }} steps: - - name: Load cached aptos-debugger binary and replay_verify.py script + - name: Load cached aptos-debugger binary uses: actions/cache/restore@v4 with: path: | aptos-debugger - testsuite/replay_verify.py key: aptos-debugger-${{ inputs.GIT_SHA || github.sha }} fail-on-cache-miss: true diff --git a/Cargo.lock b/Cargo.lock index 601203958b7ea..4ab17dbdbd8d6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -310,7 +310,7 @@ dependencies = [ "dashmap", "diesel", "diesel-async", - "dirs", + "dirs 5.0.1", "futures", "hex", "itertools 0.13.0", @@ -328,6 +328,7 @@ dependencies = [ "move-ir-types", "move-model", "move-package", + "move-prover-boogie-backend", "move-symbol-pool", "move-unit-test", "move-vm-runtime", @@ -342,6 +343,7 @@ dependencies = [ "serde_json", "serde_yaml 0.8.26", "server-framework", + "set_env", "shadow-rs", "tempfile", "thiserror", @@ -387,11 +389,13 @@ dependencies = [ "aptos-crypto", "aptos-infallible", "aptos-logger", + "aptos-mempool", "aptos-runtimes", "aptos-storage-interface", "aptos-system-utils 0.1.0", "aptos-types", "bcs 0.1.4", + "futures-channel", "http 0.2.11", "hyper 0.14.28", "sha256", @@ -403,7 +407,6 @@ dependencies = [ name = "aptos-aggregator" version = "0.1.0" dependencies = [ - "aptos-logger", "aptos-types", "bcs 0.1.4", 
"claims", @@ -1006,6 +1009,7 @@ dependencies = [ "aes-gcm", "anyhow", "aptos-crypto-derive", + "arbitrary", "ark-bls12-381", "ark-bn254", "ark-ec", @@ -1195,6 +1199,7 @@ dependencies = [ "aptos-config", "aptos-db-indexer-schemas", "aptos-logger", + "aptos-metrics-core", "aptos-proptest-helpers", "aptos-resource-viewer", "aptos-rocksdb-options", @@ -1205,6 +1210,7 @@ dependencies = [ "bytes", "dashmap", "move-core-types", + "once_cell", "rand 0.7.3", ] @@ -1540,7 +1546,6 @@ version = "0.1.0" dependencies = [ "anyhow", "aptos-crypto", - "aptos-drop-helper", "aptos-scratchpad", "aptos-secure-net", "aptos-storage-interface", @@ -2902,8 +2907,6 @@ dependencies = [ "claims", "crossbeam", "dashmap", - "derivative", - "move-binary-format", "move-core-types", "move-vm-types", "proptest", @@ -3079,7 +3082,7 @@ dependencies = [ ] [[package]] -name = "aptos-nft-metadata-crawler-parser" +name = "aptos-nft-metadata-crawler" version = "0.1.0" dependencies = [ "anyhow", @@ -3702,6 +3705,7 @@ dependencies = [ "once_cell", "rand 0.7.3", "rand_core 0.5.1", + "serde", "serde_json", "tiny-bip39", "tokio", @@ -4284,6 +4288,7 @@ dependencies = [ "aptos-experimental-runtimes", "aptos-infallible", "aptos-proptest-helpers", + "arbitrary", "ark-bn254", "ark-crypto-primitives", "ark-ec", @@ -4344,6 +4349,7 @@ dependencies = [ "strum_macros 0.24.3", "thiserror", "tokio", + "tracing", "url", ] @@ -4593,6 +4599,24 @@ dependencies = [ "warp", ] +[[package]] +name = "aptos-workspace-server" +version = "0.1.0" +dependencies = [ + "anyhow", + "aptos", + "aptos-cached-packages", + "aptos-config", + "aptos-faucet-core", + "aptos-node", + "aptos-types", + "futures", + "rand 0.7.3", + "tempfile", + "tokio", + "url", +] + [[package]] name = "arbitrary" version = "1.3.2" @@ -7330,13 +7354,22 @@ dependencies = [ "walkdir", ] +[[package]] +name = "dirs" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" +dependencies = [ + "dirs-sys 0.3.7", +] + [[package]] name = "dirs" version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" dependencies = [ - "dirs-sys", + "dirs-sys 0.4.1", ] [[package]] @@ -7349,6 +7382,17 @@ dependencies = [ "dirs-sys-next", ] +[[package]] +name = "dirs-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users", + "winapi 0.3.9", +] + [[package]] name = "dirs-sys" version = "0.4.1" @@ -8369,6 +8413,7 @@ name = "fuzzer-fuzz" version = "0.0.0" dependencies = [ "aptos-cached-packages", + "aptos-crypto", "aptos-framework", "aptos-language-e2e-tests", "aptos-types", @@ -8382,6 +8427,8 @@ dependencies = [ "move-vm-types", "once_cell", "rayon", + "serde", + "serde_json", ] [[package]] @@ -10799,6 +10846,19 @@ dependencies = [ "tempfile", ] +[[package]] +name = "move-ast-generator-tests" +version = "0.1.0" +dependencies = [ + "anyhow", + "codespan-reporting", + "datatest-stable", + "move-compiler-v2", + "move-model", + "move-prover-test-utils", + "move-stackless-bytecode", +] + [[package]] name = "move-async-vm" version = "0.1.0" @@ -11413,6 +11473,7 @@ dependencies = [ "num 0.4.1", "paste", "petgraph 0.6.5", + "topological-sort", ] [[package]] @@ -12179,9 +12240,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.62" +version = "0.10.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cde4d2d9200ad5909f8dac647e29482e07c3a35de8a13fce7c9c7747ad9f671" +checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" dependencies = [ "bitflags 2.6.0", "cfg-if", @@ -12211,9 +12272,9 @@ checksum = 
"ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.98" +version = "0.9.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1665caf8ab2dc9aef43d1c0023bd904633a6a05cb30b0ad59bec2ae986e57a7" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" dependencies = [ "cc", "libc", @@ -15189,6 +15250,15 @@ dependencies = [ "warp", ] +[[package]] +name = "set_env" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51864fb62fd09b8a7d931a11832bfbbda5297425cfc9ce04b54bc86c945c58eb" +dependencies = [ + "dirs 4.0.0", +] + [[package]] name = "sha1" version = "0.10.6" @@ -15786,9 +15856,9 @@ dependencies = [ [[package]] name = "strum" -version = "0.26.2" +version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" [[package]] name = "strum_macros" @@ -16633,7 +16703,7 @@ dependencies = [ "prost 0.12.3", "rustls-native-certs 0.7.0", "rustls-pemfile 2.1.1", - "strum 0.26.2", + "strum 0.26.3", "strum_macros 0.26.4", "tokio", "tokio-rustls 0.26.0", @@ -16691,6 +16761,12 @@ dependencies = [ "tonic 0.11.0", ] +[[package]] +name = "topological-sort" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" + [[package]] name = "tower" version = "0.4.13" diff --git a/Cargo.toml b/Cargo.toml index 1f882e3c4a2d3..d1f271a95416b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,6 +29,7 @@ members = [ "aptos-move/aptos-vm-logging", "aptos-move/aptos-vm-profiling", "aptos-move/aptos-vm-types", + "aptos-move/aptos-workspace-server", "aptos-move/block-executor", "aptos-move/e2e-benchmark", "aptos-move/e2e-move-tests", @@ -125,7 +126,7 @@ members = [ 
"ecosystem/indexer-grpc/indexer-test-transactions", "ecosystem/indexer-grpc/indexer-transaction-generator", "ecosystem/indexer-grpc/transaction-filter", - "ecosystem/nft-metadata-crawler-parser", + "ecosystem/nft-metadata-crawler", "ecosystem/node-checker", "ecosystem/node-checker/fn-check-client", "execution/block-partitioner", @@ -220,6 +221,7 @@ members = [ "third_party/move/move-model", "third_party/move/move-model/bytecode", "third_party/move/move-model/bytecode-test-utils", + "third_party/move/move-model/bytecode/ast-generator-tests", "third_party/move/move-prover", "third_party/move/move-prover/boogie-backend", "third_party/move/move-prover/bytecode-pipeline", @@ -392,7 +394,7 @@ aptos-network-benchmark = { path = "network/benchmark" } aptos-network-builder = { path = "network/builder" } aptos-network-checker = { path = "crates/aptos-network-checker" } aptos-network-discovery = { path = "network/discovery" } -aptos-nft-metadata-crawler-parser = { path = "ecosystem/nft-metadata-crawler-parser" } +aptos-nft-metadata-crawler = { path = "ecosystem/nft-metadata-crawler" } aptos-node = { path = "aptos-node" } aptos-node-checker = { path = "ecosystem/node-checker" } aptos-node-identity = { path = "crates/aptos-node-identity" } @@ -744,6 +746,7 @@ serde-generate = { git = "https://github.com/aptos-labs/serde-reflection", rev = serde-reflection = { git = "https://github.com/aptos-labs/serde-reflection", rev = "73b6bbf748334b71ff6d7d09d06a29e3062ca075" } serde_with = "3.4.0" serde_yaml = "0.8.24" +set_env = "1.3.4" shadow-rs = "0.16.2" simplelog = "0.9.0" smallbitvec = "2.5.1" @@ -789,6 +792,7 @@ tonic = { version = "0.11.0", features = [ "zstd", ] } tonic-reflection = "0.11.0" +topological-sort = "0.2.2" triomphe = "0.1.9" tui = "0.19.0" typed-arena = "2.0.2" diff --git a/api/doc/spec.json b/api/doc/spec.json index bf944fc8e7000..2693b4c7289a0 100644 --- a/api/doc/spec.json +++ b/api/doc/spec.json @@ -14269,6 +14269,79 @@ } } }, + "BlockMetadataExtension": { + 
"type": "object", + "oneOf": [ + { + "$ref": "#/components/schemas/BlockMetadataExtension_BlockMetadataExtensionEmpty" + }, + { + "$ref": "#/components/schemas/BlockMetadataExtension_BlockMetadataExtensionRandomness" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "v0": "#/components/schemas/BlockMetadataExtension_BlockMetadataExtensionEmpty", + "v1": "#/components/schemas/BlockMetadataExtension_BlockMetadataExtensionRandomness" + } + } + }, + "BlockMetadataExtensionEmpty": { + "type": "object" + }, + "BlockMetadataExtensionRandomness": { + "type": "object", + "properties": { + "randomness": { + "$ref": "#/components/schemas/HexEncodedBytes" + } + } + }, + "BlockMetadataExtension_BlockMetadataExtensionEmpty": { + "allOf": [ + { + "type": "object", + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "v0" + ], + "example": "v0" + } + } + }, + { + "$ref": "#/components/schemas/BlockMetadataExtensionEmpty" + } + ] + }, + "BlockMetadataExtension_BlockMetadataExtensionRandomness": { + "allOf": [ + { + "type": "object", + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "v1" + ], + "example": "v1" + } + } + }, + { + "$ref": "#/components/schemas/BlockMetadataExtensionRandomness" + } + ] + }, "BlockMetadataTransaction": { "type": "object", "description": "A block metadata transaction\n\nThis signifies the beginning of a block, and contains information\nabout the specific block", @@ -14365,6 +14438,17 @@ }, "timestamp": { "$ref": "#/components/schemas/U64" + }, + "block_metadata_extension": { + "allOf": [ + { + "$ref": "#/components/schemas/BlockMetadataExtension" + }, + { + "description": "If some, it means the internal txn type is `aptos_types::transaction::Transaction::BlockMetadataExt`.\nOtherwise, it is `aptos_types::transaction::Transaction::BlockMetadata`.\n\nNOTE: we could have introduced a new APT txn type to represent the corresponding internal type,\nbut 
that is a breaking change to the ecosystem.\n\nNOTE: `oai` does not support `flatten` together with `skip_serializing_if`.", + "default": null + } + ] } } }, diff --git a/api/doc/spec.yaml b/api/doc/spec.yaml index a515d33940b19..9ff83e9c1dbdc 100644 --- a/api/doc/spec.yaml +++ b/api/doc/spec.yaml @@ -10669,6 +10669,47 @@ components: $ref: '#/components/schemas/U64' block_end_info: $ref: '#/components/schemas/BlockEndInfo' + BlockMetadataExtension: + type: object + oneOf: + - $ref: '#/components/schemas/BlockMetadataExtension_BlockMetadataExtensionEmpty' + - $ref: '#/components/schemas/BlockMetadataExtension_BlockMetadataExtensionRandomness' + discriminator: + propertyName: type + mapping: + v0: '#/components/schemas/BlockMetadataExtension_BlockMetadataExtensionEmpty' + v1: '#/components/schemas/BlockMetadataExtension_BlockMetadataExtensionRandomness' + BlockMetadataExtensionEmpty: + type: object + BlockMetadataExtensionRandomness: + type: object + properties: + randomness: + $ref: '#/components/schemas/HexEncodedBytes' + BlockMetadataExtension_BlockMetadataExtensionEmpty: + allOf: + - type: object + required: + - type + properties: + type: + type: string + enum: + - v0 + example: v0 + - $ref: '#/components/schemas/BlockMetadataExtensionEmpty' + BlockMetadataExtension_BlockMetadataExtensionRandomness: + allOf: + - type: object + required: + - type + properties: + type: + type: string + enum: + - v1 + example: v1 + - $ref: '#/components/schemas/BlockMetadataExtensionRandomness' BlockMetadataTransaction: type: object description: |- @@ -10747,6 +10788,18 @@ components: format: uint32 timestamp: $ref: '#/components/schemas/U64' + block_metadata_extension: + allOf: + - $ref: '#/components/schemas/BlockMetadataExtension' + - description: |- + If some, it means the internal txn type is `aptos_types::transaction::Transaction::BlockMetadataExt`. + Otherwise, it is `aptos_types::transaction::Transaction::BlockMetadata`. 
+ + NOTE: we could have introduced a new APT txn type to represent the corresponding internal type, + but that is a breaking change to the ecosystem. + + NOTE: `oai` does not support `flatten` together with `skip_serializing_if`. + default: null DKGResultTransaction: type: object required: diff --git a/api/goldens/aptos_api__tests__state_test__test_get_account_module.json b/api/goldens/aptos_api__tests__state_test__test_get_account_module.json index 37c8ff52436ac..c1776be65eea1 100644 --- a/api/goldens/aptos_api__tests__state_test__test_get_account_module.json +++ b/api/goldens/aptos_api__tests__state_test__test_get_account_module.json @@ -1,5 +1,5 @@ { - "bytecode": "0xa11ceb0b060000000c010002020208030a2805322307557b08d0012006f0010a10fa019a010a94030d0ca1037e0d9f04060fa504040002000306000004070000050001000006020300000704050000080406000009070800000a040300000b090500000c0906000205070301080002050301080101060800010301050206080006080101010106080100076163636f756e74066f626a6563740467756964044755494402494406637265617465096372656174655f69640c6372656174696f6e5f6e756d0f63726561746f725f616464726573730565715f69640269640f69645f6372656174696f6e5f6e756d1269645f63726561746f725f616464726573730461646472000000000000000000000000000000000000000000000000000000000000000103080000000000000000126170746f733a3a6d657461646174615f763185010100000000000000001d45475549445f47454e455241544f525f4e4f545f5055424c49534845445b475549442067656e657261746f72206d757374206265207075626c6973686564206168656164206f66206669727374207573616765206f6620606372656174655f776974685f6361706162696c697479602066756e6374696f6e2e00000002010a080101020207030d0500030000050d0a01140c020a02060100000000000000160b01150b020b001201120002010100000a040b010b00120102020100000a050b00100010011402030100000a050b00100010021402040100000a050b0010000b012102050100000a040b0010001402060100000a040b0010011402070100000a040b00100214020000010001010000000100", + "bytecode": 
"0xa11ceb0b0700000a0c010002020208030a30053a23075d7b08d8012006f8010a1082029a010a9c030d0ca9037e0da704060fad040400020003060000040700000500010001000602030001000704050001000804060001000907080001000a04030001000b09050001000c090600010205070301080002050301080101060800010301050206080006080101010106080100076163636f756e74066f626a6563740467756964044755494402494406637265617465096372656174655f69640c6372656174696f6e5f6e756d0f63726561746f725f616464726573730565715f69640269640f69645f6372656174696f6e5f6e756d1269645f63726561746f725f616464726573730461646472000000000000000000000000000000000000000000000000000000000000000103080000000000000000126170746f733a3a6d657461646174615f763185010100000000000000001d45475549445f47454e455241544f525f4e4f545f5055424c49534845445b475549442067656e657261746f72206d757374206265207075626c6973686564206168656164206f66206669727374207573616765206f6620606372656174655f776974685f6361706162696c697479602066756e6374696f6e2e00000002010a080101020207030d0500030000050d0a01140c020a02060100000000000000160b01150b020b001201120002010100000a040b010b00120102020100000a050b00100010011402030100000a050b00100010021402040100000a050b0010000b012102050100000a040b0010001402060100000a040b0010011402070100000a040b00100214020000010001010000000100", "abi": { "address": "0x1", "name": "guid", diff --git a/api/goldens/aptos_api__tests__transactions_test__test_get_transaction_by_hash_with_delayed_internal_indexer.json b/api/goldens/aptos_api__tests__transactions_test__test_get_transaction_by_hash_with_delayed_internal_indexer.json new file mode 100644 index 0000000000000..1e26336dfd873 --- /dev/null +++ b/api/goldens/aptos_api__tests__transactions_test__test_get_transaction_by_hash_with_delayed_internal_indexer.json @@ -0,0 +1,23 @@ +{ + "hash": "", + "sender": "0x34bf7e2d17674feb234371a7ea58efd715f0e56ba20ebf13789480d9d643afaf", + "sequence_number": "0", + "max_gas_amount": "100000000", + "gas_unit_price": "0", + "expiration_timestamp_secs": "18446744073709551615", + "payload": { + "function": 
"0x1::aptos_account::transfer", + "type_arguments": [], + "arguments": [ + "0x1", + "1" + ], + "type": "entry_function_payload" + }, + "signature": { + "public_key": "0xd5a781494d2bf1a174ddffde1e02cb8881cff6dab70e61cbdef393deac0ce639", + "signature": "0xbdc9e553e86cdee876de3318bccd8c6499923b719ab5f189e8b43ba91771645f01c3ded4061e20b3bb85767e475dfe24f76b4aed46860c9328baf28d11d2c701", + "type": "ed25519_signature" + }, + "type": "pending_transaction" +} diff --git a/api/src/context.rs b/api/src/context.rs index 3ba77fe8923eb..1a77f5d5ee1f4 100644 --- a/api/src/context.rs +++ b/api/src/context.rs @@ -267,6 +267,20 @@ impl Context { self.get_latest_storage_ledger_info() } + pub fn get_latest_internal_and_storage_ledger_info( + &self, + ) -> Result<(Option, LedgerInfo), E> { + if let Some(indexer_reader) = self.indexer_reader.as_ref() { + if indexer_reader.is_internal_indexer_enabled() { + return Ok(( + Some(self.get_latest_internal_indexer_ledger_info()?), + self.get_latest_storage_ledger_info()?, + )); + } + } + Ok((None, self.get_latest_storage_ledger_info()?)) + } + pub fn get_latest_ledger_info_and_verify_lookup_version( &self, requested_ledger_version: Option, @@ -844,7 +858,7 @@ impl Context { } else { self.indexer_reader .as_ref() - .ok_or(anyhow!("Indexer reader is None")) + .ok_or_else(|| anyhow!("Indexer reader is None")) .map_err(|err| { E::internal_with_code(err, AptosErrorCode::InternalError, ledger_info) })? @@ -943,7 +957,7 @@ impl Context { } else { self.indexer_reader .as_ref() - .ok_or(anyhow!("Internal indexer reader doesn't exist"))? + .ok_or_else(|| anyhow!("Internal indexer reader doesn't exist"))? .get_events(event_key, start, order, limit as u64, ledger_version)? 
}; if order == Order::Descending { @@ -954,6 +968,10 @@ impl Context { } } + pub fn get_indexer_reader(&self) -> Option<&Arc> { + self.indexer_reader.as_ref() + } + fn next_bucket(&self, gas_unit_price: u64) -> u64 { match self .node_config diff --git a/api/src/runtime.rs b/api/src/runtime.rs index 219036a90b5d7..31e30eb64302e 100644 --- a/api/src/runtime.rs +++ b/api/src/runtime.rs @@ -18,12 +18,13 @@ use crate::{ transactions::TransactionsApi, view_function::ViewFunctionApi, }; -use anyhow::Context as AnyhowContext; +use anyhow::{anyhow, Context as AnyhowContext}; use aptos_config::config::{ApiConfig, NodeConfig}; use aptos_logger::info; use aptos_mempool::MempoolClientSender; use aptos_storage_interface::DbReader; use aptos_types::{chain_id::ChainId, indexer::indexer_db_reader::IndexerReader}; +use futures::channel::oneshot; use poem::{ handler, http::Method, @@ -45,13 +46,14 @@ pub fn bootstrap( db: Arc, mp_sender: MempoolClientSender, indexer_reader: Option>, + port_tx: Option>, ) -> anyhow::Result { let max_runtime_workers = get_max_runtime_workers(&config.api); let runtime = aptos_runtimes::spawn_named_runtime("api".into(), Some(max_runtime_workers)); let context = Context::new(chain_id, db, mp_sender, config.clone(), indexer_reader); - attach_poem_to_runtime(runtime.handle(), context.clone(), config, false) + attach_poem_to_runtime(runtime.handle(), context.clone(), config, false, port_tx) .context("Failed to attach poem to runtime")?; let context_cloned = context.clone(); @@ -167,6 +169,7 @@ pub fn attach_poem_to_runtime( context: Context, config: &NodeConfig, random_port: bool, + port_tx: Option>, ) -> anyhow::Result { let context = Arc::new(context); @@ -216,6 +219,13 @@ pub fn attach_poem_to_runtime( let actual_address = *actual_address .as_socket_addr() .context("Failed to get socket addr from local addr for Poem webserver")?; + + if let Some(port_tx) = port_tx { + port_tx + .send(actual_address.port()) + .map_err(|_| anyhow!("Failed to send port"))?; 
+ } + runtime_handle.spawn(async move { let cors = Cors::new() // To allow browsers to use cookies (for cookie-based sticky @@ -351,6 +361,7 @@ mod tests { context.db.clone(), context.mempool.ac_client.clone(), None, + None, ); assert!(ret.is_ok()); diff --git a/api/src/tests/accounts_test.rs b/api/src/tests/accounts_test.rs index a5199cf61c452..b5bac410dc0c9 100644 --- a/api/src/tests/accounts_test.rs +++ b/api/src/tests/accounts_test.rs @@ -144,6 +144,9 @@ async fn test_account_resources_by_ledger_version_with_context(mut context: Test async fn test_get_account_resources_by_ledger_version() { let context = new_test_context(current_function_name!()); test_account_resources_by_ledger_version_with_context(context).await; +} +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_get_account_resources_by_ledger_version_with_shard_context() { let shard_context = new_test_context_with_db_sharding_and_internal_indexer(current_function_name!()); test_account_resources_by_ledger_version_with_context(shard_context).await; diff --git a/api/src/tests/mod.rs b/api/src/tests/mod.rs index e7978f66a126e..340721b1ce200 100644 --- a/api/src/tests/mod.rs +++ b/api/src/tests/mod.rs @@ -21,7 +21,7 @@ mod transactions_test; mod view_function; mod webauthn_secp256r1_ecdsa; -use aptos_api_test_context::{new_test_context as super_new_test_context, TestContext}; +use aptos_api_test_context::{new_test_context_inner as super_new_test_context, TestContext}; use aptos_config::config::{internal_indexer_db_config::InternalIndexerDBConfig, NodeConfig}; fn new_test_context(test_name: String) -> TestContext { @@ -29,12 +29,28 @@ fn new_test_context(test_name: String) -> TestContext { } fn new_test_context_with_config(test_name: String, node_config: NodeConfig) -> TestContext { - super_new_test_context(test_name, node_config, false) + super_new_test_context(test_name, node_config, false, None) } +#[cfg(test)] fn new_test_context_with_db_sharding_and_internal_indexer(test_name: 
String) -> TestContext { let mut node_config = NodeConfig::default(); node_config.storage.rocksdb_configs.enable_storage_sharding = true; - node_config.indexer_db_config = InternalIndexerDBConfig::new(true, true, true, 10_000); - super_new_test_context(test_name, node_config, true) + node_config.indexer_db_config = InternalIndexerDBConfig::new(true, true, true, 10); + let test_context = super_new_test_context(test_name, node_config, false, None); + let _ = test_context + .get_indexer_reader() + .unwrap() + .wait_for_internal_indexer(0); + test_context +} + +fn new_test_context_with_sharding_and_delayed_internal_indexer( + test_name: String, + end_version: Option, +) -> TestContext { + let mut node_config = NodeConfig::default(); + node_config.storage.rocksdb_configs.enable_storage_sharding = true; + node_config.indexer_db_config = InternalIndexerDBConfig::new(true, true, true, 1); + super_new_test_context(test_name, node_config, false, end_version) } diff --git a/api/src/tests/multisig_transactions_test.rs b/api/src/tests/multisig_transactions_test.rs index 9cd59d00e41ee..a716b9d0091b7 100644 --- a/api/src/tests/multisig_transactions_test.rs +++ b/api/src/tests/multisig_transactions_test.rs @@ -50,6 +50,66 @@ async fn test_multisig_transaction_with_payload_succeeds() { assert_eq!(0, context.get_apt_balance(multisig_account).await); } +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_multisig_transaction_with_existing_account() { + let mut context = new_test_context(current_function_name!()); + let multisig_account = &mut context.create_account().await; + let owner_account_1 = &mut context.create_account().await; + let owner_account_2 = &mut context.create_account().await; + let owner_account_3 = &mut context.create_account().await; + let owners = vec![ + owner_account_1.address(), + owner_account_2.address(), + owner_account_3.address(), + ]; + context + .create_multisig_account_with_existing_account(multisig_account, owners.clone(), 2, 
1000) + .await; + assert_owners(&context, multisig_account.address(), owners).await; + assert_signature_threshold(&context, multisig_account.address(), 2).await; + + let multisig_payload = construct_multisig_txn_transfer_payload(owner_account_1.address(), 1000); + context + .create_multisig_transaction( + owner_account_1, + multisig_account.address(), + multisig_payload.clone(), + ) + .await; + // Owner 2 approves and owner 3 rejects. There are still 2 approvals total (owners 1 and 2) so + // the transaction can still be executed. + context + .approve_multisig_transaction(owner_account_2, multisig_account.address(), 1) + .await; + context + .reject_multisig_transaction(owner_account_3, multisig_account.address(), 1) + .await; + + let org_multisig_balance = context.get_apt_balance(multisig_account.address()).await; + let org_owner_1_balance = context.get_apt_balance(owner_account_1.address()).await; + + context + .execute_multisig_transaction(owner_account_2, multisig_account.address(), 202) + .await; + + // The multisig tx that transfers away 1000 APT should have succeeded. 
+ assert_multisig_tx_executed( + &mut context, + multisig_account.address(), + multisig_payload, + 1, + ) + .await; + assert_eq!( + org_multisig_balance - 1000, + context.get_apt_balance(multisig_account.address()).await + ); + assert_eq!( + org_owner_1_balance + 1000, + context.get_apt_balance(owner_account_1.address()).await + ); +} + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_multisig_transaction_to_update_owners() { let mut context = new_test_context(current_function_name!()); diff --git a/api/src/tests/transactions_test.rs b/api/src/tests/transactions_test.rs index 292c5318f8e6d..e8524525004d4 100644 --- a/api/src/tests/transactions_test.rs +++ b/api/src/tests/transactions_test.rs @@ -5,6 +5,7 @@ use super::new_test_context; use crate::tests::{ new_test_context_with_config, new_test_context_with_db_sharding_and_internal_indexer, + new_test_context_with_sharding_and_delayed_internal_indexer, }; use aptos_api_test_context::{assert_json, current_function_name, pretty, TestContext}; use aptos_config::config::{GasEstimationStaticOverride, NodeConfig}; @@ -491,6 +492,34 @@ async fn test_get_transaction_by_hash() { assert_json(resp, txns[0].clone()); } +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_get_transaction_by_hash_with_delayed_internal_indexer() { + let mut context = new_test_context_with_sharding_and_delayed_internal_indexer( + current_function_name!(), + Some(1), + ); + + let mut account = context.gen_account(); + let txn = context.create_user_account(&account).await; + context.commit_block(&vec![txn.clone()]).await; + let txn1 = context.account_transfer_to( + &mut account, + AccountAddress::from_hex_literal("0x1").unwrap(), + 1, + ); + context.commit_block(&vec![txn1.clone()]).await; + let committed_hash = txn1.committed_hash().to_hex_literal(); + + let _ = context + .get_indexer_reader() + .unwrap() + .wait_for_internal_indexer(1); + let resp = context + .get(&format!("/transactions/by_hash/{}", 
committed_hash)) + .await; + context.check_golden_output(resp); +} + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_get_transaction_by_hash_not_found() { let mut context = new_test_context(current_function_name!()); diff --git a/api/src/transactions.rs b/api/src/transactions.rs index 1e1214361961b..56495de0ad2c6 100644 --- a/api/src/transactions.rs +++ b/api/src/transactions.rs @@ -793,32 +793,38 @@ impl TransactionsApi { let context = self.context.clone(); let accept_type = accept_type.clone(); - let ledger_info = api_spawn_blocking(move || context.get_latest_ledger_info()).await?; - + let (internal_ledger_info_opt, storage_ledger_info) = + api_spawn_blocking(move || context.get_latest_internal_and_storage_ledger_info()) + .await?; + let storage_version = storage_ledger_info.ledger_version.into(); + let internal_ledger_version = internal_ledger_info_opt + .as_ref() + .map(|info| info.ledger_version.into()); + let latest_ledger_info = internal_ledger_info_opt.unwrap_or(storage_ledger_info); let txn_data = self - .get_by_hash(hash.into(), &ledger_info) + .get_by_hash(hash.into(), storage_version, internal_ledger_version) .await .context(format!("Failed to get transaction by hash {}", hash)) .map_err(|err| { BasicErrorWith404::internal_with_code( err, AptosErrorCode::InternalError, - &ledger_info, + &latest_ledger_info, ) })? 
.context(format!("Failed to find transaction with hash: {}", hash)) - .map_err(|_| transaction_not_found_by_hash(hash, &ledger_info))?; + .map_err(|_| transaction_not_found_by_hash(hash, &latest_ledger_info))?; - if let TransactionData::Pending(_) = txn_data { - if (start_time.elapsed().as_millis() as u64) < wait_by_hash_timeout_ms { - tokio::time::sleep(Duration::from_millis(wait_by_hash_poll_interval_ms)).await; - continue; - } + if matches!(txn_data, TransactionData::Pending(_)) + && (start_time.elapsed().as_millis() as u64) < wait_by_hash_timeout_ms + { + tokio::time::sleep(Duration::from_millis(wait_by_hash_poll_interval_ms)).await; + continue; } let api = self.clone(); return api_spawn_blocking(move || { - api.get_transaction_inner(&accept_type, txn_data, &ledger_info) + api.get_transaction_inner(&accept_type, txn_data, &latest_ledger_info) }) .await; } @@ -832,25 +838,34 @@ impl TransactionsApi { let context = self.context.clone(); let accept_type = accept_type.clone(); - let ledger_info = api_spawn_blocking(move || context.get_latest_ledger_info()).await?; + let (internal_ledger_info_opt, storage_ledger_info) = + api_spawn_blocking(move || context.get_latest_internal_and_storage_ledger_info()) + .await?; + let storage_version = storage_ledger_info.ledger_version.into(); + let internal_indexer_version = internal_ledger_info_opt + .as_ref() + .map(|info| info.ledger_version.into()); + let latest_ledger_info = internal_ledger_info_opt.unwrap_or(storage_ledger_info); let txn_data = self - .get_by_hash(hash.into(), &ledger_info) + .get_by_hash(hash.into(), storage_version, internal_indexer_version) .await .context(format!("Failed to get transaction by hash {}", hash)) .map_err(|err| { BasicErrorWith404::internal_with_code( err, AptosErrorCode::InternalError, - &ledger_info, + &latest_ledger_info, ) })? 
.context(format!("Failed to find transaction with hash: {}", hash)) - .map_err(|_| transaction_not_found_by_hash(hash, &ledger_info))?; + .map_err(|_| transaction_not_found_by_hash(hash, &latest_ledger_info))?; let api = self.clone(); - api_spawn_blocking(move || api.get_transaction_inner(&accept_type, txn_data, &ledger_info)) - .await + api_spawn_blocking(move || { + api.get_transaction_inner(&accept_type, txn_data, &latest_ledger_info) + }) + .await } fn get_transaction_by_version_inner( @@ -946,9 +961,11 @@ impl TransactionsApi { return Ok(GetByVersionResponse::VersionTooOld); } Ok(GetByVersionResponse::Found( - self.context - .get_transaction_by_version(version, ledger_info.version())? - .into(), + TransactionData::from_transaction_onchain_data( + self.context + .get_transaction_by_version(version, ledger_info.version())?, + ledger_info.version(), + )?, )) } @@ -959,23 +976,30 @@ impl TransactionsApi { async fn get_by_hash( &self, hash: aptos_crypto::HashValue, - ledger_info: &LedgerInfo, + storage_ledger_version: u64, + internal_ledger_version: Option, ) -> anyhow::Result> { - let context = self.context.clone(); - let version = ledger_info.version(); - let from_db = - tokio::task::spawn_blocking(move || context.get_transaction_by_hash(hash, version)) - .await - .context("Failed to join task to read transaction by hash")? - .context("Failed to read transaction by hash from DB")?; - Ok(match from_db { - None => self - .context - .get_pending_transaction_by_hash(hash) - .await? - .map(|t| t.into()), - _ => from_db.map(|t| t.into()), - }) + Ok( + match self.context.get_pending_transaction_by_hash(hash).await? { + None => { + let context_clone = self.context.clone(); + tokio::task::spawn_blocking(move || { + context_clone.get_transaction_by_hash(hash, storage_ledger_version) + }) + .await + .context("Failed to join task to read transaction by hash")? + .context("Failed to read transaction by hash from DB")? 
+ .map(|t| { + TransactionData::from_transaction_onchain_data( + t, + internal_ledger_version.unwrap_or(storage_ledger_version), + ) + }) + .transpose()? + }, + Some(t) => Some(t.into()), + }, + ) } /// List all transactions for an account diff --git a/api/test-context/src/test_context.rs b/api/test-context/src/test_context.rs index 45452d7311d15..f18f382263187 100644 --- a/api/test-context/src/test_context.rs +++ b/api/test-context/src/test_context.rs @@ -40,6 +40,7 @@ use aptos_types::{ block_info::BlockInfo, block_metadata::BlockMetadata, chain_id::ChainId, + indexer::indexer_db_reader::IndexerReader, ledger_info::{LedgerInfo, LedgerInfoWithSignatures}, transaction::{ signature_verified_transaction::into_signature_verified_block, Transaction, @@ -95,9 +96,18 @@ impl ApiSpecificConfig { } pub fn new_test_context( + test_name: String, + node_config: NodeConfig, + use_db_with_indexer: bool, +) -> TestContext { + new_test_context_inner(test_name, node_config, use_db_with_indexer, None) +} + +pub fn new_test_context_inner( test_name: String, mut node_config: NodeConfig, use_db_with_indexer: bool, + end_version: Option, ) -> TestContext { // Speculative logging uses a global variable and when many instances use it together, they // panic, so we disable this to run tests. 
@@ -125,28 +135,38 @@ pub fn new_test_context( &tmp_dir, node_config.storage.rocksdb_configs.enable_storage_sharding, ); - aptos_db.add_version_update_subscriber(sender).unwrap(); + if node_config + .indexer_db_config + .is_internal_indexer_db_enabled() + { + aptos_db.add_version_update_subscriber(sender).unwrap(); + } DbReaderWriter::wrap(aptos_db) } else { - DbReaderWriter::wrap( - AptosDB::open( - StorageDirPaths::from_path(&tmp_dir), - false, /* readonly */ - NO_OP_STORAGE_PRUNER_CONFIG, /* pruner */ - RocksdbConfigs { - enable_storage_sharding: node_config - .storage - .rocksdb_configs - .enable_storage_sharding, - ..Default::default() - }, - false, /* indexer */ - BUFFERED_STATE_TARGET_ITEMS_FOR_TEST, - DEFAULT_MAX_NUM_NODES_PER_LRU_CACHE_SHARD, - None, - ) - .unwrap(), + let mut aptos_db = AptosDB::open( + StorageDirPaths::from_path(&tmp_dir), + false, /* readonly */ + NO_OP_STORAGE_PRUNER_CONFIG, /* pruner */ + RocksdbConfigs { + enable_storage_sharding: node_config + .storage + .rocksdb_configs + .enable_storage_sharding, + ..Default::default() + }, + false, /* indexer */ + BUFFERED_STATE_TARGET_ITEMS_FOR_TEST, + DEFAULT_MAX_NUM_NODES_PER_LRU_CACHE_SHARD, + None, ) + .unwrap(); + if node_config + .indexer_db_config + .is_internal_indexer_db_enabled() + { + aptos_db.add_version_update_subscriber(sender).unwrap(); + } + DbReaderWriter::wrap(aptos_db) }; let ret = db_bootstrapper::maybe_bootstrap::(&db_rw, &genesis, genesis_waypoint).unwrap(); @@ -157,8 +177,12 @@ pub fn new_test_context( node_config .storage .set_data_dir(tmp_dir.path().to_path_buf()); - let mock_indexer_service = - MockInternalIndexerDBService::new_for_test(db_rw.reader.clone(), &node_config, recver); + let mock_indexer_service = MockInternalIndexerDBService::new_for_test( + db_rw.reader.clone(), + &node_config, + recver, + end_version, + ); let context = Context::new( ChainId::test(), @@ -170,8 +194,9 @@ pub fn new_test_context( // Configure the testing depending on which API version we're 
testing. let runtime_handle = tokio::runtime::Handle::current(); - let poem_address = attach_poem_to_runtime(&runtime_handle, context.clone(), &node_config, true) - .expect("Failed to attach poem to runtime"); + let poem_address = + attach_poem_to_runtime(&runtime_handle, context.clone(), &node_config, true, None) + .expect("Failed to attach poem to runtime"); let api_specific_config = ApiSpecificConfig::V1(poem_address); TestContext::new( @@ -428,6 +453,10 @@ impl TestContext { .await; } + pub fn get_indexer_reader(&self) -> Option<&Arc> { + self.context.get_indexer_reader() + } + pub async fn create_multisig_account( &mut self, account: &mut LocalAccount, @@ -451,6 +480,26 @@ impl TestContext { multisig_address } + pub async fn create_multisig_account_with_existing_account( + &mut self, + account: &mut LocalAccount, + owners: Vec, + signatures_required: u64, + initial_balance: u64, + ) { + let factory = self.transaction_factory(); + let txn = account.sign_with_transaction_builder( + factory + .create_multisig_account_with_existing_account(owners, signatures_required) + .expiration_timestamp_secs(u64::MAX), + ); + self.commit_block(&vec![ + txn, + self.account_transfer_to(account, account.address(), initial_balance), + ]) + .await; + } + pub async fn create_multisig_transaction( &mut self, owner: &mut LocalAccount, @@ -565,6 +614,16 @@ impl TestContext { self.context.get_latest_ledger_info::().unwrap() } + pub fn get_latest_storage_ledger_info(&self) -> aptos_api_types::LedgerInfo { + self.context + .get_latest_storage_ledger_info::() + .unwrap() + } + + pub fn get_indexer_readers(&self) -> Option<&Arc> { + self.context.get_indexer_reader() + } + pub fn get_transactions(&self, start: u64, limit: u16) -> Vec { self.context .get_transactions(start, limit, self.get_latest_ledger_info().version()) diff --git a/api/types/src/convert.rs b/api/types/src/convert.rs index 85fecd9ec32ec..7a9d699c20e12 100644 --- a/api/types/src/convert.rs +++ b/api/types/src/convert.rs @@ 
-4,9 +4,10 @@ use crate::{ transaction::{ - BlockEpilogueTransaction, DecodedTableData, DeleteModule, DeleteResource, DeleteTableItem, - DeletedTableData, MultisigPayload, MultisigTransactionPayload, StateCheckpointTransaction, - UserTransactionRequestInner, WriteModule, WriteResource, WriteTableItem, + BlockEpilogueTransaction, BlockMetadataTransaction, DecodedTableData, DeleteModule, + DeleteResource, DeleteTableItem, DeletedTableData, MultisigPayload, + MultisigTransactionPayload, StateCheckpointTransaction, UserTransactionRequestInner, + WriteModule, WriteResource, WriteTableItem, }, view::{ViewFunction, ViewRequest}, Address, Bytecode, DirectWriteSet, EntryFunctionId, EntryFunctionPayload, Event, @@ -204,8 +205,12 @@ impl<'a, S: StateView> MoveConverter<'a, S> { let payload = self.try_into_write_set_payload(write_set)?; (info, payload, events).into() }, - BlockMetadata(txn) => (&txn, info, events).into(), - BlockMetadataExt(txn) => (&txn, info, events).into(), + BlockMetadata(txn) => Transaction::BlockMetadataTransaction( + BlockMetadataTransaction::from_internal(txn, info, events), + ), + BlockMetadataExt(txn) => Transaction::BlockMetadataTransaction( + BlockMetadataTransaction::from_internal_ext(txn, info, events), + ), StateCheckpoint(_) => { Transaction::StateCheckpointTransaction(StateCheckpointTransaction { info, diff --git a/api/types/src/transaction.rs b/api/types/src/transaction.rs index 4af7dae0b843e..7a89a1ba3db64 100755 --- a/api/types/src/transaction.rs +++ b/api/types/src/transaction.rs @@ -7,7 +7,7 @@ use crate::{ MoveModuleBytecode, MoveModuleId, MoveResource, MoveScriptBytecode, MoveStructTag, MoveType, MoveValue, VerifyInput, VerifyInputWithRecursion, U64, }; -use anyhow::{bail, Context as AnyhowContext}; +use anyhow::{bail, Context as AnyhowContext, Result}; use aptos_crypto::{ ed25519::{self, Ed25519PublicKey, ED25519_PUBLIC_KEY_LENGTH, ED25519_SIGNATURE_LENGTH}, multi_ed25519::{self, MultiEd25519PublicKey, BITMAP_NUM_OF_BYTES, 
MAX_NUM_OF_KEYS}, @@ -70,9 +70,21 @@ pub enum TransactionData { Pending(Box), } -impl From for TransactionData { - fn from(txn: TransactionOnChainData) -> Self { - Self::OnChain(txn) +impl TransactionData { + pub fn from_transaction_onchain_data( + txn: TransactionOnChainData, + latest_ledger_version: u64, + ) -> Result { + if txn.version > latest_ledger_version { + match txn.transaction { + aptos_types::transaction::Transaction::UserTransaction(txn) => { + Ok(Self::Pending(Box::new(txn))) + }, + _ => bail!("convert non-user onchain transaction to pending shouldn't exist"), + } + } else { + Ok(Self::OnChain(txn)) + } } } @@ -311,38 +323,6 @@ impl From<(TransactionInfo, WriteSetPayload, Vec)> for Transaction { } } -impl From<(&BlockMetadata, TransactionInfo, Vec)> for Transaction { - fn from((txn, info, events): (&BlockMetadata, TransactionInfo, Vec)) -> Self { - Transaction::BlockMetadataTransaction(BlockMetadataTransaction { - info, - id: txn.id().into(), - epoch: txn.epoch().into(), - round: txn.round().into(), - events, - previous_block_votes_bitvec: txn.previous_block_votes_bitvec().clone(), - proposer: txn.proposer().into(), - failed_proposer_indices: txn.failed_proposer_indices().clone(), - timestamp: txn.timestamp_usecs().into(), - }) - } -} - -impl From<(&BlockMetadataExt, TransactionInfo, Vec)> for Transaction { - fn from((txn, info, events): (&BlockMetadataExt, TransactionInfo, Vec)) -> Self { - Transaction::BlockMetadataTransaction(BlockMetadataTransaction { - info, - id: txn.id().into(), - epoch: txn.epoch().into(), - round: txn.round().into(), - events, - previous_block_votes_bitvec: txn.previous_block_votes_bitvec().clone(), - proposer: txn.proposer().into(), - failed_proposer_indices: txn.failed_proposer_indices().clone(), - timestamp: txn.timestamp_usecs().into(), - }) - } -} - impl From<(&SignedTransaction, TransactionPayload)> for UserTransactionRequest { fn from((txn, payload): (&SignedTransaction, TransactionPayload)) -> Self { Self { @@ -577,6 
+557,95 @@ pub struct BlockMetadataTransaction { /// The indices of the proposers who failed to propose pub failed_proposer_indices: Vec, pub timestamp: U64, + + /// If some, it means the internal txn type is `aptos_types::transaction::Transaction::BlockMetadataExt`. + /// Otherwise, it is `aptos_types::transaction::Transaction::BlockMetadata`. + /// + /// NOTE: we could have introduced a new APT txn type to represent the corresponding internal type, + /// but that is a breaking change to the ecosystem. + /// + /// NOTE: `oai` does not support `flatten` together with `skip_serializing_if`. + #[serde(default, skip_serializing_if = "Option::is_none")] + #[oai(default, skip_serializing_if = "Option::is_none")] + pub block_metadata_extension: Option, +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Object)] +pub struct BlockMetadataExtensionEmpty {} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Object)] +pub struct BlockMetadataExtensionRandomness { + randomness: Option, +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Union)] +#[serde(tag = "type", rename_all = "snake_case")] +#[oai(one_of, discriminator_name = "type", rename_all = "snake_case")] +pub enum BlockMetadataExtension { + V0(BlockMetadataExtensionEmpty), + V1(BlockMetadataExtensionRandomness), +} + +impl BlockMetadataExtension { + pub fn from_internal_txn(txn: &BlockMetadataExt) -> Self { + match txn { + BlockMetadataExt::V0(_) => Self::V0(BlockMetadataExtensionEmpty {}), + BlockMetadataExt::V1(payload) => Self::V1(BlockMetadataExtensionRandomness { + randomness: payload + .randomness + .clone() + .map(|pr| HexEncodedBytes::from(pr.randomness_cloned())), + }), + } + } +} + +impl BlockMetadataTransaction { + pub fn from_internal( + internal: BlockMetadata, + info: TransactionInfo, + events: Vec, + ) -> Self { + Self { + info, + id: internal.id().into(), + epoch: internal.epoch().into(), + round: internal.round().into(), + events, + 
previous_block_votes_bitvec: internal.previous_block_votes_bitvec().clone(), + proposer: internal.proposer().into(), + failed_proposer_indices: internal.failed_proposer_indices().clone(), + timestamp: internal.timestamp_usecs().into(), + block_metadata_extension: None, + } + } + + pub fn from_internal_ext( + internal: BlockMetadataExt, + info: TransactionInfo, + events: Vec, + ) -> Self { + Self { + info, + id: internal.id().into(), + epoch: internal.epoch().into(), + round: internal.round().into(), + events, + previous_block_votes_bitvec: internal.previous_block_votes_bitvec().clone(), + proposer: internal.proposer().into(), + failed_proposer_indices: internal.failed_proposer_indices().clone(), + timestamp: internal.timestamp_usecs().into(), + block_metadata_extension: Some(BlockMetadataExtension::from_internal_txn(&internal)), + } + } + + pub fn type_str(&self) -> &'static str { + match self.block_metadata_extension { + None => "block_metadata_transaction", + Some(BlockMetadataExtension::V0(_)) => "block_metadata_ext_transaction__v0", + Some(BlockMetadataExtension::V1(_)) => "block_metadata_ext_transaction__v1", + } + } } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Union)] diff --git a/aptos-move/aptos-aggregator/Cargo.toml b/aptos-move/aptos-aggregator/Cargo.toml index 3152a494a97a6..6c73b0b750b5a 100644 --- a/aptos-move/aptos-aggregator/Cargo.toml +++ b/aptos-move/aptos-aggregator/Cargo.toml @@ -13,7 +13,6 @@ repository = { workspace = true } rust-version = { workspace = true } [dependencies] -aptos-logger = { workspace = true } aptos-types = { workspace = true } bcs = { workspace = true } claims = { workspace = true } diff --git a/aptos-move/aptos-aggregator/src/aggregator_v1_extension.rs b/aptos-move/aptos-aggregator/src/aggregator_v1_extension.rs index 4c4b3573ff2d5..6f51b30b5e983 100644 --- a/aptos-move/aptos-aggregator/src/aggregator_v1_extension.rs +++ b/aptos-move/aptos-aggregator/src/aggregator_v1_extension.rs @@ -5,9 +5,10 @@ use 
crate::{ bounded_math::{BoundedMath, SignedU128}, delta_math::DeltaHistory, resolver::AggregatorV1Resolver, - types::{expect_ok, DelayedFieldsSpeculativeError, DeltaApplicationFailureReason}, + types::{DelayedFieldsSpeculativeError, DeltaApplicationFailureReason}, }; use aptos_types::{ + error::expect_ok, state_store::{state_key::StateKey, table::TableHandle}, PeerId, }; diff --git a/aptos-move/aptos-aggregator/src/delayed_change.rs b/aptos-move/aptos-aggregator/src/delayed_change.rs index 4535d891bd644..64344255b32e0 100644 --- a/aptos-move/aptos-aggregator/src/delayed_change.rs +++ b/aptos-move/aptos-aggregator/src/delayed_change.rs @@ -3,9 +3,12 @@ use crate::{ delta_change_set::{DeltaOp, DeltaWithMax}, - types::{code_invariant_error, DelayedFieldValue, DelayedFieldsSpeculativeError, PanicOr}, + types::{DelayedFieldValue, DelayedFieldsSpeculativeError}, +}; +use aptos_types::{ + delayed_fields::SnapshotToStringFormula, + error::{code_invariant_error, PanicOr}, }; -use aptos_types::delayed_fields::SnapshotToStringFormula; #[derive(Clone, Debug, Eq, PartialEq)] pub enum DelayedApplyChange { diff --git a/aptos-move/aptos-aggregator/src/delayed_field_extension.rs b/aptos-move/aptos-aggregator/src/delayed_field_extension.rs index 9a67226fc335b..1451af79fcec4 100644 --- a/aptos-move/aptos-aggregator/src/delayed_field_extension.rs +++ b/aptos-move/aptos-aggregator/src/delayed_field_extension.rs @@ -6,14 +6,14 @@ use crate::{ delayed_change::{ApplyBase, DelayedApplyChange, DelayedChange}, delta_change_set::DeltaWithMax, resolver::DelayedFieldResolver, - types::{ - code_invariant_error, expect_ok, DelayedFieldValue, DelayedFieldsSpeculativeError, PanicOr, - ReadPosition, - }, + types::{DelayedFieldValue, DelayedFieldsSpeculativeError, ReadPosition}, }; -use aptos_types::delayed_fields::{ - calculate_width_for_constant_string, calculate_width_for_integer_embedded_string, - SnapshotToStringFormula, +use aptos_types::{ + delayed_fields::{ + 
calculate_width_for_constant_string, calculate_width_for_integer_embedded_string, + SnapshotToStringFormula, + }, + error::{code_invariant_error, expect_ok, PanicOr}, }; use move_binary_format::errors::PartialVMResult; use move_vm_types::delayed_values::delayed_field_id::DelayedFieldID; diff --git a/aptos-move/aptos-aggregator/src/delta_change_set.rs b/aptos-move/aptos-aggregator/src/delta_change_set.rs index a1bea4eb752e3..ab7a616128962 100644 --- a/aptos-move/aptos-aggregator/src/delta_change_set.rs +++ b/aptos-move/aptos-aggregator/src/delta_change_set.rs @@ -8,10 +8,9 @@ use crate::{ bounded_math::{BoundedMath, SignedU128}, delta_math::{merge_data_and_delta, merge_two_deltas, DeltaHistory}, - types::{ - code_invariant_error, DelayedFieldsSpeculativeError, DeltaApplicationFailureReason, PanicOr, - }, + types::{DelayedFieldsSpeculativeError, DeltaApplicationFailureReason}, }; +use aptos_types::error::{code_invariant_error, PanicOr}; #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub struct DeltaWithMax { @@ -219,7 +218,7 @@ mod test { FakeAggregatorView, }; use aptos_types::{ - delayed_fields::PanicError, + error::PanicError, state_store::{ state_key::StateKey, state_value::{StateValue, StateValueMetadata}, diff --git a/aptos-move/aptos-aggregator/src/delta_math.rs b/aptos-move/aptos-aggregator/src/delta_math.rs index 7078b03223a88..b4e079ad73bcf 100644 --- a/aptos-move/aptos-aggregator/src/delta_math.rs +++ b/aptos-move/aptos-aggregator/src/delta_math.rs @@ -4,10 +4,11 @@ use crate::{ bounded_math::{ok_overflow, ok_underflow, BoundedMath, SignedU128}, types::{ - expect_ok, DelayedFieldsSpeculativeError, DeltaApplicationFailureReason, - DeltaHistoryMergeOffsetFailureReason, PanicOr, + DelayedFieldsSpeculativeError, DeltaApplicationFailureReason, + DeltaHistoryMergeOffsetFailureReason, }, }; +use aptos_types::error::{expect_ok, PanicOr}; /// Tracks values seen by aggregator. 
In particular, stores information about /// the biggest and the smallest deltas that were applied successfully during diff --git a/aptos-move/aptos-aggregator/src/resolver.rs b/aptos-move/aptos-aggregator/src/resolver.rs index c15c6542f0517..0f522753392a1 100644 --- a/aptos-move/aptos-aggregator/src/resolver.rs +++ b/aptos-move/aptos-aggregator/src/resolver.rs @@ -5,13 +5,10 @@ use crate::{ aggregator_v1_extension::{addition_v1_error, subtraction_v1_error}, bounded_math::SignedU128, delta_change_set::{serialize, DeltaOp}, - types::{ - code_invariant_error, DelayedFieldValue, DelayedFieldsSpeculativeError, - DeltaApplicationFailureReason, PanicOr, - }, + types::{DelayedFieldValue, DelayedFieldsSpeculativeError, DeltaApplicationFailureReason}, }; use aptos_types::{ - delayed_fields::PanicError, + error::{code_invariant_error, PanicError, PanicOr}, state_store::{ state_key::StateKey, state_value::{StateValue, StateValueMetadata}, diff --git a/aptos-move/aptos-aggregator/src/tests/types.rs b/aptos-move/aptos-aggregator/src/tests/types.rs index 6cae8d5d36fc3..18096caae79b1 100644 --- a/aptos-move/aptos-aggregator/src/tests/types.rs +++ b/aptos-move/aptos-aggregator/src/tests/types.rs @@ -6,12 +6,10 @@ use crate::{ bounded_math::{BoundedMath, SignedU128}, delta_change_set::serialize, resolver::{TAggregatorV1View, TDelayedFieldView}, - types::{ - code_invariant_error, expect_ok, DelayedFieldValue, DelayedFieldsSpeculativeError, PanicOr, - }, + types::{DelayedFieldValue, DelayedFieldsSpeculativeError}, }; use aptos_types::{ - delayed_fields::PanicError, + error::{code_invariant_error, expect_ok, PanicError, PanicOr}, state_store::{ state_key::StateKey, state_value::{StateValue, StateValueMetadata}, diff --git a/aptos-move/aptos-aggregator/src/types.rs b/aptos-move/aptos-aggregator/src/types.rs index 81b99d6a2445b..9571c0fb9819e 100644 --- a/aptos-move/aptos-aggregator/src/types.rs +++ b/aptos-move/aptos-aggregator/src/types.rs @@ -2,8 +2,7 @@ // SPDX-License-Identifier: 
Apache-2.0 use crate::bounded_math::SignedU128; -use aptos_logger::error; -use aptos_types::delayed_fields::PanicError; +use aptos_types::error::{code_invariant_error, NonPanic, PanicError, PanicOr}; use move_binary_format::errors::PartialVMError; use move_core_types::{ value::{IdentifierMappingKind, MoveTypeLayout}, @@ -20,84 +19,12 @@ use move_vm_types::{ values::{Struct, Value}, }; -// Wrapping another error, to add a variant that represents -// something that should never happen - i.e. a code invariant error, -// which we would generally just panic, but since we are inside of the VM, -// we cannot do that. -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum PanicOr { - CodeInvariantError(String), - Or(T), -} - -impl PanicOr { - pub fn map_non_panic(self, f: impl FnOnce(T) -> E) -> PanicOr { - match self { - PanicOr::CodeInvariantError(msg) => PanicOr::CodeInvariantError(msg), - PanicOr::Or(value) => PanicOr::Or(f(value)), - } - } -} - -pub fn code_invariant_error(message: M) -> PanicError { - let msg = format!( - "Delayed materialization code invariant broken (there is a bug in the code), {:?}", - message - ); - error!("{}", msg); - PanicError::CodeInvariantError(msg) -} - -pub fn expect_ok(value: Result) -> Result { - value.map_err(|e| code_invariant_error(format!("Expected Ok, got Err({:?})", e))) -} - -impl From for PanicOr { - fn from(err: PanicError) -> Self { - match err { - PanicError::CodeInvariantError(e) => PanicOr::CodeInvariantError(e), - } - } -} - -pub trait NonPanic {} - -impl From for PanicOr { - fn from(err: T) -> Self { - PanicOr::Or(err) - } -} - impl From for PartialVMError { fn from(err: DelayedFieldsSpeculativeError) -> Self { PartialVMError::from(PanicOr::from(err)) } } -impl From<&PanicOr> for StatusCode { - fn from(err: &PanicOr) -> Self { - match err { - PanicOr::CodeInvariantError(_) => { - StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR - }, - PanicOr::Or(_) => StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR, - } - } -} - -impl 
From> for PartialVMError { - fn from(err: PanicOr) -> Self { - match err { - PanicOr::CodeInvariantError(msg) => { - PartialVMError::new(StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR) - .with_message(msg) - }, - PanicOr::Or(err) => PartialVMError::new(StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR) - .with_message(format!("{:?}", err)), - } - } -} - /// Different reasons for why applying new start_value doesn't /// satisfy history bounds #[derive(Clone, Debug, PartialEq, Eq)] diff --git a/aptos-move/aptos-vm-types/src/change_set.rs b/aptos-move/aptos-vm-types/src/change_set.rs index 3c2ca78492403..6c539b6b9045e 100644 --- a/aptos-move/aptos-vm-types/src/change_set.rs +++ b/aptos-move/aptos-vm-types/src/change_set.rs @@ -13,11 +13,10 @@ use aptos_aggregator::{ delayed_change::DelayedChange, delta_change_set::{serialize, DeltaOp}, resolver::AggregatorV1Resolver, - types::code_invariant_error, }; use aptos_types::{ contract_event::ContractEvent, - delayed_fields::PanicError, + error::{code_invariant_error, PanicError}, state_store::{ state_key::{inner::StateKeyInner, StateKey}, state_value::StateValueMetadata, @@ -193,7 +192,7 @@ impl VMChangeSet { let (key, value) = element?; if acc.insert(key, value).is_some() { Err(PartialVMError::new( - StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR, + StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR, ) .with_message( "Found duplicate key across resource change sets.".to_string(), diff --git a/aptos-move/aptos-vm-types/src/output.rs b/aptos-move/aptos-vm-types/src/output.rs index 22bf8b0af5a2a..80db4b7e778f0 100644 --- a/aptos-move/aptos-vm-types/src/output.rs +++ b/aptos-move/aptos-vm-types/src/output.rs @@ -8,11 +8,10 @@ use crate::{ }; use aptos_aggregator::{ delayed_change::DelayedChange, delta_change_set::DeltaOp, resolver::AggregatorV1Resolver, - types::code_invariant_error, }; use aptos_types::{ contract_event::ContractEvent, - delayed_fields::PanicError, + error::{code_invariant_error, 
PanicError}, fee_statement::FeeStatement, state_store::state_key::StateKey, transaction::{TransactionAuxiliaryData, TransactionOutput, TransactionStatus}, @@ -167,7 +166,7 @@ impl VMOutput { self.try_materialize(resolver)?; self.into_transaction_output().map_err(|e| { VMStatus::error( - StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR, + StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR, Some(e.to_string()), ) }) diff --git a/aptos-move/aptos-vm-types/src/resource_group_adapter.rs b/aptos-move/aptos-vm-types/src/resource_group_adapter.rs index 02030e40c5972..87293388dca28 100644 --- a/aptos-move/aptos-vm-types/src/resource_group_adapter.rs +++ b/aptos-move/aptos-vm-types/src/resource_group_adapter.rs @@ -3,7 +3,8 @@ use crate::resolver::{ResourceGroupSize, ResourceGroupView, TResourceGroupView, TResourceView}; use aptos_types::{ - serde_helper::bcs_utils::bcs_size_of_byte_array, state_store::state_key::StateKey, + error::code_invariant_error, serde_helper::bcs_utils::bcs_size_of_byte_array, + state_store::state_key::StateKey, }; use bytes::Bytes; use move_binary_format::errors::{PartialVMError, PartialVMResult}; @@ -273,6 +274,104 @@ impl TResourceGroupView for ResourceGroupAdapter<'_> { } } +// We set SPECULATIVE_EXECUTION_ABORT_ERROR here, as the error can happen due to +// speculative reads (and in a non-speculative context, e.g. during commit, it +// is a more serious error and block execution must abort). +// BlockExecutor is responsible with handling this error. +fn group_size_arithmetics_error() -> PartialVMError { + PartialVMError::new(StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR) + .with_message("Group size arithmetics error while applying updates".to_string()) +} + +// Updates a given ResourceGroupSize (an abstract representation allowing the computation +// of bcs serialized size) size, to reflect the state after removing a resource in a group +// with size old_tagged_resource_size. 
+pub fn decrement_size_for_remove_tag( + size: &mut ResourceGroupSize, + old_tagged_resource_size: u64, +) -> PartialVMResult<()> { + match size { + ResourceGroupSize::Concrete(_) => Err(code_invariant_error(format!( + "Unexpected ResourceGroupSize::Concrete in decrement_size_for_add_tag \ + (removing resource w. size = {old_tagged_resource_size})" + )) + .into()), + ResourceGroupSize::Combined { + num_tagged_resources, + all_tagged_resources_size, + } => { + *num_tagged_resources = num_tagged_resources + .checked_sub(1) + .ok_or_else(group_size_arithmetics_error)?; + *all_tagged_resources_size = all_tagged_resources_size + .checked_sub(old_tagged_resource_size) + .ok_or_else(group_size_arithmetics_error)?; + Ok(()) + }, + } +} + +// Updates a given ResourceGroupSize (an abstract representation allowing the computation +// of bcs serialized size) size, to reflect the state after adding a resource in a group +// with size new_tagged_resource_size. +pub fn increment_size_for_add_tag( + size: &mut ResourceGroupSize, + new_tagged_resource_size: u64, +) -> PartialVMResult<()> { + match size { + ResourceGroupSize::Concrete(_) => Err(code_invariant_error(format!( + "Unexpected ResourceGroupSize::Concrete in increment_size_for_add_tag \ + (adding resource w. size = {new_tagged_resource_size})" + )) + .into()), + ResourceGroupSize::Combined { + num_tagged_resources, + all_tagged_resources_size, + } => { + *num_tagged_resources = num_tagged_resources + .checked_add(1) + .ok_or_else(group_size_arithmetics_error)?; + *all_tagged_resources_size = all_tagged_resources_size + .checked_add(new_tagged_resource_size) + .ok_or_else(group_size_arithmetics_error)?; + Ok(()) + }, + } +} + +// Checks an invariant that iff a resource group exists, it must have a > 0 size. 
+pub fn check_size_and_existence_match( + size: &ResourceGroupSize, + exists: bool, + state_key: &StateKey, +) -> PartialVMResult<()> { + if exists { + if size.get() == 0 { + Err( + PartialVMError::new(StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR).with_message( + format!( + "Group tag count/size shouldn't be 0 for an existing group: {:?}", + state_key + ), + ), + ) + } else { + Ok(()) + } + } else if size.get() > 0 { + Err( + PartialVMError::new(StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR).with_message( + format!( + "Group tag count/size should be 0 for a new group: {:?}", + state_key + ), + ), + ) + } else { + Ok(()) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/aptos-move/aptos-vm-types/src/tests/test_change_set.rs b/aptos-move/aptos-vm-types/src/tests/test_change_set.rs index af095be82cd8e..2d887e12e5c80 100644 --- a/aptos-move/aptos-vm-types/src/tests/test_change_set.rs +++ b/aptos-move/aptos-vm-types/src/tests/test_change_set.rs @@ -22,7 +22,8 @@ use aptos_aggregator::{ delta_change_set::DeltaWithMax, }; use aptos_types::{ - delayed_fields::{PanicError, SnapshotToStringFormula}, + delayed_fields::SnapshotToStringFormula, + error::PanicError, state_store::{state_key::StateKey, state_value::StateValueMetadata}, transaction::ChangeSet as StorageChangeSet, write_set::{WriteOp, WriteSetMut}, @@ -199,7 +200,7 @@ macro_rules! 
assert_invariant_violation { assert!( err.major_status() == StatusCode::UNKNOWN_INVARIANT_VIOLATION_ERROR || err.major_status() - == StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR + == StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR ); }; diff --git a/aptos-move/aptos-vm/src/block_executor/mod.rs b/aptos-move/aptos-vm/src/block_executor/mod.rs index 08f3dcd0e3ce9..2f2fd0abdca75 100644 --- a/aptos-move/aptos-vm/src/block_executor/mod.rs +++ b/aptos-move/aptos-vm/src/block_executor/mod.rs @@ -20,7 +20,7 @@ use aptos_infallible::Mutex; use aptos_types::{ block_executor::config::BlockExecutorConfig, contract_event::ContractEvent, - delayed_fields::PanicError, + error::PanicError, executable::ExecutableTestType, fee_statement::FeeStatement, state_store::{state_key::StateKey, state_value::StateValueMetadata, StateView, StateViewId}, @@ -33,6 +33,7 @@ use aptos_types::{ use aptos_vm_logging::{flush_speculative_logs, init_speculative_logs}; use aptos_vm_types::{ abstract_write_op::AbstractResourceWriteOp, environment::Environment, output::VMOutput, + resolver::ResourceGroupSize, }; use move_core_types::{ language_storage::StructTag, @@ -118,6 +119,7 @@ impl BlockExecutorTransactionOutput for AptosTransactionOutput { ) -> Vec<( StateKey, WriteOp, + ResourceGroupSize, BTreeMap>)>, )> { self.vm_output @@ -131,6 +133,9 @@ impl BlockExecutorTransactionOutput for AptosTransactionOutput { Some(( key.clone(), group_write.metadata_op().clone(), + group_write + .maybe_group_op_size() + .unwrap_or(ResourceGroupSize::zero_combined()), group_write .inner_ops() .iter() @@ -436,7 +441,7 @@ impl BlockAptosVM { Err(BlockExecutionError::FatalBlockExecutorError(PanicError::CodeInvariantError( err_msg, ))) => Err(VMStatus::Error { - status_code: StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR, + status_code: StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR, sub_status: None, message: Some(err_msg), }), diff --git 
a/aptos-move/aptos-vm/src/block_executor/vm_wrapper.rs b/aptos-move/aptos-vm/src/block_executor/vm_wrapper.rs index 058ea9de2553d..85e7a7bee305a 100644 --- a/aptos-move/aptos-vm/src/block_executor/vm_wrapper.rs +++ b/aptos-move/aptos-vm/src/block_executor/vm_wrapper.rs @@ -71,7 +71,7 @@ impl ExecutorTask for AptosExecutorTask { vm_status.message().cloned().unwrap_or_default(), ) } else if vm_status.status_code() - == StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR + == StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR { ExecutionStatus::DelayedFieldsCodeInvariantError( vm_status.message().cloned().unwrap_or_default(), @@ -98,7 +98,7 @@ impl ExecutorTask for AptosExecutorTask { err.message().cloned().unwrap_or_default(), ) } else if err.status_code() - == StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR + == StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR { ExecutionStatus::DelayedFieldsCodeInvariantError( err.message().cloned().unwrap_or_default(), diff --git a/aptos-move/aptos-vm/src/data_cache.rs b/aptos-move/aptos-vm/src/data_cache.rs index 7861880b40479..fafd7cb646e4c 100644 --- a/aptos-move/aptos-vm/src/data_cache.rs +++ b/aptos-move/aptos-vm/src/data_cache.rs @@ -13,11 +13,11 @@ use crate::{ use aptos_aggregator::{ bounded_math::SignedU128, resolver::{TAggregatorV1View, TDelayedFieldView}, - types::{DelayedFieldValue, DelayedFieldsSpeculativeError, PanicOr}, + types::{DelayedFieldValue, DelayedFieldsSpeculativeError}, }; use aptos_table_natives::{TableHandle, TableResolver}; use aptos_types::{ - delayed_fields::PanicError, + error::{PanicError, PanicOr}, on_chain_config::{ConfigStorage, Features, OnChainConfig}, state_store::{ errors::StateviewError, diff --git a/aptos-move/aptos-vm/src/errors.rs b/aptos-move/aptos-vm/src/errors.rs index f054a7c819222..695fe6166dd9e 100644 --- a/aptos-move/aptos-vm/src/errors.rs +++ b/aptos-move/aptos-vm/src/errors.rs @@ -151,7 +151,7 @@ pub fn convert_prologue_error( e @ VMStatus::Error { 
status_code: StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR - | StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR, + | StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR, .. } => e, status @ VMStatus::ExecutionFailure { .. } | status @ VMStatus::Error { .. } => { @@ -207,7 +207,7 @@ pub fn convert_epilogue_error( e @ VMStatus::Error { status_code: StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR - | StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR, + | StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR, .. } => e, status => { @@ -237,7 +237,7 @@ pub fn expect_only_successful_execution( e @ VMStatus::Error { status_code: StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR - | StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR, + | StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR, .. } => e, status => { diff --git a/aptos-move/aptos-vm/src/move_vm_ext/session/view_with_change_set.rs b/aptos-move/aptos-vm/src/move_vm_ext/session/view_with_change_set.rs index 50e94ddba816e..06862df40c7aa 100644 --- a/aptos-move/aptos-vm/src/move_vm_ext/session/view_with_change_set.rs +++ b/aptos-move/aptos-vm/src/move_vm_ext/session/view_with_change_set.rs @@ -6,12 +6,10 @@ use aptos_aggregator::{ delayed_change::{ApplyBase, DelayedApplyChange, DelayedChange}, delta_change_set::DeltaWithMax, resolver::{TAggregatorV1View, TDelayedFieldView}, - types::{ - code_invariant_error, expect_ok, DelayedFieldValue, DelayedFieldsSpeculativeError, PanicOr, - }, + types::{DelayedFieldValue, DelayedFieldsSpeculativeError}, }; use aptos_types::{ - delayed_fields::PanicError, + error::{code_invariant_error, expect_ok, PanicError, PanicOr}, state_store::{ errors::StateviewError, state_key::StateKey, diff --git a/aptos-move/aptos-vm/src/move_vm_ext/write_op_converter.rs b/aptos-move/aptos-vm/src/move_vm_ext/write_op_converter.rs index 36cbdc4a23486..a60e719c90e91 100644 --- a/aptos-move/aptos-vm/src/move_vm_ext/write_op_converter.rs +++ 
b/aptos-move/aptos-vm/src/move_vm_ext/write_op_converter.rs @@ -9,8 +9,11 @@ use aptos_types::{ write_set::WriteOp, }; use aptos_vm_types::{ - abstract_write_op::GroupWrite, resolver::ResourceGroupSize, - resource_group_adapter::group_tagged_resource_size, + abstract_write_op::GroupWrite, + resource_group_adapter::{ + check_size_and_existence_match, decrement_size_for_remove_tag, group_tagged_resource_size, + increment_size_for_add_tag, + }, }; use bytes::Bytes; use move_binary_format::errors::{PartialVMError, PartialVMResult}; @@ -18,7 +21,6 @@ use move_core_types::{ effects::Op as MoveStorageOp, language_storage::StructTag, value::MoveTypeLayout, vm_status::StatusCode, }; -use move_vm_types::delayed_values::error::code_invariant_error; use std::{collections::BTreeMap, sync::Arc}; pub(crate) struct WriteOpConverter<'r> { @@ -47,96 +49,6 @@ macro_rules! convert_impl { }; } -// We set SPECULATIVE_EXECUTION_ABORT_ERROR here, as the error can happen due to -// speculative reads (and in a non-speculative context, e.g. during commit, it -// is a more serious error and block execution must abort). -// BlockExecutor is responsible with handling this error. 
-fn group_size_arithmetics_error() -> PartialVMError { - PartialVMError::new(StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR) - .with_message("Group size arithmetics error while applying updates".to_string()) -} - -fn decrement_size_for_remove_tag( - size: &mut ResourceGroupSize, - old_tagged_resource_size: u64, -) -> PartialVMResult<()> { - match size { - ResourceGroupSize::Concrete(_) => Err(code_invariant_error( - "Unexpected ResourceGroupSize::Concrete in decrement_size_for_remove_tag", - )), - ResourceGroupSize::Combined { - num_tagged_resources, - all_tagged_resources_size, - } => { - *num_tagged_resources = num_tagged_resources - .checked_sub(1) - .ok_or_else(group_size_arithmetics_error)?; - *all_tagged_resources_size = all_tagged_resources_size - .checked_sub(old_tagged_resource_size) - .ok_or_else(group_size_arithmetics_error)?; - Ok(()) - }, - } -} - -fn increment_size_for_add_tag( - size: &mut ResourceGroupSize, - new_tagged_resource_size: u64, -) -> PartialVMResult<()> { - match size { - ResourceGroupSize::Concrete(_) => Err(PartialVMError::new( - StatusCode::UNKNOWN_INVARIANT_VIOLATION_ERROR, - ) - .with_message( - "Unexpected ResourceGroupSize::Concrete in increment_size_for_add_tag".to_string(), - )), - ResourceGroupSize::Combined { - num_tagged_resources, - all_tagged_resources_size, - } => { - *num_tagged_resources = num_tagged_resources - .checked_add(1) - .ok_or_else(group_size_arithmetics_error)?; - *all_tagged_resources_size = all_tagged_resources_size - .checked_add(new_tagged_resource_size) - .ok_or_else(group_size_arithmetics_error)?; - Ok(()) - }, - } -} - -fn check_size_and_existence_match( - size: &ResourceGroupSize, - exists: bool, - state_key: &StateKey, -) -> PartialVMResult<()> { - if exists { - if size.get() == 0 { - Err( - PartialVMError::new(StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR).with_message( - format!( - "Group tag count/size shouldn't be 0 for an existing group: {:?}", - state_key - ), - ), - ) - } else { - Ok(()) - } - } 
else if size.get() > 0 { - Err( - PartialVMError::new(StatusCode::SPECULATIVE_EXECUTION_ABORT_ERROR).with_message( - format!( - "Group tag count/size should be 0 for a new group: {:?}", - state_key - ), - ), - ) - } else { - Ok(()) - } -} - impl<'r> WriteOpConverter<'r> { convert_impl!(convert_module, get_module_state_value_metadata); diff --git a/aptos-move/aptos-vm/src/natives.rs b/aptos-move/aptos-vm/src/natives.rs index 4fb5a39b11273..25515277c7bab 100644 --- a/aptos-move/aptos-vm/src/natives.rs +++ b/aptos-move/aptos-vm/src/natives.rs @@ -5,10 +5,7 @@ #[cfg(feature = "testing")] use aptos_aggregator::resolver::TAggregatorV1View; #[cfg(feature = "testing")] -use aptos_aggregator::{ - bounded_math::SignedU128, - types::{DelayedFieldsSpeculativeError, PanicOr}, -}; +use aptos_aggregator::{bounded_math::SignedU128, types::DelayedFieldsSpeculativeError}; #[cfg(feature = "testing")] use aptos_aggregator::{resolver::TDelayedFieldView, types::DelayedFieldValue}; #[cfg(feature = "testing")] @@ -26,7 +23,7 @@ use aptos_types::{ #[cfg(feature = "testing")] use aptos_types::{ chain_id::ChainId, - delayed_fields::PanicError, + error::{PanicError, PanicOr}, state_store::{ state_key::StateKey, state_value::{StateValue, StateValueMetadata}, diff --git a/aptos-move/aptos-vm/src/verifier/transaction_arg_validation.rs b/aptos-move/aptos-vm/src/verifier/transaction_arg_validation.rs index b4be0a60880de..2ee495c5b10a6 100644 --- a/aptos-move/aptos-vm/src/verifier/transaction_arg_validation.rs +++ b/aptos-move/aptos-vm/src/verifier/transaction_arg_validation.rs @@ -455,11 +455,15 @@ fn validate_and_construct( )?; let mut ret_vals = serialized_result.return_values; // We know ret_vals.len() == 1 - let deserialize_error = VMStatus::error( - StatusCode::INTERNAL_TYPE_ERROR, - Some(String::from("Constructor did not return value")), - ); - Ok(ret_vals.pop().ok_or(deserialize_error)?.0) + Ok(ret_vals + .pop() + .ok_or_else(|| { + VMStatus::error( + StatusCode::INTERNAL_TYPE_ERROR, + 
Some(String::from("Constructor did not return value")), + ) + })? + .0) } // String is a vector of bytes, so both string and vector carry a length in the serialized format. diff --git a/aptos-move/aptos-workspace-server/Cargo.toml b/aptos-move/aptos-workspace-server/Cargo.toml new file mode 100644 index 0000000000000..10739a87132d2 --- /dev/null +++ b/aptos-move/aptos-workspace-server/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "aptos-workspace-server" +version = "0.1.0" + +# Workspace inherited keys +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +publish = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } + +[dependencies] +aptos = { workspace = true } +aptos-cached-packages = { workspace = true } +aptos-config = { workspace = true } +aptos-faucet-core = { workspace = true } +aptos-node = { workspace = true } +aptos-types = { workspace = true } + +anyhow = { workspace = true } +futures = { workspace = true } +rand = { workspace = true } +tempfile = { workspace = true } +tokio = { workspace = true } +url = { workspace = true } diff --git a/aptos-move/aptos-workspace-server/src/main.rs b/aptos-move/aptos-workspace-server/src/main.rs new file mode 100644 index 0000000000000..0a6a5e5334355 --- /dev/null +++ b/aptos-move/aptos-workspace-server/src/main.rs @@ -0,0 +1,177 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::Result; +use aptos::node::local_testnet::HealthChecker; +use aptos_config::config::{NodeConfig, TableInfoServiceMode}; +use aptos_faucet_core::server::{FunderKeyEnum, RunConfig}; +use aptos_node::{load_node_config, start_and_report_ports}; +use aptos_types::network_address::{NetworkAddress, Protocol}; +use futures::channel::oneshot; +use rand::{rngs::StdRng, SeedableRng}; +use std::{ + net::{IpAddr, Ipv4Addr}, + path::Path, + thread, + time::Duration, +}; +use url::Url; + +pub fn 
zero_all_ports(config: &mut NodeConfig) { + // TODO: Double check if all ports are covered. + + config.admin_service.port = 0; + config.api.address.set_port(0); + config.inspection_service.port = 0; + config.storage.backup_service_address.set_port(0); + config.indexer_grpc.address.set_port(0); + + if let Some(network) = config.validator_network.as_mut() { + network.listen_address = NetworkAddress::from_protocols(vec![ + Protocol::Ip4("0.0.0.0".parse().unwrap()), + Protocol::Tcp(0), + ]) + .unwrap(); + } + for network in config.full_node_networks.iter_mut() { + network.listen_address = NetworkAddress::from_protocols(vec![ + Protocol::Ip4("0.0.0.0".parse().unwrap()), + Protocol::Tcp(0), + ]) + .unwrap(); + } +} + +async fn spawn_node(test_dir: &Path) -> Result<()> { + let rng = StdRng::from_entropy(); + + let mut node_config = load_node_config( + &None, + &None, + test_dir, + false, + false, + false, + aptos_cached_packages::head_release_bundle(), + rng, + )?; + + zero_all_ports(&mut node_config); + node_config.indexer_grpc.enabled = true; + node_config.indexer_grpc.use_data_service_interface = true; + + node_config.indexer_table_info.table_info_service_mode = TableInfoServiceMode::IndexingOnly; + + node_config + .api + .address + .set_ip(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))); + node_config + .indexer_grpc + .address + .set_ip(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))); + + node_config.admin_service.address = "127.0.0.1".to_string(); + node_config.inspection_service.address = "127.0.0.1".to_string(); + + let (api_port_tx, api_port_rx) = oneshot::channel(); + let (indexer_grpc_port_tx, indexer_grpc_port_rx) = oneshot::channel(); + + let run_node = { + let test_dir = test_dir.to_owned(); + let node_config = node_config.clone(); + move || -> Result<()> { + start_and_report_ports( + node_config, + Some(test_dir.join("validator.log")), + false, + Some(api_port_tx), + Some(indexer_grpc_port_tx), + ) + } + }; + + let _node_thread_handle = thread::spawn(move || { + let res 
= run_node(); + + if let Err(err) = res { + println!("Node stopped unexpectedly {:?}", err); + } + }); + + let api_port = api_port_rx.await?; + let indexer_grpc_port = indexer_grpc_port_rx.await?; + + let api_health_checker = HealthChecker::NodeApi( + Url::parse(&format!( + "http://{}:{}", + node_config.api.address.ip(), + api_port + )) + .unwrap(), + ); + let indexer_grpc_health_checker = HealthChecker::DataServiceGrpc( + Url::parse(&format!( + "http://{}:{}", + node_config.indexer_grpc.address.ip(), + indexer_grpc_port + )) + .unwrap(), + ); + + api_health_checker.wait(None).await?; + eprintln!( + "Node API is ready. Endpoint: http://127.0.0.1:{}/", + api_port + ); + + indexer_grpc_health_checker.wait(None).await?; + eprintln!( + "Transaction stream is ready. Endpoint: http://127.0.0.1:{}/", + indexer_grpc_port + ); + + let faucet_run_config = RunConfig::build_for_cli( + Url::parse(&format!( + "http://{}:{}", + node_config.api.address.ip(), + api_port + )) + .unwrap(), + "127.0.0.1".to_string(), + 0, + FunderKeyEnum::KeyFile(test_dir.join("mint.key")), + false, + None, + ); + + let (faucet_port_tx, faucet_port_rx) = oneshot::channel(); + tokio::spawn(faucet_run_config.run_and_report_port(faucet_port_tx)); + + let faucet_port = faucet_port_rx.await?; + + let faucet_health_checker = + HealthChecker::http_checker_from_port(faucet_port, "Faucet".to_string()); + faucet_health_checker.wait(None).await?; + eprintln!( + "Faucet is ready. Endpoint: http://127.0.0.1:{}", + faucet_port + ); + + eprintln!("Indexer API is ready. 
Endpoint: http://127.0.0.1:0/"); + + Ok(()) +} + +#[tokio::main] +async fn main() -> Result<()> { + let test_dir = tempfile::tempdir()?; + + println!("Test directory: {}", test_dir.path().display()); + + spawn_node(test_dir.path()).await?; + + loop { + tokio::time::sleep(Duration::from_millis(200)).await; + } +} diff --git a/aptos-move/block-executor/src/captured_reads.rs b/aptos-move/block-executor/src/captured_reads.rs index aaec491043d24..7e2f83a74ada5 100644 --- a/aptos-move/block-executor/src/captured_reads.rs +++ b/aptos-move/block-executor/src/captured_reads.rs @@ -5,10 +5,7 @@ use crate::{types::InputOutputKey, value_exchange::filter_value_for_exchange}; use anyhow::bail; use aptos_aggregator::{ delta_math::DeltaHistory, - types::{ - code_invariant_error, DelayedFieldValue, DelayedFieldsSpeculativeError, PanicOr, - ReadPosition, - }, + types::{DelayedFieldValue, DelayedFieldsSpeculativeError, ReadPosition}, }; use aptos_mvhashmap::{ types::{ @@ -20,8 +17,10 @@ use aptos_mvhashmap::{ versioned_group_data::VersionedGroupData, }; use aptos_types::{ - delayed_fields::PanicError, state_store::state_value::StateValueMetadata, - transaction::BlockExecutableTransaction as Transaction, write_set::TransactionWrite, + error::{code_invariant_error, PanicError, PanicOr}, + state_store::state_value::StateValueMetadata, + transaction::BlockExecutableTransaction as Transaction, + write_set::TransactionWrite, }; use aptos_vm_types::resolver::ResourceGroupSize; use derivative::Derivative; @@ -304,12 +303,13 @@ pub(crate) struct CapturedReads { delayed_field_reads: HashMap, - /// If there is a speculative failure (e.g. delta application failure, or an - /// observed inconsistency), the transaction output is irrelevant (must be - /// discarded and transaction re-executed). We have a global flag, as which - /// read observed the inconsistency is irrelevant (moreover, typically, - /// an error is returned to the VM to wrap up the ongoing execution). 
- speculative_failure: bool, + /// If there is a speculative failure (e.g. delta application failure, or an observed + /// inconsistency), the transaction output is irrelevant (must be discarded and transaction + /// re-executed). We have two global flags, one for speculative failures regarding + /// delayed fields, and the second for all other speculative failures, because these + /// require different validation behavior (delayed fields are validated commit-time). + delayed_field_speculative_failure: bool, + non_delayed_field_speculative_failure: bool, /// Set if the invarint on CapturedReads intended use is violated. Leads to an alert /// and sequential execution fallback. incorrect_use: bool, @@ -444,7 +444,7 @@ impl CapturedReads { }, UpdateResult::Inconsistency(m) => { // Record speculative failure. - self.speculative_failure = true; + self.non_delayed_field_speculative_failure = true; bail!(m); }, UpdateResult::Updated | UpdateResult::Inserted => Ok(()), @@ -521,7 +521,7 @@ impl CapturedReads { }, UpdateResult::Inconsistency(_) => { // Record speculative failure. 
- self.speculative_failure = true; + self.delayed_field_speculative_failure = true; Err(PanicOr::Or(DelayedFieldsSpeculativeError::InconsistentRead)) }, UpdateResult::Updated | UpdateResult::Inserted => Ok(()), @@ -531,7 +531,7 @@ impl CapturedReads { pub(crate) fn capture_delayed_field_read_error(&mut self, e: &PanicOr) { match e { PanicOr::CodeInvariantError(_) => self.incorrect_use = true, - PanicOr::Or(_) => self.speculative_failure = true, + PanicOr::Or(_) => self.delayed_field_speculative_failure = true, }; } @@ -554,7 +554,7 @@ impl CapturedReads { data_map: &VersionedData, idx_to_validate: TxnIndex, ) -> bool { - if self.speculative_failure { + if self.non_delayed_field_speculative_failure { return false; } @@ -590,7 +590,7 @@ impl CapturedReads { ) -> bool { use MVGroupError::*; - if self.speculative_failure { + if self.non_delayed_field_speculative_failure { return false; } @@ -622,28 +622,25 @@ impl CapturedReads { Err(Uninitialized) => { unreachable!("May not be uninitialized if captured for validation"); }, - Err(TagSerializationError(_)) => { - unreachable!("Should not require tag serialization"); - }, } }) }) } // This validation needs to be called at commit time - // (as it internally uses read_latest_committed_value to get the current value). + // (as it internally uses read_latest_predicted_value to get the current value). 
pub(crate) fn validate_delayed_field_reads( &self, delayed_fields: &dyn TVersionedDelayedFieldView, idx_to_validate: TxnIndex, ) -> Result { - if self.speculative_failure { + if self.delayed_field_speculative_failure { return Ok(false); } use MVDelayedFieldsError::*; for (id, read_value) in &self.delayed_field_reads { - match delayed_fields.read_latest_committed_value( + match delayed_fields.read_latest_predicted_value( id, idx_to_validate, ReadPosition::BeforeCurrentTxn, @@ -707,8 +704,12 @@ impl CapturedReads { ret } - pub(crate) fn mark_failure(&mut self) { - self.speculative_failure = true; + pub(crate) fn mark_failure(&mut self, delayed_field_failure: bool) { + if delayed_field_failure { + self.delayed_field_speculative_failure = true; + } else { + self.non_delayed_field_speculative_failure = true; + } } pub(crate) fn mark_incorrect_use(&mut self) { @@ -756,8 +757,11 @@ impl UnsyncReadSet { mod test { use super::*; use crate::proptest_types::types::{raw_metadata, KeyType, MockEvent, ValueType}; - use aptos_mvhashmap::types::StorageVersion; - use claims::{assert_err, assert_gt, assert_matches, assert_none, assert_ok, assert_some_eq}; + use aptos_mvhashmap::{types::StorageVersion, MVHashMap}; + use aptos_types::executable::ExecutableTestType; + use claims::{ + assert_err, assert_gt, assert_matches, assert_none, assert_ok, assert_ok_eq, assert_some_eq, + }; use move_vm_types::delayed_values::delayed_field_id::DelayedFieldID; use test_case::test_case; @@ -1268,7 +1272,8 @@ mod test { let deletion_metadata = DataRead::Metadata(None); let exists = DataRead::Exists(true); - assert!(!captured_reads.speculative_failure); + assert!(!captured_reads.non_delayed_field_speculative_failure); + assert!(!captured_reads.delayed_field_speculative_failure); let key = KeyType::(20, false); assert_ok!(captured_reads.capture_read(key, use_tag.then_some(30), exists)); assert_err!(captured_reads.capture_read( @@ -1276,22 +1281,57 @@ mod test { use_tag.then_some(30), 
deletion_metadata.clone() )); - assert!(captured_reads.speculative_failure); + assert!(captured_reads.non_delayed_field_speculative_failure); + assert!(!captured_reads.delayed_field_speculative_failure); - captured_reads.speculative_failure = false; + let mvhashmap = + MVHashMap::, u32, ValueType, ExecutableTestType, DelayedFieldID>::new(); + + captured_reads.non_delayed_field_speculative_failure = false; + captured_reads.delayed_field_speculative_failure = false; let key = KeyType::(21, false); assert_ok!(captured_reads.capture_read(key, use_tag.then_some(30), deletion_metadata)); assert_err!(captured_reads.capture_read(key, use_tag.then_some(30), resolved)); - assert!(captured_reads.speculative_failure); + assert!(captured_reads.non_delayed_field_speculative_failure); + assert!(!captured_reads.validate_data_reads(mvhashmap.data(), 0)); + assert!(!captured_reads.validate_group_reads(mvhashmap.group_data(), 0)); + assert!(!captured_reads.delayed_field_speculative_failure); + assert_ok_eq!( + captured_reads.validate_delayed_field_reads(mvhashmap.delayed_fields(), 0), + true + ); - captured_reads.speculative_failure = false; + captured_reads.non_delayed_field_speculative_failure = false; + captured_reads.delayed_field_speculative_failure = false; let key = KeyType::(22, false); assert_ok!(captured_reads.capture_read(key, use_tag.then_some(30), metadata)); assert_err!(captured_reads.capture_read(key, use_tag.then_some(30), versioned_legacy)); - assert!(captured_reads.speculative_failure); + assert!(captured_reads.non_delayed_field_speculative_failure); + assert!(!captured_reads.delayed_field_speculative_failure); + + let mut captured_reads = CapturedReads::::new(); + captured_reads.non_delayed_field_speculative_failure = false; + captured_reads.delayed_field_speculative_failure = false; + captured_reads.mark_failure(true); + assert!(!captured_reads.non_delayed_field_speculative_failure); + assert!(captured_reads.validate_data_reads(mvhashmap.data(), 0)); + 
assert!(captured_reads.validate_group_reads(mvhashmap.group_data(), 0)); + assert!(captured_reads.delayed_field_speculative_failure); + assert_ok_eq!( + captured_reads.validate_delayed_field_reads(mvhashmap.delayed_fields(), 0), + false + ); - captured_reads.speculative_failure = false; - captured_reads.mark_failure(); - assert!(captured_reads.speculative_failure); + captured_reads.mark_failure(true); + assert!(!captured_reads.non_delayed_field_speculative_failure); + assert!(captured_reads.delayed_field_speculative_failure); + + captured_reads.delayed_field_speculative_failure = false; + captured_reads.mark_failure(false); + assert!(captured_reads.non_delayed_field_speculative_failure); + assert!(!captured_reads.delayed_field_speculative_failure); + captured_reads.mark_failure(true); + assert!(captured_reads.non_delayed_field_speculative_failure); + assert!(captured_reads.delayed_field_speculative_failure); } } diff --git a/aptos-move/block-executor/src/errors.rs b/aptos-move/block-executor/src/errors.rs index ec1c35724b61e..6e74abe88f027 100644 --- a/aptos-move/block-executor/src/errors.rs +++ b/aptos-move/block-executor/src/errors.rs @@ -2,7 +2,7 @@ // Parts of the project are originally copyright © Meta Platforms, Inc. // SPDX-License-Identifier: Apache-2.0 -use aptos_types::delayed_fields::PanicError; +use aptos_types::error::PanicError; #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) enum ParallelBlockExecutionError { @@ -13,6 +13,10 @@ pub(crate) enum ParallelBlockExecutionError { ModulePathReadWriteError, /// unrecoverable VM error FatalVMError, + // Incarnation number that is higher than a threshold is observed during parallel execution. + // This might be indicative of some sort of livelock, or at least some sort of inefficiency + // that would warrants investigating the root cause. Execution can fallback to sequential. 
+ IncarnationTooHigh, } // This is separate error because we need to match the error variant to provide a specialized diff --git a/aptos-move/block-executor/src/executor.rs b/aptos-move/block-executor/src/executor.rs index cde73c85b7992..43616a636ab4c 100644 --- a/aptos-move/block-executor/src/executor.rs +++ b/aptos-move/block-executor/src/executor.rs @@ -15,14 +15,17 @@ use crate::{ scheduler::{DependencyStatus, ExecutionTaskType, Scheduler, SchedulerTask, Wave}, task::{ExecutionStatus, ExecutorTask, TransactionOutput}, txn_commit_hook::TransactionCommitHook, - txn_last_input_output::{KeyKind, TxnLastInputOutput}, + txn_last_input_output::{ + KeyKind, + KeyKind::{Group, Module, Resource}, + TxnLastInputOutput, + }, types::ReadWriteSummary, view::{LatestView, ParallelState, SequentialState, ViewState}, }; use aptos_aggregator::{ delayed_change::{ApplyBase, DelayedChange}, delta_change_set::serialize, - types::{code_invariant_error, expect_ok, PanicOr}, }; use aptos_drop_helper::DEFAULT_DROPPER; use aptos_logger::{debug, error, info}; @@ -34,7 +37,7 @@ use aptos_mvhashmap::{ }; use aptos_types::{ block_executor::config::BlockExecutorConfig, - delayed_fields::PanicError, + error::{code_invariant_error, expect_ok, PanicError, PanicOr}, executable::Executable, on_chain_config::BlockGasLimitType, state_store::{state_value::StateValue, TStateView}, @@ -44,7 +47,7 @@ use aptos_types::{ write_set::{TransactionWrite, WriteOp}, }; use aptos_vm_logging::{alert, clear_speculative_txn_logs, init_speculative_logs, prelude::*}; -use aptos_vm_types::change_set::randomly_check_layout_matches; +use aptos_vm_types::{change_set::randomly_check_layout_matches, resolver::ResourceGroupSize}; use bytes::Bytes; use claims::assert_none; use core::panic; @@ -117,31 +120,60 @@ where let execute_result = executor.execute_transaction(&sync_view, txn, idx_to_execute); let mut prev_modified_keys = last_input_output - .modified_keys(idx_to_execute) - .map_or(HashMap::new(), |keys| 
keys.collect()); + .modified_keys(idx_to_execute, true) + .map_or_else(HashMap::new, |keys| keys.collect()); let mut prev_modified_delayed_fields = last_input_output .delayed_field_keys(idx_to_execute) - .map_or(HashSet::new(), |keys| keys.collect()); + .map_or_else(HashSet::new, |keys| keys.collect()); let mut read_set = sync_view.take_parallel_reads(); + if read_set.is_incorrect_use() { + return Err(PanicOr::from(code_invariant_error(format!( + "Incorrect use detected in CapturedReads after executing txn = {} incarnation = {}", + idx_to_execute, incarnation + )))); + } // For tracking whether it's required to (re-)validate the suffix of transactions in the block. // May happen, for instance, when the recent execution wrote outside of the previous write/delta // set (vanilla Block-STM rule), or if resource group size or metadata changed from an estimate // (since those resource group validations rely on estimates). let mut needs_suffix_validation = false; + let mut group_keys_and_tags: Vec<(T::Key, HashSet)> = vec![]; let mut apply_updates = |output: &E::Output| -> Result< Vec<(T::Key, Arc, Option>)>, // Cached resource writes PanicError, > { - for (group_key, group_metadata_op, group_ops) in - output.resource_group_write_set().into_iter() - { - if prev_modified_keys.remove(&group_key).is_none() { - // Previously no write to the group at all. 
- needs_suffix_validation = true; - } + let group_output = output.resource_group_write_set(); + group_keys_and_tags = group_output + .iter() + .map(|(key, _, _, ops)| { + let tags = ops.iter().map(|(tag, _)| tag.clone()).collect(); + (key.clone(), tags) + }) + .collect(); + for (group_key, group_metadata_op, group_size, group_ops) in group_output.into_iter() { + let prev_tags = match prev_modified_keys.remove(&group_key) { + Some(Group(tags)) => tags, + Some(Resource) => { + return Err(code_invariant_error(format!( + "Group key {:?} recorded as resource KeyKind", + group_key, + ))); + }, + Some(Module) => { + return Err(code_invariant_error(format!( + "Group key {:?} recorded as module KeyKind", + group_key, + ))); + }, + None => { + // Previously no write to the group at all. + needs_suffix_validation = true; + HashSet::new() + }, + }; if versioned_cache.data().write_metadata( group_key.clone(), @@ -151,12 +183,15 @@ where ) { needs_suffix_validation = true; } + if versioned_cache.group_data().write( group_key, idx_to_execute, incarnation, group_ops.into_iter(), - ) { + group_size, + prev_tags, + )? { needs_suffix_validation = true; } } @@ -282,7 +317,7 @@ where match kind { Resource => versioned_cache.data().remove(&k, idx_to_execute), Module => versioned_cache.modules().remove(&k, idx_to_execute), - Group => { + Group(tags) => { // A change in state observable during speculative execution // (which includes group metadata and size) changes, suffix // re-validation is needed. 
For resources where speculative @@ -297,7 +332,9 @@ where needs_suffix_validation = true; versioned_cache.data().remove(&k, idx_to_execute); - versioned_cache.group_data().remove(&k, idx_to_execute); + versioned_cache + .group_data() + .remove(&k, idx_to_execute, tags); }, }; } @@ -306,7 +343,13 @@ where versioned_cache.delayed_fields().remove(&id, idx_to_execute); } - if !last_input_output.record(idx_to_execute, read_set, result, resource_write_set) { + if !last_input_output.record( + idx_to_execute, + read_set, + result, + resource_write_set, + group_keys_and_tags, + ) { // Module R/W is an expected fallback behavior, no alert is required. debug!("[Execution] At txn {}, Module read & write", idx_to_execute); @@ -321,17 +364,16 @@ where idx_to_validate: TxnIndex, last_input_output: &TxnLastInputOutput, versioned_cache: &MVHashMap, - ) -> Result { + ) -> bool { let _timer = TASK_VALIDATE_SECONDS.start_timer(); let read_set = last_input_output .read_set(idx_to_validate) .expect("[BlockSTM]: Prior read-set must be recorded"); - if read_set.is_incorrect_use() { - return Err(code_invariant_error( - "Incorrect use detected in CapturedReads", - )); - } + assert!( + !read_set.is_incorrect_use(), + "Incorrect use must be handled after execution" + ); // Note: we validate delayed field reads only at try_commit. // TODO[agg_v2](optimize): potentially add some basic validation. @@ -341,10 +383,8 @@ where // until commit, but mark as estimates). // TODO: validate modules when there is no r/w fallback. 
- Ok( - read_set.validate_data_reads(versioned_cache.data(), idx_to_validate) - && read_set.validate_group_reads(versioned_cache.group_data(), idx_to_validate), - ) + read_set.validate_data_reads(versioned_cache.data(), idx_to_validate) + && read_set.validate_group_reads(versioned_cache.group_data(), idx_to_validate) } fn update_transaction_on_abort( @@ -358,16 +398,18 @@ where clear_speculative_txn_logs(txn_idx as usize); // Not valid and successfully aborted, mark the latest write/delta sets as estimates. - if let Some(keys) = last_input_output.modified_keys(txn_idx) { + if let Some(keys) = last_input_output.modified_keys(txn_idx, false) { for (k, kind) in keys { use KeyKind::*; match kind { Resource => versioned_cache.data().mark_estimate(&k, txn_idx), Module => versioned_cache.modules().mark_estimate(&k, txn_idx), - Group => { + Group(tags) => { // Validation for both group size and metadata is based on values. // Execution may wait for estimates. - versioned_cache.group_data().mark_estimate(&k, txn_idx); + versioned_cache + .group_data() + .mark_estimate(&k, txn_idx, tags); // Group metadata lives in same versioned cache as data / resources. // We are not marking metadata change as estimate, but after @@ -495,8 +537,7 @@ where scheduler.finish_execution_during_commit(txn_idx)?; - let validation_result = - Self::validate(txn_idx, last_input_output, versioned_cache)?; + let validation_result = Self::validate(txn_idx, last_input_output, versioned_cache); if !validation_result || !Self::validate_commit_ready(txn_idx, versioned_cache, last_input_output) .unwrap_or(false) @@ -543,33 +584,10 @@ where && block_limit_processor.should_end_block_parallel() { // Set the execution output status to be SkipRest, to skip the rest of the txns. 
- last_input_output.update_to_skip_rest(txn_idx); + last_input_output.update_to_skip_rest(txn_idx)?; } } - let finalized_groups = groups_to_finalize!(last_input_output, txn_idx) - .map(|((group_key, metadata_op), is_read_needing_exchange)| { - // finalize_group copies Arc of values and the Tags (TODO: optimize as needed). - // TODO[agg_v2]: have a test that fails if we don't do the if. - let finalized_result = if is_read_needing_exchange { - versioned_cache - .group_data() - .get_last_committed_group(&group_key) - } else { - versioned_cache - .group_data() - .finalize_group(&group_key, txn_idx) - }; - map_finalized_group::( - group_key, - finalized_result, - metadata_op, - is_read_needing_exchange, - ) - }) - .collect::, _>>()?; - - last_input_output.record_finalized_group(txn_idx, finalized_groups); defer! { scheduler.add_to_commit_queue(txn_idx); } @@ -679,7 +697,21 @@ where shared_counter, ); let latest_view = LatestView::new(base_view, ViewState::Sync(parallel_state), txn_idx); - let finalized_groups = last_input_output.take_finalized_group(txn_idx); + + let finalized_groups = groups_to_finalize!(last_input_output, txn_idx) + .map(|((group_key, metadata_op), is_read_needing_exchange)| { + let finalize_group = versioned_cache + .group_data() + .finalize_group(&group_key, txn_idx); + + map_finalized_group::( + group_key, + finalize_group, + metadata_op, + is_read_needing_exchange, + ) + }) + .collect::, _>>()?; let materialized_finalized_groups = map_id_to_values_in_group_writes(finalized_groups, &latest_view)?; @@ -732,7 +764,7 @@ where } let mut final_results = final_results.acquire(); - match last_input_output.take_output(txn_idx) { + match last_input_output.take_output(txn_idx)? { ExecutionStatus::Success(t) | ExecutionStatus::SkipRest(t) => { final_results[txn_idx as usize] = t; }, @@ -761,6 +793,7 @@ where num_workers: usize, ) -> Result<(), PanicOr> { // Make executor for each task. TODO: fast concurrent executor. 
+ let num_txns = block.len(); let init_timer = VM_INIT_SECONDS.start_timer(); let executor = E::init(env.clone(), base_view); drop(init_timer); @@ -785,6 +818,16 @@ where }; loop { + if let SchedulerTask::ValidationTask(txn_idx, incarnation, _) = &scheduler_task { + if *incarnation as usize > num_workers.pow(2) + num_txns + 10 { + // Something is wrong if we observe high incarnations (e.g. a bug + // might manifest as an execution-invalidation cycle). Break out + // to fallback to sequential execution. + error!("Observed incarnation {} of txn {txn_idx}", *incarnation); + return Err(PanicOr::Or(ParallelBlockExecutionError::IncarnationTooHigh)); + } + } + while scheduler.should_coordinate_commits() { self.prepare_and_queue_commit_ready_txns( &self.config.onchain.block_gas_limit_type, @@ -807,7 +850,7 @@ where scheduler_task = match scheduler_task { SchedulerTask::ValidationTask(txn_idx, incarnation, wave) => { - let valid = Self::validate(txn_idx, last_input_output, versioned_cache)?; + let valid = Self::validate(txn_idx, last_input_output, versioned_cache); Self::update_on_validation( txn_idx, incarnation, @@ -941,6 +984,15 @@ where }); drop(timer); + if !shared_maybe_error.load(Ordering::SeqCst) && scheduler.pop_from_commit_queue().is_ok() { + // No error is recorded, parallel execution workers are done, but there is + // still a commit task remaining. Commit tasks must be drained before workers + // exit, hence we log an error and fallback to sequential execution. + alert!("[BlockSTM] error: commit tasks not drained after parallel execution"); + + shared_maybe_error.store(true, Ordering::Relaxed); + } + counters::update_state_counters(versioned_cache.stats(), true); // Explicit async drops. 
@@ -971,10 +1023,10 @@ where unsync_map.write(key, write_op, layout); } - for (group_key, metadata_op, group_ops) in output.resource_group_write_set().into_iter() { - for (value_tag, (group_op, maybe_layout)) in group_ops.into_iter() { - unsync_map.insert_group_op(&group_key, value_tag, group_op, maybe_layout)?; - } + for (group_key, metadata_op, group_size, group_ops) in + output.resource_group_write_set().into_iter() + { + unsync_map.insert_group_ops(&group_key, group_ops, group_size)?; unsync_map.write(group_key, Arc::new(metadata_op), None); } @@ -1161,22 +1213,26 @@ where // previously failed in bcs serialization for preparing final transaction outputs. // TODO: remove this fallback when txn errors can be created from block executor. - let finalize = |group_key| -> BTreeMap<_, _> { - unsync_map - .finalize_group(&group_key) - .map(|(resource_tag, value_with_layout)| { - let value = match value_with_layout { - ValueWithLayout::RawFromStorage(value) - | ValueWithLayout::Exchanged(value, _) => value, - }; - ( - resource_tag, - value - .extract_raw_bytes() - .expect("Deletions should already be applied"), - ) - }) - .collect() + let finalize = |group_key| -> (BTreeMap<_, _>, ResourceGroupSize) { + let (group, size) = unsync_map.finalize_group(&group_key); + + ( + group + .map(|(resource_tag, value_with_layout)| { + let value = match value_with_layout { + ValueWithLayout::RawFromStorage(value) + | ValueWithLayout::Exchanged(value, _) => value, + }; + ( + resource_tag, + value + .extract_raw_bytes() + .expect("Deletions should already be applied"), + ) + }) + .collect(), + size, + ) }; // The IDs are not exchanged but it doesn't change the types (Bytes) or size. 
@@ -1188,16 +1244,25 @@ where true }); - let finalized_group = finalize(group_key.clone()); - bcs::to_bytes(&finalized_group).is_err() + let (finalized_group, group_size) = finalize(group_key.clone()); + match bcs::to_bytes(&finalized_group) { + Ok(group) => { + (!finalized_group.is_empty() || group_size.get() != 0) + && group.len() as u64 != group_size.get() + }, + Err(_) => true, + } }) || output.resource_group_write_set().into_iter().any( - |(group_key, _, group_ops)| { + |(group_key, _, output_group_size, group_ops)| { fail_point!("fail-point-resource-group-serialization", |_| { true }); - let mut finalized_group = finalize(group_key); + let (mut finalized_group, group_size) = finalize(group_key); + if output_group_size.get() != group_size.get() { + return false; + } for (value_tag, (group_op, _)) in group_ops { if group_op.is_deletion() { finalized_group.remove(&value_tag); @@ -1210,7 +1275,13 @@ where ); } } - bcs::to_bytes(&finalized_group).is_err() + match bcs::to_bytes(&finalized_group) { + Ok(group) => { + (!finalized_group.is_empty() || group_size.get() != 0) + && group.len() as u64 != group_size.get() + }, + Err(_) => true, + } }, ); @@ -1219,7 +1290,7 @@ where // fallback is to just skip any transactions that would cause such serialization errors. 
alert!("Discarding transaction because serialization failed in bcs fallback"); ret.push(E::Output::discard_output( - StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR, + StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR, )); continue; } @@ -1237,8 +1308,9 @@ where { let finalized_groups = groups_to_finalize!(output,) .map(|((group_key, metadata_op), is_read_needing_exchange)| { - let finalized_group = - Ok(unsync_map.finalize_group(&group_key).collect()); + let (group_ops_iter, group_size) = + unsync_map.finalize_group(&group_key); + let finalized_group = Ok((group_ops_iter.collect(), group_size)); map_finalized_group::( group_key, finalized_group, @@ -1426,7 +1498,7 @@ where // StateCheckpoint will be added afterwards. let error_code = match sequential_error { BlockExecutionError::FatalBlockExecutorError(_) => { - StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR + StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR }, BlockExecutionError::FatalVMError(_) => { StatusCode::UNKNOWN_INVARIANT_VIOLATION_ERROR diff --git a/aptos-move/block-executor/src/executor_utilities.rs b/aptos-move/block-executor/src/executor_utilities.rs index 82274f741c76b..5588ce8da62a3 100644 --- a/aptos-move/block-executor/src/executor_utilities.rs +++ b/aptos-move/block-executor/src/executor_utilities.rs @@ -2,15 +2,18 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{errors::*, view::LatestView}; -use aptos_aggregator::types::code_invariant_error; use aptos_logger::error; use aptos_mvhashmap::types::ValueWithLayout; use aptos_types::{ - contract_event::TransactionEvent, delayed_fields::PanicError, executable::Executable, - state_store::TStateView, transaction::BlockExecutableTransaction as Transaction, + contract_event::TransactionEvent, + error::{code_invariant_error, PanicError}, + executable::Executable, + state_store::TStateView, + transaction::BlockExecutableTransaction as Transaction, write_set::TransactionWrite, }; use aptos_vm_logging::{alert, 
prelude::*}; +use aptos_vm_types::resolver::ResourceGroupSize; use bytes::Bytes; use fail::fail_point; use move_core_types::value::MoveTypeLayout; @@ -84,14 +87,22 @@ pub(crate) use resource_writes_to_materialize; pub(crate) fn map_finalized_group( group_key: T::Key, - finalized_group: anyhow::Result)>>, + finalized_group: anyhow::Result<(Vec<(T::Tag, ValueWithLayout)>, ResourceGroupSize)>, metadata_op: T::Value, is_read_needing_exchange: bool, -) -> Result<(T::Key, T::Value, Vec<(T::Tag, ValueWithLayout)>), PanicError> { +) -> Result< + ( + T::Key, + T::Value, + Vec<(T::Tag, ValueWithLayout)>, + ResourceGroupSize, + ), + PanicError, +> { let metadata_is_deletion = metadata_op.is_deletion(); match finalized_group { - Ok(finalized_group) => { + Ok((finalized_group, group_size)) => { if is_read_needing_exchange && metadata_is_deletion { // Value needed exchange but was not written / modified during the txn // execution: may not be empty. @@ -106,7 +117,7 @@ pub(crate) fn map_finalized_group( metadata_is_deletion ))) } else { - Ok((group_key, metadata_op, finalized_group)) + Ok((group_key, metadata_op, finalized_group, group_size)) } }, Err(e) => Err(code_invariant_error(format!( @@ -117,7 +128,12 @@ pub(crate) fn map_finalized_group( } pub(crate) fn serialize_groups( - finalized_groups: Vec<(T::Key, T::Value, Vec<(T::Tag, Arc)>)>, + finalized_groups: Vec<( + T::Key, + T::Value, + Vec<(T::Tag, Arc)>, + ResourceGroupSize, + )>, ) -> Result, ResourceGroupSerializationError> { fail_point!( "fail-point-resource-group-serialization", @@ -127,27 +143,45 @@ pub(crate) fn serialize_groups( finalized_groups .into_iter() - .map(|(group_key, mut metadata_op, finalized_group)| { - let btree: BTreeMap = finalized_group - .into_iter() - .map(|(resource_tag, arc_v)| { - let bytes = arc_v - .extract_raw_bytes() - .expect("Deletions should already be applied"); - (resource_tag, bytes) - }) - .collect(); + .map( + |(group_key, mut metadata_op, finalized_group, group_size)| { + let 
btree: BTreeMap = finalized_group + .into_iter() + .map(|(resource_tag, arc_v)| { + let bytes = arc_v + .extract_raw_bytes() + .expect("Deletions should already be applied"); + (resource_tag, bytes) + }) + .collect(); - bcs::to_bytes(&btree) - .map_err(|e| { - alert!("Unexpected resource group error {:?}", e); - ResourceGroupSerializationError - }) - .map(|group_bytes| { - metadata_op.set_bytes(group_bytes.into()); - (group_key, metadata_op) - }) - }) + match bcs::to_bytes(&btree) { + Ok(group_bytes) => { + if (!btree.is_empty() || group_size.get() != 0) + && group_bytes.len() as u64 != group_size.get() + { + alert!( + "Serialized resource group size mismatch key = {:?} num items {}, \ + len {} recorded size {}, op {:?}", + group_key, + btree.len(), + group_bytes.len(), + group_size.get(), + metadata_op, + ); + Err(ResourceGroupSerializationError) + } else { + metadata_op.set_bytes(group_bytes.into()); + Ok((group_key, metadata_op)) + } + }, + Err(e) => { + alert!("Unexpected resource group error {:?}", e); + Err(ResourceGroupSerializationError) + }, + } + }, + ) .collect() } @@ -167,11 +201,24 @@ pub(crate) fn map_id_to_values_in_group_writes< S: TStateView + Sync, X: Executable + 'static, >( - finalized_groups: Vec<(T::Key, T::Value, Vec<(T::Tag, ValueWithLayout)>)>, + finalized_groups: Vec<( + T::Key, + T::Value, + Vec<(T::Tag, ValueWithLayout)>, + ResourceGroupSize, + )>, latest_view: &LatestView, -) -> Result)>)>, PanicError> { +) -> Result< + Vec<( + T::Key, + T::Value, + Vec<(T::Tag, Arc)>, + ResourceGroupSize, + )>, + PanicError, +> { let mut patched_finalized_groups = Vec::with_capacity(finalized_groups.len()); - for (group_key, group_metadata_op, resource_vec) in finalized_groups.into_iter() { + for (group_key, group_metadata_op, resource_vec, group_size) in finalized_groups.into_iter() { let mut patched_resource_vec = Vec::with_capacity(resource_vec.len()); for (tag, value_with_layout) in resource_vec.into_iter() { let value = match value_with_layout { 
@@ -183,7 +230,12 @@ pub(crate) fn map_id_to_values_in_group_writes< }; patched_resource_vec.push((tag, value)); } - patched_finalized_groups.push((group_key, group_metadata_op, patched_resource_vec)); + patched_finalized_groups.push(( + group_key, + group_metadata_op, + patched_resource_vec, + group_size, + )); } Ok(patched_finalized_groups) } diff --git a/aptos-move/block-executor/src/proptest_types/baseline.rs b/aptos-move/block-executor/src/proptest_types/baseline.rs index 21430370818e1..b2d72577fa3c0 100644 --- a/aptos-move/block-executor/src/proptest_types/baseline.rs +++ b/aptos-move/block-executor/src/proptest_types/baseline.rs @@ -310,11 +310,11 @@ impl BaselineOutput { .for_each(|(baseline_read, result_read)| baseline_read.assert_read_result(result_read)); // Update group world. - for (group_key, v, updates) in output.group_writes.iter() { + for (group_key, v, group_size, updates) in output.group_writes.iter() { group_metadata.insert(group_key.clone(), v.as_state_value_metadata()); + let group_map = group_world.entry(group_key).or_insert(base_map.clone()); for (tag, v) in updates { - let group_map = group_world.entry(group_key).or_insert(base_map.clone()); if v.is_deletion() { assert_some!(group_map.remove(tag)); } else { @@ -324,6 +324,9 @@ impl BaselineOutput { assert_eq!(existed, v.is_modification()); } } + let computed_size = + group_size_as_sum(group_map.iter().map(|(t, v)| (t, v.len()))).unwrap(); + assert_eq!(computed_size, *group_size); } // Test recorded finalized group writes: it should contain the whole group, and diff --git a/aptos-move/block-executor/src/proptest_types/types.rs b/aptos-move/block-executor/src/proptest_types/types.rs index f6d5de2dd1b39..70cf194fb86ad 100644 --- a/aptos-move/block-executor/src/proptest_types/types.rs +++ b/aptos-move/block-executor/src/proptest_types/types.rs @@ -12,7 +12,7 @@ use aptos_mvhashmap::types::TxnIndex; use aptos_types::{ account_address::AccountAddress, contract_event::TransactionEvent, - 
delayed_fields::PanicError, + error::PanicError, executable::ModulePath, fee_statement::FeeStatement, on_chain_config::CurrentTimeMicroseconds, @@ -25,7 +25,12 @@ use aptos_types::{ transaction::BlockExecutableTransaction as Transaction, write_set::{TransactionWrite, WriteOp, WriteOpKind}, }; -use aptos_vm_types::resolver::{TExecutorView, TResourceGroupView}; +use aptos_vm_types::{ + resolver::{ResourceGroupSize, TExecutorView, TResourceGroupView}, + resource_group_adapter::{ + decrement_size_for_remove_tag, group_tagged_resource_size, increment_size_for_add_tag, + }, +}; use bytes::Bytes; use claims::{assert_ge, assert_le, assert_ok}; use move_core_types::{identifier::IdentStr, value::MoveTypeLayout}; @@ -928,6 +933,8 @@ where let mut group_writes = vec![]; for (key, metadata, inner_ops) in behavior.group_writes.iter() { let mut new_inner_ops = HashMap::new(); + let group_size = view.resource_group_size(key).unwrap(); + let mut new_group_size = view.resource_group_size(key).unwrap(); for (tag, inner_op) in inner_ops.iter() { let exists = view .get_resource_from_group(key, tag, None) @@ -940,46 +947,92 @@ where // inner op is either deletion or creation. assert!(!inner_op.is_modification()); - if exists == inner_op.is_deletion() { - // insert the provided inner op. - new_inner_ops.insert(*tag, inner_op.clone()); - } - if exists && inner_op.is_creation() { - // Adjust the type, otherwise executor will assert. 
- if inner_op.bytes().unwrap()[0] % 4 < 3 || *tag == RESERVED_TAG { - new_inner_ops.insert( - *tag, + let maybe_op = if exists { + Some( + if inner_op.is_creation() + && (inner_op.bytes().unwrap()[0] % 4 < 3 + || *tag == RESERVED_TAG) + { ValueType::new( inner_op.bytes.clone(), StateValueMetadata::none(), WriteOpKind::Modification, - ), - ); - } else { - new_inner_ops.insert( - *tag, + ) + } else { ValueType::new( None, StateValueMetadata::none(), WriteOpKind::Deletion, - ), - ); + ) + }, + ) + } else { + inner_op.is_creation().then(|| inner_op.clone()) + }; + + if let Some(new_inner_op) = maybe_op { + if exists { + let old_tagged_value_size = + view.resource_size_in_group(key, tag).unwrap(); + let old_size = + group_tagged_resource_size(tag, old_tagged_value_size).unwrap(); + // let _ = + // decrement_size_for_remove_tag(&mut new_group_size, old_size); + if decrement_size_for_remove_tag(&mut new_group_size, old_size) + .is_err() + { + // Check it only happens for speculative executions that may not + // commit by returning incorrect (empty) output. + return ExecutionStatus::Success(MockOutput::skip_output()); + } + } + if !new_inner_op.is_deletion() { + let new_size = group_tagged_resource_size( + tag, + inner_op.bytes.as_ref().unwrap().len(), + ) + .unwrap(); + if increment_size_for_add_tag(&mut new_group_size, new_size) + .is_err() + { + // Check it only happens for speculative executions that may not + // commit by returning incorrect (empty) output. + return ExecutionStatus::Success(MockOutput::skip_output()); + } } + + new_inner_ops.insert(*tag, new_inner_op); } } - if !inner_ops.is_empty() { - // Not testing metadata_op here, always modification. 
- group_writes.push(( - key.clone(), - ValueType::new( - Some(Bytes::new()), - metadata.clone(), - WriteOpKind::Modification, - ), - new_inner_ops, - )); + if !new_inner_ops.is_empty() { + if group_size.get() > 0 + && new_group_size == ResourceGroupSize::zero_combined() + { + // TODO: reserved tag currently prevents this code from being run. + // Group got deleted. + group_writes.push(( + key.clone(), + ValueType::new(None, metadata.clone(), WriteOpKind::Deletion), + new_group_size, + new_inner_ops, + )); + } else { + let op_kind = if group_size.get() == 0 { + WriteOpKind::Creation + } else { + WriteOpKind::Modification + }; + + // Not testing metadata_op here, always modification. + group_writes.push(( + key.clone(), + ValueType::new(Some(Bytes::new()), metadata.clone(), op_kind), + new_group_size, + new_inner_ops, + )); + } } } @@ -1024,7 +1077,7 @@ pub(crate) enum GroupSizeOrMetadata { pub(crate) struct MockOutput { pub(crate) writes: Vec<(K, ValueType)>, // Key, metadata_op, inner_ops - pub(crate) group_writes: Vec<(K, ValueType, HashMap)>, + pub(crate) group_writes: Vec<(K, ValueType, ResourceGroupSize, HashMap)>, pub(crate) deltas: Vec<(K, DeltaOp)>, pub(crate) events: Vec, pub(crate) read_results: Vec>>, @@ -1111,15 +1164,17 @@ where ) -> Vec<( K, ValueType, + ResourceGroupSize, BTreeMap>)>, )> { self.group_writes .iter() .cloned() - .map(|(group_key, metadata_v, inner_ops)| { + .map(|(group_key, metadata_v, group_size, inner_ops)| { ( group_key, metadata_v, + group_size, inner_ops.into_iter().map(|(k, v)| (k, (v, None))).collect(), ) }) @@ -1165,12 +1220,26 @@ where fn incorporate_materialized_txn_output( &self, aggregator_v1_writes: Vec<(::Key, WriteOp)>, - _patched_resource_write_set: Vec<( + patched_resource_write_set: Vec<( ::Key, ::Value, )>, _patched_events: Vec<::Event>, ) -> Result<(), PanicError> { + let resources: HashMap<::Key, ::Value> = + patched_resource_write_set.clone().into_iter().collect(); + for (key, _, size, _) in &self.group_writes { + 
let v = resources.get(key).unwrap(); + if v.is_deletion() { + assert_eq!(*size, ResourceGroupSize::zero_combined()); + } else { + assert_eq!( + size.get(), + resources.get(key).unwrap().bytes().map_or(0, |b| b.len()) as u64 + ); + } + } + assert_ok!(self.materialized_delta_writes.set(aggregator_v1_writes)); // TODO[agg_v2](tests): Set the patched resource write set and events. But that requires the function // to take &mut self as input diff --git a/aptos-move/block-executor/src/scheduler.rs b/aptos-move/block-executor/src/scheduler.rs index 385e0076a606a..27ee7e80d1772 100644 --- a/aptos-move/block-executor/src/scheduler.rs +++ b/aptos-move/block-executor/src/scheduler.rs @@ -3,10 +3,9 @@ // SPDX-License-Identifier: Apache-2.0 use crate::explicit_sync_wrapper::ExplicitSyncWrapper; -use aptos_aggregator::types::code_invariant_error; use aptos_infallible::Mutex; use aptos_mvhashmap::types::{Incarnation, TxnIndex}; -use aptos_types::delayed_fields::PanicError; +use aptos_types::error::{code_invariant_error, PanicError}; use concurrent_queue::{ConcurrentQueue, PopError}; use crossbeam::utils::CachePadded; use parking_lot::{RwLock, RwLockUpgradableReadGuard}; diff --git a/aptos-move/block-executor/src/task.rs b/aptos-move/block-executor/src/task.rs index 350b566175924..6b87a482f2993 100644 --- a/aptos-move/block-executor/src/task.rs +++ b/aptos-move/block-executor/src/task.rs @@ -8,13 +8,13 @@ use aptos_aggregator::{ }; use aptos_mvhashmap::types::TxnIndex; use aptos_types::{ - delayed_fields::PanicError, + error::PanicError, fee_statement::FeeStatement, state_store::{state_value::StateValueMetadata, TStateView}, transaction::BlockExecutableTransaction as Transaction, write_set::WriteOp, }; -use aptos_vm_types::resolver::{TExecutorView, TResourceGroupView}; +use aptos_vm_types::resolver::{ResourceGroupSize, TExecutorView, TResourceGroupView}; use move_core_types::{value::MoveTypeLayout, vm_status::StatusCode}; use std::{ collections::{BTreeMap, HashSet}, @@ -144,6 
+144,7 @@ pub trait TransactionOutput: Send + Sync + Debug { ) -> Vec<( ::Key, ::Value, + ResourceGroupSize, BTreeMap< ::Tag, ( @@ -161,7 +162,7 @@ pub trait TransactionOutput: Send + Sync + Debug { )> { self.resource_group_write_set() .into_iter() - .map(|(key, op, _)| (key, op)) + .map(|(key, op, _, _)| (key, op)) .collect() } diff --git a/aptos-move/block-executor/src/txn_last_input_output.rs b/aptos-move/block-executor/src/txn_last_input_output.rs index c928a88005bac..bc6b203f28b25 100644 --- a/aptos-move/block-executor/src/txn_last_input_output.rs +++ b/aptos-move/block-executor/src/txn_last_input_output.rs @@ -8,13 +8,14 @@ use crate::{ task::{ExecutionStatus, TransactionOutput}, types::{InputOutputKey, ReadWriteSummary}, }; -use aptos_aggregator::types::code_invariant_error; use aptos_logger::error; -use aptos_mvhashmap::types::{TxnIndex, ValueWithLayout}; +use aptos_mvhashmap::types::TxnIndex; use aptos_types::{ - delayed_fields::PanicError, fee_statement::FeeStatement, + error::{code_invariant_error, PanicError}, + fee_statement::FeeStatement, state_store::state_value::StateValueMetadata, - transaction::BlockExecutableTransaction as Transaction, write_set::WriteOp, + transaction::BlockExecutableTransaction as Transaction, + write_set::WriteOp, }; use arc_swap::ArcSwapOption; use crossbeam::utils::CachePadded; @@ -34,7 +35,7 @@ macro_rules! forward_on_success_or_skip_rest { $self.outputs[$txn_idx as usize] .load() .as_ref() - .map_or(vec![], |txn_output| match txn_output.as_ref() { + .map_or_else(Vec::new, |txn_output| match txn_output.as_ref() { ExecutionStatus::Success(t) | ExecutionStatus::SkipRest(t) => t.$f(), ExecutionStatus::Abort(_) | ExecutionStatus::SpeculativeExecutionAbortError(_) @@ -43,21 +44,15 @@ macro_rules! forward_on_success_or_skip_rest { }}; } -pub(crate) enum KeyKind { +pub(crate) enum KeyKind { Resource, Module, - Group, + // Contains the set of tags for the given group key. 
+ Group(HashSet), } pub struct TxnLastInputOutput, E: Debug> { inputs: Vec>>>, // txn_idx -> input. - // Set once when the group outputs are committed sequentially, to be processed later by - // concurrent materialization / output preparation. - finalized_groups: Vec< - CachePadded< - ExplicitSyncWrapper)>)>>, - >, - >, // TODO: Consider breaking down the outputs when storing (avoid traversals, cache below). outputs: Vec>>>, // txn_idx -> output. @@ -67,6 +62,8 @@ pub struct TxnLastInputOutput, E: arced_resource_writes: Vec< CachePadded, Option>)>>>, >, + resource_group_keys_and_tags: + Vec)>>>>, // Record all writes and reads to access paths corresponding to modules (code) in any // (speculative) executions. Used to avoid a potential race with module publishing and @@ -89,9 +86,10 @@ impl, E: Debug + Send + Clone> arced_resource_writes: (0..num_txns) .map(|_| CachePadded::new(ExplicitSyncWrapper::>::new(vec![]))) .collect(), - finalized_groups: (0..num_txns) + resource_group_keys_and_tags: (0..num_txns) .map(|_| CachePadded::new(ExplicitSyncWrapper::>::new(vec![]))) .collect(), + module_writes: DashSet::new(), module_reads: DashSet::new(), } @@ -131,6 +129,7 @@ impl, E: Debug + Send + Clone> input: CapturedReads, output: ExecutionStatus, arced_resource_writes: Vec<(T::Key, Arc, Option>)>, + group_keys_and_tags: Vec<(T::Key, HashSet)>, ) -> bool { let written_modules = match &output { ExecutionStatus::Success(output) | ExecutionStatus::SkipRest(output) => { @@ -148,6 +147,7 @@ impl, E: Debug + Send + Clone> } *self.arced_resource_writes[txn_idx as usize].acquire() = arced_resource_writes; + *self.resource_group_keys_and_tags[txn_idx as usize].acquire() = group_keys_and_tags; self.inputs[txn_idx as usize].store(Some(Arc::new(input))); self.outputs[txn_idx as usize].store(Some(Arc::new(output))); @@ -250,18 +250,21 @@ impl, E: Debug + Send + Clone> } } - pub(crate) fn update_to_skip_rest(&self, txn_idx: TxnIndex) { + pub(crate) fn update_to_skip_rest(&self, txn_idx: 
TxnIndex) -> Result<(), PanicError> { if self.block_skips_rest_at_idx(txn_idx) { // Already skipping. - return; + return Ok(()); } // check_execution_status_during_commit must be used for checks re:status. // Hence, since the status is not SkipRest, it must be Success. - if let ExecutionStatus::Success(output) = self.take_output(txn_idx) { + if let ExecutionStatus::Success(output) = self.take_output(txn_idx)? { self.outputs[txn_idx as usize].store(Some(Arc::new(ExecutionStatus::SkipRest(output)))); + Ok(()) } else { - unreachable!("Unexpected status, must be Success"); + Err(code_invariant_error( + "Unexpected status to change to SkipRest, must be Success", + )) } } @@ -270,11 +273,22 @@ impl, E: Debug + Send + Clone> } // Extracts a set of paths (keys) written or updated during execution from transaction - // output, .1 for each item is false for non-module paths and true for module paths. + // output, with corresponding KeyKind. If take_group_tags is true, the final HashSet + // of tags is moved for the group key - should be called once for each incarnation / record + // due to 'take'. if false, stored modified group resource tags in the group are cloned out. 
pub(crate) fn modified_keys( &self, txn_idx: TxnIndex, - ) -> Option> { + take_group_tags: bool, + ) -> Option)>> { + let group_keys_and_tags: Vec<(T::Key, HashSet)> = if take_group_tags { + std::mem::take(&mut self.resource_group_keys_and_tags[txn_idx as usize].acquire()) + } else { + self.resource_group_keys_and_tags[txn_idx as usize] + .acquire() + .clone() + }; + self.outputs[txn_idx as usize] .load_full() .and_then(|txn_output| match txn_output.as_ref() { @@ -296,9 +310,9 @@ impl, E: Debug + Send + Clone> .map(|k| (k, KeyKind::Module)), ) .chain( - t.resource_group_metadata_ops() + group_keys_and_tags .into_iter() - .map(|(k, _)| (k, KeyKind::Group)), + .map(|(k, tags)| (k, KeyKind::Group(tags))), ), ), ExecutionStatus::Abort(_) @@ -353,9 +367,9 @@ impl, E: Debug + Send + Clone> &self, txn_idx: TxnIndex, ) -> Box)>> { - self.outputs[txn_idx as usize].load().as_ref().map_or( - Box::new(empty::<(T::Event, Option)>()), - |txn_output| match txn_output.as_ref() { + match self.outputs[txn_idx as usize].load().as_ref() { + None => Box::new(empty::<(T::Event, Option)>()), + Some(txn_output) => match txn_output.as_ref() { ExecutionStatus::Success(t) | ExecutionStatus::SkipRest(t) => { let events = t.get_events(); Box::new(events.into_iter()) @@ -366,22 +380,7 @@ impl, E: Debug + Send + Clone> Box::new(empty::<(T::Event, Option)>()) }, }, - ) - } - - pub(crate) fn record_finalized_group( - &self, - txn_idx: TxnIndex, - finalized_groups: Vec<(T::Key, T::Value, Vec<(T::Tag, ValueWithLayout)>)>, - ) { - *self.finalized_groups[txn_idx as usize].acquire() = finalized_groups; - } - - pub(crate) fn take_finalized_group( - &self, - txn_idx: TxnIndex, - ) -> Vec<(T::Key, T::Value, Vec<(T::Tag, ValueWithLayout)>)> { - std::mem::take(&mut self.finalized_groups[txn_idx as usize].acquire()) + } } pub(crate) fn take_resource_write_set( @@ -446,12 +445,16 @@ impl, E: Debug + Send + Clone> // Must be executed after parallel execution is done, grabs outputs. 
Will panic if // other outstanding references to the recorded outputs exist. - pub(crate) fn take_output(&self, txn_idx: TxnIndex) -> ExecutionStatus { - let owning_ptr = self.outputs[txn_idx as usize] - .swap(None) - .expect("[BlockSTM]: Output must be recorded after execution"); - - Arc::try_unwrap(owning_ptr) - .expect("[BlockSTM]: Output should be uniquely owned after execution") + pub(crate) fn take_output( + &self, + txn_idx: TxnIndex, + ) -> Result, PanicError> { + let owning_ptr = self.outputs[txn_idx as usize].swap(None).ok_or_else(|| { + code_invariant_error("[BlockSTM]: Output must be recorded after execution") + })?; + + Arc::try_unwrap(owning_ptr).map_err(|_| { + code_invariant_error("[BlockSTM]: Output must be uniquely owned after execution") + }) } } diff --git a/aptos-move/block-executor/src/unit_tests/mod.rs b/aptos-move/block-executor/src/unit_tests/mod.rs index 82472dd15aad5..743e2400240ff 100644 --- a/aptos-move/block-executor/src/unit_tests/mod.rs +++ b/aptos-move/block-executor/src/unit_tests/mod.rs @@ -28,8 +28,9 @@ use aptos_types::{ contract_event::TransactionEvent, executable::{ExecutableTestType, ModulePath}, state_store::state_value::StateValueMetadata, + write_set::WriteOpKind, }; -use claims::assert_matches; +use claims::{assert_matches, assert_ok}; use fail::FailScenario; use rand::{prelude::*, random}; use std::{ @@ -41,6 +42,60 @@ use std::{ sync::Arc, }; +#[test] +fn test_resource_group_deletion() { + let mut group_creation: MockIncarnation, MockEvent> = + MockIncarnation::new(vec![KeyType::(1, false)], vec![], vec![], vec![], 10); + group_creation.group_writes.push(( + KeyType::(100, false), + StateValueMetadata::none(), + HashMap::from([(101, ValueType::from_value(vec![5], true))]), + )); + let mut group_deletion: MockIncarnation, MockEvent> = + MockIncarnation::new(vec![KeyType::(1, false)], vec![], vec![], vec![], 10); + group_deletion.group_writes.push(( + KeyType::(100, false), + StateValueMetadata::none(), + HashMap::from([( 
+ 101, + ValueType::new(None, StateValueMetadata::none(), WriteOpKind::Deletion), + )]), + )); + let t_0 = MockTransaction::from_behavior(group_creation); + let t_1 = MockTransaction::from_behavior(group_deletion); + + let transactions = Vec::from([t_0, t_1]); + + let data_view = NonEmptyGroupDataView::> { + group_keys: HashSet::new(), + }; + let executor_thread_pool = Arc::new( + rayon::ThreadPoolBuilder::new() + .num_threads(num_cpus::get()) + .build() + .unwrap(), + ); + let block_executor = BlockExecutor::< + MockTransaction, MockEvent>, + MockTask, MockEvent>, + NonEmptyGroupDataView>, + NoOpTransactionCommitHook, MockEvent>, usize>, + ExecutableTestType, + >::new( + BlockExecutorConfig::new_no_block_limit(num_cpus::get()), + executor_thread_pool, + None, + ); + + assert_ok!(block_executor.execute_transactions_sequential( + (), + &transactions, + &data_view, + false + )); + assert_ok!(block_executor.execute_transactions_parallel(&(), &transactions, &data_view)); +} + #[test] fn resource_group_bcs_fallback() { let no_group_incarnation_1: MockIncarnation, MockEvent> = MockIncarnation::new( diff --git a/aptos-move/block-executor/src/value_exchange.rs b/aptos-move/block-executor/src/value_exchange.rs index 5170291e6dba5..e28ec35dddd03 100644 --- a/aptos-move/block-executor/src/value_exchange.rs +++ b/aptos-move/block-executor/src/value_exchange.rs @@ -4,11 +4,11 @@ use crate::view::{LatestView, ViewState}; use aptos_aggregator::{ resolver::TDelayedFieldView, - types::{code_invariant_error, DelayedFieldValue, ReadPosition}, + types::{DelayedFieldValue, ReadPosition}, }; use aptos_mvhashmap::{types::TxnIndex, versioned_delayed_fields::TVersionedDelayedFieldView}; use aptos_types::{ - delayed_fields::PanicError, + error::{code_invariant_error, PanicError}, executable::Executable, state_store::{state_value::StateValueMetadata, TStateView}, transaction::BlockExecutableTransaction as Transaction, @@ -92,7 +92,7 @@ impl<'a, T: Transaction, S: TStateView, X: Executable> 
ValueToIden ViewState::Sync(state) => state .versioned_map .delayed_fields() - .read_latest_committed_value( + .read_latest_predicted_value( &identifier, self.txn_idx, ReadPosition::AfterCurrentTxn, diff --git a/aptos-move/block-executor/src/view.rs b/aptos-move/block-executor/src/view.rs index 837a47d419ed9..871c4166dcc32 100644 --- a/aptos-move/block-executor/src/view.rs +++ b/aptos-move/block-executor/src/view.rs @@ -19,10 +19,7 @@ use aptos_aggregator::{ delta_change_set::serialize, delta_math::DeltaHistory, resolver::{TAggregatorV1View, TDelayedFieldView}, - types::{ - code_invariant_error, expect_ok, DelayedFieldValue, DelayedFieldsSpeculativeError, PanicOr, - ReadPosition, - }, + types::{DelayedFieldValue, DelayedFieldsSpeculativeError, ReadPosition}, }; use aptos_logger::error; use aptos_mvhashmap::{ @@ -36,7 +33,7 @@ use aptos_mvhashmap::{ MVHashMap, }; use aptos_types::{ - delayed_fields::PanicError, + error::{code_invariant_error, expect_ok, PanicError, PanicOr}, executable::{Executable, ModulePath}, state_store::{ errors::StateviewError, @@ -143,7 +140,11 @@ trait ResourceState { } trait ResourceGroupState { - fn set_raw_group_base_values(&self, group_key: T::Key, base_values: Vec<(T::Tag, T::Value)>); + fn set_raw_group_base_values( + &self, + group_key: T::Key, + base_values: Vec<(T::Tag, T::Value)>, + ) -> PartialVMResult<()>; fn read_cached_group_tagged_data( &self, @@ -362,8 +363,8 @@ fn delayed_field_try_add_delta_outcome_impl( .into()); } - let last_committed_value = loop { - match versioned_delayed_fields.read_latest_committed_value( + let predicted_value = loop { + match versioned_delayed_fields.read_latest_predicted_value( id, txn_idx, ReadPosition::BeforeCurrentTxn, @@ -388,7 +389,7 @@ fn delayed_field_try_add_delta_outcome_impl( compute_delayed_field_try_add_delta_outcome_first_time( delta, max_value, - last_committed_value, + predicted_value, )?; captured_reads @@ -515,9 +516,6 @@ impl<'a, T: Transaction, X: Executable> ParallelState<'a, T, 
X> { .with_message("Interrupted as block execution was halted".to_string())); } }, - Err(TagSerializationError(e)) => { - return Err(e); - }, } } } @@ -643,7 +641,7 @@ impl<'a, T: Transaction, X: Executable> ResourceState for ParallelState<'a, T )); }, Ok(false) => { - self.captured_reads.borrow_mut().mark_failure(); + self.captured_reads.borrow_mut().mark_failure(false); return ReadResult::HaltSpeculativeExecution( "Interrupted as block execution was halted".to_string(), ); @@ -655,7 +653,7 @@ impl<'a, T: Transaction, X: Executable> ResourceState for ParallelState<'a, T }, Err(DeltaApplicationFailure) => { // AggregatorV1 may have delta application failure due to speculation. - self.captured_reads.borrow_mut().mark_failure(); + self.captured_reads.borrow_mut().mark_failure(false); return ReadResult::HaltSpeculativeExecution( "Delta application failure (must be speculative)".to_string(), ); @@ -666,10 +664,19 @@ impl<'a, T: Transaction, X: Executable> ResourceState for ParallelState<'a, T } impl<'a, T: Transaction, X: Executable> ResourceGroupState for ParallelState<'a, T, X> { - fn set_raw_group_base_values(&self, group_key: T::Key, base_values: Vec<(T::Tag, T::Value)>) { + fn set_raw_group_base_values( + &self, + group_key: T::Key, + base_values: Vec<(T::Tag, T::Value)>, + ) -> PartialVMResult<()> { self.versioned_map .group_data() - .set_raw_base_values(group_key.clone(), base_values); + .set_raw_base_values(group_key.clone(), base_values) + .map_err(|e| { + self.captured_reads.borrow_mut().mark_incorrect_use(); + PartialVMError::new(StatusCode::UNEXPECTED_DESERIALIZATION_ERROR) + .with_message(e.to_string()) + }) } fn read_cached_group_tagged_data( @@ -760,9 +767,6 @@ impl<'a, T: Transaction, X: Executable> ResourceGroupState for ParallelState< .with_message("Interrupted as block execution was halted".to_string())); } }, - Err(TagSerializationError(_)) => { - unreachable!("Reading a resource does not require tag serialization"); - }, } } } @@ -773,6 +777,7 @@ 
pub(crate) struct SequentialState<'a, T: Transaction, X: Executable> { pub(crate) read_set: RefCell>, pub(crate) start_counter: u32, pub(crate) counter: &'a RefCell, + // TODO: Move to UnsyncMap. pub(crate) incorrect_use: RefCell, } @@ -870,9 +875,18 @@ impl<'a, T: Transaction, X: Executable> ResourceState for SequentialState<'a, } impl<'a, T: Transaction, X: Executable> ResourceGroupState for SequentialState<'a, T, X> { - fn set_raw_group_base_values(&self, group_key: T::Key, base_values: Vec<(T::Tag, T::Value)>) { + fn set_raw_group_base_values( + &self, + group_key: T::Key, + base_values: Vec<(T::Tag, T::Value)>, + ) -> PartialVMResult<()> { self.unsync_map - .set_group_base_values(group_key.clone(), base_values); + .set_group_base_values(group_key.clone(), base_values) + .map_err(|e| { + *self.incorrect_use.borrow_mut() = true; + PartialVMError::new(StatusCode::UNEXPECTED_DESERIALIZATION_ERROR) + .with_message(e.to_string()) + }) } fn read_cached_group_tagged_data( @@ -1016,7 +1030,11 @@ impl<'a, T: Transaction, S: TStateView, X: Executable> LatestView< pub fn is_incorrect_use(&self) -> bool { match &self.latest_view { - ViewState::Sync(state) => state.captured_reads.borrow().is_incorrect_use(), + ViewState::Sync(_) => { + // Parallel executor accesses captured reads directly and does not use this API. + true + }, + // TODO: store incorrect use in UnsyncMap and eliminate this API. ViewState::Unsync(state) => *state.incorrect_use.borrow(), } } @@ -1064,7 +1082,7 @@ impl<'a, T: Transaction, S: TStateView, X: Executable> LatestView< ); self.mark_incorrect_use(); return Err(PartialVMError::new( - StatusCode::DELAYED_MATERIALIZATION_CODE_INVARIANT_ERROR, + StatusCode::DELAYED_FIELD_OR_BLOCKSTM_CODE_INVARIANT_ERROR, ) .with_message(format!("{}", err))); }, @@ -1260,7 +1278,7 @@ impl<'a, T: Transaction, S: TStateView, X: Executable> LatestView< return Ok(None); } match self.get_resource_state_value_metadata(key)? 
{ - Some(metadata) => match unsync_map.get_group_size(key)? { + Some(metadata) => match unsync_map.get_group_size(key) { GroupReadResult::Size(group_size) => { Ok(Some((key.clone(), (metadata, group_size.get())))) }, @@ -1355,7 +1373,7 @@ impl<'a, T: Transaction, S: TStateView, X: Executable> LatestView< bcs::from_bytes(state_value.bytes()).map_err(|e| { PartialVMError::new(StatusCode::UNEXPECTED_DESERIALIZATION_ERROR) .with_message(format!( - "Failed to deserialize the resource group at {:? }: {:?}", + "Failed to deserialize the resource group at {:?}: {:?}", group_key, e )) })?, @@ -1375,7 +1393,7 @@ impl<'a, T: Transaction, S: TStateView, X: Executable> LatestView< self.latest_view .get_resource_group_state() - .set_raw_group_base_values(group_key.clone(), base_group_sentinel_ops); + .set_raw_group_base_values(group_key.clone(), base_group_sentinel_ops)?; self.latest_view.get_resource_state().set_base_value( group_key.clone(), ValueWithLayout::RawFromStorage(Arc::new(metadata_op)), @@ -1442,7 +1460,7 @@ impl<'a, T: Transaction, S: TStateView, X: Executable> TResourceGr ) -> PartialVMResult { let mut group_read = match &self.latest_view { ViewState::Sync(state) => state.read_group_size(group_key, self.txn_idx)?, - ViewState::Unsync(state) => state.unsync_map.get_group_size(group_key)?, + ViewState::Unsync(state) => state.unsync_map.get_group_size(group_key), }; if matches!(group_read, GroupReadResult::Uninitialized) { @@ -1450,7 +1468,7 @@ impl<'a, T: Transaction, S: TStateView, X: Executable> TResourceGr group_read = match &self.latest_view { ViewState::Sync(state) => state.read_group_size(group_key, self.txn_idx)?, - ViewState::Unsync(state) => state.unsync_map.get_group_size(group_key)?, + ViewState::Unsync(state) => state.unsync_map.get_group_size(group_key), } }; @@ -1492,22 +1510,6 @@ impl<'a, T: Transaction, S: TStateView, X: Executable> TResourceGr Ok(group_read.into_value().0) } - fn resource_size_in_group( - &self, - _group_key: &Self::GroupKey, - 
_resource_tag: &Self::ResourceTag, - ) -> PartialVMResult { - unimplemented!("Currently resolved by ResourceGroupAdapter"); - } - - fn resource_exists_in_group( - &self, - _group_key: &Self::GroupKey, - _resource_tag: &Self::ResourceTag, - ) -> PartialVMResult { - unimplemented!("Currently resolved by ResourceGroupAdapter"); - } - fn release_group_cache( &self, ) -> Option>> { @@ -1759,7 +1761,7 @@ mod test { use aptos_aggregator::{ bounded_math::{BoundedMath, SignedU128}, delta_math::DeltaHistory, - types::{DelayedFieldValue, DelayedFieldsSpeculativeError, PanicOr, ReadPosition}, + types::{DelayedFieldValue, DelayedFieldsSpeculativeError, ReadPosition}, }; use aptos_mvhashmap::{ types::{MVDelayedFieldsError, TxnIndex}, @@ -1768,6 +1770,7 @@ mod test { MVHashMap, }; use aptos_types::{ + error::PanicOr, executable::Executable, state_store::{ errors::StateviewError, state_storage_usage::StateStorageUsage, @@ -1813,7 +1816,7 @@ mod test { .ok_or(PanicOr::Or(MVDelayedFieldsError::NotFound)) } - fn read_latest_committed_value( + fn read_latest_predicted_value( &self, id: &DelayedFieldID, _current_txn_idx: TxnIndex, diff --git a/aptos-move/framework/README.md b/aptos-move/framework/README.md index d308719bb50e0..31f6e61e81b06 100644 --- a/aptos-move/framework/README.md +++ b/aptos-move/framework/README.md @@ -79,7 +79,7 @@ The overall structure of the Aptos Framework is as follows: ├── aptos-token # Sources, testing and generated documentation for Aptos token component ├── aptos-stdlib # Sources, testing and generated documentation for Aptos stdlib component ├── move-stdlib # Sources, testing and generated documentation for Move stdlib component -├── cached-packages # Tooling to generate SDK from mvoe sources. +├── cached-packages # Tooling to generate SDK from move sources. ├── src # Compilation and generation of information from Move source files in the Aptos Framework. 
Not designed to be used as a Rust library ├── releases # Move release bundles └── tests diff --git a/aptos-move/framework/aptos-framework/doc/managed_coin.md b/aptos-move/framework/aptos-framework/doc/managed_coin.md index 8ae37e71a1f9c..50c2383fd111d 100644 --- a/aptos-move/framework/aptos-framework/doc/managed_coin.md +++ b/aptos-move/framework/aptos-framework/doc/managed_coin.md @@ -14,6 +14,8 @@ By utilizing this current module, a developer can create his own coin and care l - [Function `initialize`](#0x1_managed_coin_initialize) - [Function `mint`](#0x1_managed_coin_mint) - [Function `register`](#0x1_managed_coin_register) +- [Function `destroy_caps`](#0x1_managed_coin_destroy_caps) +- [Function `remove_caps`](#0x1_managed_coin_remove_caps) - [Specification](#@Specification_1) - [High-level Requirements](#high-level-req) - [Module-level Specification](#module-level-spec) @@ -21,6 +23,8 @@ By utilizing this current module, a developer can create his own coin and care l - [Function `initialize`](#@Specification_1_initialize) - [Function `mint`](#@Specification_1_mint) - [Function `register`](#@Specification_1_register) + - [Function `destroy_caps`](#@Specification_1_destroy_caps) + - [Function `remove_caps`](#@Specification_1_remove_caps)
use 0x1::coin;
@@ -231,6 +235,72 @@ Required if user wants to start accepting deposits of CoinType in h
 
 
 
+
+
+
+
+## Function `destroy_caps`
+
+Destroys capabilities from the account, so that the user no longer has access to mint or burn.
+
+
+
public entry fun destroy_caps<CoinType>(account: &signer)
+
+ + + +
+Implementation + + +
public entry fun destroy_caps<CoinType>(account: &signer) acquires Capabilities {
+    let (burn_cap, freeze_cap, mint_cap) = remove_caps<CoinType>(account);
+    destroy_burn_cap(burn_cap);
+    destroy_freeze_cap(freeze_cap);
+    destroy_mint_cap(mint_cap);
+}
+
+ + + +
+ + + +## Function `remove_caps` + +Removes capabilities from the account to be stored or destroyed elsewhere + + +
public fun remove_caps<CoinType>(account: &signer): (coin::BurnCapability<CoinType>, coin::FreezeCapability<CoinType>, coin::MintCapability<CoinType>)
+
+ + + +
+Implementation + + +
public fun remove_caps<CoinType>(
+    account: &signer
+): (BurnCapability<CoinType>, FreezeCapability<CoinType>, MintCapability<CoinType>) acquires Capabilities {
+    let account_addr = signer::address_of(account);
+    assert!(
+        exists<Capabilities<CoinType>>(account_addr),
+        error::not_found(ENO_CAPABILITIES),
+    );
+
+    let Capabilities<CoinType> {
+        burn_cap,
+        freeze_cap,
+        mint_cap,
+    } = move_from<Capabilities<CoinType>>(account_addr);
+    (burn_cap, freeze_cap, mint_cap)
+}
+
+ + +
@@ -423,4 +493,40 @@ Updating Account.guid_creation_num will not overflow.
+ + + +### Function `destroy_caps` + + +
public entry fun destroy_caps<CoinType>(account: &signer)
+
+ + + + +
let account_addr = signer::address_of(account);
+aborts_if !exists<Capabilities<CoinType>>(account_addr);
+ensures !exists<Capabilities<CoinType>>(account_addr);
+
+ + + + + +### Function `remove_caps` + + +
public fun remove_caps<CoinType>(account: &signer): (coin::BurnCapability<CoinType>, coin::FreezeCapability<CoinType>, coin::MintCapability<CoinType>)
+
+ + + + +
let account_addr = signer::address_of(account);
+aborts_if !exists<Capabilities<CoinType>>(account_addr);
+ensures !exists<Capabilities<CoinType>>(account_addr);
+
+ + [move-book]: https://aptos.dev/move/book/SUMMARY diff --git a/aptos-move/framework/aptos-framework/doc/multisig_account.md b/aptos-move/framework/aptos-framework/doc/multisig_account.md index 8ce70dc316506..74dca99c66d88 100644 --- a/aptos-move/framework/aptos-framework/doc/multisig_account.md +++ b/aptos-move/framework/aptos-framework/doc/multisig_account.md @@ -82,7 +82,9 @@ and implement the governance voting logic on top. - [Function `next_sequence_number`](#0x1_multisig_account_next_sequence_number) - [Function `vote`](#0x1_multisig_account_vote) - [Function `available_transaction_queue_capacity`](#0x1_multisig_account_available_transaction_queue_capacity) +- [Function `create_with_existing_account_call`](#0x1_multisig_account_create_with_existing_account_call) - [Function `create_with_existing_account`](#0x1_multisig_account_create_with_existing_account) +- [Function `create_with_existing_account_and_revoke_auth_key_call`](#0x1_multisig_account_create_with_existing_account_and_revoke_auth_key_call) - [Function `create_with_existing_account_and_revoke_auth_key`](#0x1_multisig_account_create_with_existing_account_and_revoke_auth_key) - [Function `create`](#0x1_multisig_account_create) - [Function `create_with_owners`](#0x1_multisig_account_create_with_owners) @@ -1932,6 +1934,50 @@ Return a bool tuple indicating whether an owner has voted and if so, whether the + + + + +## Function `create_with_existing_account_call` + +Private entry function that creates a new multisig account on top of an existing account. + +This offers a migration path for an existing account with any type of auth key. + +Note that this does not revoke auth key-based control over the account. Owners should separately rotate the auth +key after they are fully migrated to the new multisig account. Alternatively, they can call +create_with_existing_account_and_revoke_auth_key_call instead. + + +
entry fun create_with_existing_account_call(multisig_account: &signer, owners: vector<address>, num_signatures_required: u64, metadata_keys: vector<string::String>, metadata_values: vector<vector<u8>>)
+
+ + + +
+Implementation + + +
entry fun create_with_existing_account_call(
+    multisig_account: &signer,
+    owners: vector<address>,
+    num_signatures_required: u64,
+    metadata_keys: vector<String>,
+    metadata_values: vector<vector<u8>>,
+) acquires MultisigAccount {
+    create_with_owners_internal(
+        multisig_account,
+        owners,
+        num_signatures_required,
+        option::none<SignerCapability>(),
+        metadata_keys,
+        metadata_values,
+    );
+}
+
+ + +
@@ -2002,6 +2048,61 @@ create_with_existing_account_and_revoke_auth_key instead. + + + + +## Function `create_with_existing_account_and_revoke_auth_key_call` + +Private entry function that creates a new multisig account on top of an existing account and immediately rotate +the origin auth key to 0x0. + +Note: If the original account is a resource account, this does not revoke all control over it as if any +SignerCapability of the resource account still exists, it can still be used to generate the signer for the +account. + + +
entry fun create_with_existing_account_and_revoke_auth_key_call(multisig_account: &signer, owners: vector<address>, num_signatures_required: u64, metadata_keys: vector<string::String>, metadata_values: vector<vector<u8>>)
+
+ + + +
+Implementation + + +
entry fun create_with_existing_account_and_revoke_auth_key_call(
+    multisig_account: &signer,
+    owners: vector<address>,
+    num_signatures_required: u64,
+    metadata_keys: vector<String>,
+    metadata_values: vector&lt;vector&lt;u8&gt;&gt;,
+) acquires MultisigAccount {
+    create_with_owners_internal(
+        multisig_account,
+        owners,
+        num_signatures_required,
+        option::none<SignerCapability>(),
+        metadata_keys,
+        metadata_values,
+    );
+
+    // Rotate the account's auth key to 0x0, which effectively revokes control via auth key.
+    let multisig_address = address_of(multisig_account);
+    account::rotate_authentication_key_internal(multisig_account, ZERO_AUTH_KEY);
+    // This also needs to revoke any signer capability or rotation capability that exists for the account to
+    // completely remove all access to the account.
+    if (account::is_signer_capability_offered(multisig_address)) {
+        account::revoke_any_signer_capability(multisig_account);
+    };
+    if (account::is_rotation_capability_offered(multisig_address)) {
+        account::revoke_any_rotation_capability(multisig_account);
+    };
+}
+
+ + +
diff --git a/aptos-move/framework/aptos-framework/sources/managed_coin.move b/aptos-move/framework/aptos-framework/sources/managed_coin.move index d2932ddb4edd7..a5da4b24ad5c9 100644 --- a/aptos-move/framework/aptos-framework/sources/managed_coin.move +++ b/aptos-move/framework/aptos-framework/sources/managed_coin.move @@ -6,7 +6,9 @@ module aptos_framework::managed_coin { use std::error; use std::signer; - use aptos_framework::coin::{Self, BurnCapability, FreezeCapability, MintCapability}; + use aptos_framework::coin::{Self, BurnCapability, FreezeCapability, MintCapability, destroy_burn_cap, + destroy_freeze_cap, destroy_mint_cap + }; // // Errors @@ -97,6 +99,32 @@ module aptos_framework::managed_coin { coin::register(account); } + /// Destroys capabilities from the account, so that the user no longer has access to mint or burn. + public entry fun destroy_caps(account: &signer) acquires Capabilities { + let (burn_cap, freeze_cap, mint_cap) = remove_caps(account); + destroy_burn_cap(burn_cap); + destroy_freeze_cap(freeze_cap); + destroy_mint_cap(mint_cap); + } + + /// Removes capabilities from the account to be stored or destroyed elsewhere + public fun remove_caps( + account: &signer + ): (BurnCapability, FreezeCapability, MintCapability) acquires Capabilities { + let account_addr = signer::address_of(account); + assert!( + exists>(account_addr), + error::not_found(ENO_CAPABILITIES), + ); + + let Capabilities { + burn_cap, + freeze_cap, + mint_cap, + } = move_from>(account_addr); + (burn_cap, freeze_cap, mint_cap) + } + // // Tests // @@ -156,6 +184,40 @@ module aptos_framework::managed_coin { let new_supply = coin::supply(); assert!(option::extract(&mut new_supply) == 20, 2); + + // Destroy mint capabilities + destroy_caps(&mod_account); + assert!(!exists>(signer::address_of(&mod_account)), 3); + } + + #[test(source = @0xa11ce, destination = @0xb0b, mod_account = @0x1)] + public entry fun test_end_to_end_caps_removal( + source: signer, + destination: signer, + 
mod_account: signer + ) acquires Capabilities { + let source_addr = signer::address_of(&source); + let destination_addr = signer::address_of(&destination); + aptos_framework::account::create_account_for_test(source_addr); + aptos_framework::account::create_account_for_test(destination_addr); + aptos_framework::account::create_account_for_test(signer::address_of(&mod_account)); + aggregator_factory::initialize_aggregator_factory_for_test(&mod_account); + + initialize( + &mod_account, + b"Fake Money", + b"FMD", + 10, + true + ); + assert!(coin::is_coin_initialized(), 0); + + // Remove capabilities + let (burn_cap, freeze_cap, mint_cap) = remove_caps(&mod_account); + assert!(!exists>(signer::address_of(&mod_account)), 3); + coin::destroy_mint_cap(mint_cap); + coin::destroy_freeze_cap(freeze_cap); + coin::destroy_burn_cap(burn_cap); } #[test(source = @0xa11ce, destination = @0xb0b, mod_account = @0x1)] diff --git a/aptos-move/framework/aptos-framework/sources/managed_coin.spec.move b/aptos-move/framework/aptos-framework/sources/managed_coin.spec.move index e6eafd0904c11..344c9744f7c97 100644 --- a/aptos-move/framework/aptos-framework/sources/managed_coin.spec.move +++ b/aptos-move/framework/aptos-framework/sources/managed_coin.spec.move @@ -147,4 +147,16 @@ spec aptos_framework::managed_coin { aborts_if !exists>(account_addr) && !type_info::spec_is_struct(); ensures exists>(account_addr); } + + spec remove_caps(account: &signer): (BurnCapability, FreezeCapability, MintCapability) { + let account_addr = signer::address_of(account); + aborts_if !exists>(account_addr); + ensures !exists>(account_addr); + } + + spec destroy_caps (account: &signer) { + let account_addr = signer::address_of(account); + aborts_if !exists>(account_addr); + ensures !exists>(account_addr); + } } diff --git a/aptos-move/framework/aptos-framework/sources/multisig_account.move b/aptos-move/framework/aptos-framework/sources/multisig_account.move index 1917e584d4a3b..80fffd93ecd59 100644 --- 
a/aptos-move/framework/aptos-framework/sources/multisig_account.move +++ b/aptos-move/framework/aptos-framework/sources/multisig_account.move @@ -497,6 +497,30 @@ module aptos_framework::multisig_account { ////////////////////////// Multisig account creation functions /////////////////////////////// + /// Private entry function that creates a new multisig account on top of an existing account. + /// + /// This offers a migration path for an existing account with any type of auth key. + /// + /// Note that this does not revoke auth key-based control over the account. Owners should separately rotate the auth + /// key after they are fully migrated to the new multisig account. Alternatively, they can call + /// create_with_existing_account_and_revoke_auth_key_call instead. + entry fun create_with_existing_account_call( + multisig_account: &signer, + owners: vector
, + num_signatures_required: u64, + metadata_keys: vector, + metadata_values: vector>, + ) acquires MultisigAccount { + create_with_owners_internal( + multisig_account, + owners, + num_signatures_required, + option::none(), + metadata_keys, + metadata_values, + ); + } + /// Creates a new multisig account on top of an existing account. /// /// This offers a migration path for an existing account with a multi-ed25519 auth key (native multisig account). @@ -547,6 +571,41 @@ module aptos_framework::multisig_account { ); } + /// Private entry function that creates a new multisig account on top of an existing account and immediately rotate + /// the origin auth key to 0x0. + /// + /// Note: If the original account is a resource account, this does not revoke all control over it as if any + /// SignerCapability of the resource account still exists, it can still be used to generate the signer for the + /// account. + entry fun create_with_existing_account_and_revoke_auth_key_call( + multisig_account: &signer, + owners: vector
, + num_signatures_required: u64, + metadata_keys: vector, + metadata_values:vector>, + ) acquires MultisigAccount { + create_with_owners_internal( + multisig_account, + owners, + num_signatures_required, + option::none(), + metadata_keys, + metadata_values, + ); + + // Rotate the account's auth key to 0x0, which effectively revokes control via auth key. + let multisig_address = address_of(multisig_account); + account::rotate_authentication_key_internal(multisig_account, ZERO_AUTH_KEY); + // This also needs to revoke any signer capability or rotation capability that exists for the account to + // completely remove all access to the account. + if (account::is_signer_capability_offered(multisig_address)) { + account::revoke_any_signer_capability(multisig_account); + }; + if (account::is_rotation_capability_offered(multisig_address)) { + account::revoke_any_rotation_capability(multisig_account); + }; + } + /// Creates a new multisig account on top of an existing account and immediately rotate the origin auth key to 0x0. 
/// /// Note: If the original account is a resource account, this does not revoke all control over it as if any @@ -1688,6 +1747,26 @@ module aptos_framework::multisig_account { ); } + #[test] + public entry fun test_create_multisig_account_on_top_of_existing_with_signer() + acquires MultisigAccount { + setup(); + + let multisig_address = @0xabc; + create_account(multisig_address); + + let expected_owners = vector[@0x123, @0x124, @0x125]; + create_with_existing_account_call( + &create_signer(multisig_address), + expected_owners, + 2, + vector[], + vector[], + ); + assert_multisig_account_exists(multisig_address); + assert!(owners(multisig_address) == expected_owners, 0); + } + #[test] public entry fun test_create_multisig_account_on_top_of_existing_multi_ed25519_account() acquires MultisigAccount { @@ -1721,6 +1800,34 @@ module aptos_framework::multisig_account { assert!(owners(multisig_address) == expected_owners, 0); } + #[test] + public entry fun test_create_multisig_account_on_top_of_existing_and_revoke_auth_key_with_signer() + acquires MultisigAccount { + setup(); + + let multisig_address = @0xabc; + create_account(multisig_address); + + // Create both a signer capability and rotation capability offers + account::set_rotation_capability_offer(multisig_address, @0x123); + account::set_signer_capability_offer(multisig_address, @0x123); + + let expected_owners = vector[@0x123, @0x124, @0x125]; + create_with_existing_account_and_revoke_auth_key_call( + &create_signer(multisig_address), + expected_owners, + 2, + vector[], + vector[], + ); + assert_multisig_account_exists(multisig_address); + assert!(owners(multisig_address) == expected_owners, 0); + assert!(account::get_authentication_key(multisig_address) == ZERO_AUTH_KEY, 1); + // Verify that all capability offers have been wiped. 
+ assert!(!account::is_rotation_capability_offered(multisig_address), 2); + assert!(!account::is_signer_capability_offered(multisig_address), 3); + } + #[test] public entry fun test_create_multisig_account_on_top_of_existing_multi_ed25519_account_and_revoke_auth_key() acquires MultisigAccount { diff --git a/aptos-move/framework/cached-packages/src/aptos_framework_sdk_builder.rs b/aptos-move/framework/cached-packages/src/aptos_framework_sdk_builder.rs index aa41aa6ea1fd3..e2a32c0640bf5 100644 --- a/aptos-move/framework/cached-packages/src/aptos_framework_sdk_builder.rs +++ b/aptos-move/framework/cached-packages/src/aptos_framework_sdk_builder.rs @@ -510,6 +510,11 @@ pub enum EntryFunctionCall { amount: u64, }, + /// Destroys capabilities from the account, so that the user no longer has access to mint or burn. + ManagedCoinDestroyCaps { + coin_type: TypeTag, + }, + /// Initialize new coin `CoinType` in Aptos Blockchain. /// Mint and Burn Capabilities will be stored under `account` in `Capabilities` resource. ManagedCoinInitialize { @@ -617,6 +622,33 @@ pub enum EntryFunctionCall { metadata_values: Vec>, }, + /// Private entry function that creates a new multisig account on top of an existing account and immediately rotate + /// the origin auth key to 0x0. + /// + /// Note: If the original account is a resource account, this does not revoke all control over it as if any + /// SignerCapability of the resource account still exists, it can still be used to generate the signer for the + /// account. + MultisigAccountCreateWithExistingAccountAndRevokeAuthKeyCall { + owners: Vec, + num_signatures_required: u64, + metadata_keys: Vec>, + metadata_values: Vec>, + }, + + /// Private entry function that creates a new multisig account on top of an existing account. + /// + /// This offers a migration path for an existing account with any type of auth key. + /// + /// Note that this does not revoke auth key-based control over the account. 
Owners should separately rotate the auth + /// key after they are fully migrated to the new multisig account. Alternatively, they can call + /// create_with_existing_account_and_revoke_auth_key_call instead. + MultisigAccountCreateWithExistingAccountCall { + owners: Vec, + num_signatures_required: u64, + metadata_keys: Vec>, + metadata_values: Vec>, + }, + /// Creates a new multisig account with the specified additional owner list and signatures required. /// /// @param additional_owners The owner account who calls this function cannot be in the additional_owners and there @@ -1341,6 +1373,7 @@ impl EntryFunctionCall { n_vec, } => jwks_update_federated_jwk_set(iss, kid_vec, alg_vec, e_vec, n_vec), ManagedCoinBurn { coin_type, amount } => managed_coin_burn(coin_type, amount), + ManagedCoinDestroyCaps { coin_type } => managed_coin_destroy_caps(coin_type), ManagedCoinInitialize { coin_type, name, @@ -1418,6 +1451,28 @@ impl EntryFunctionCall { metadata_keys, metadata_values, ), + MultisigAccountCreateWithExistingAccountAndRevokeAuthKeyCall { + owners, + num_signatures_required, + metadata_keys, + metadata_values, + } => multisig_account_create_with_existing_account_and_revoke_auth_key_call( + owners, + num_signatures_required, + metadata_keys, + metadata_values, + ), + MultisigAccountCreateWithExistingAccountCall { + owners, + num_signatures_required, + metadata_keys, + metadata_values, + } => multisig_account_create_with_existing_account_call( + owners, + num_signatures_required, + metadata_keys, + metadata_values, + ), MultisigAccountCreateWithOwners { additional_owners, num_signatures_required, @@ -2983,6 +3038,22 @@ pub fn managed_coin_burn(coin_type: TypeTag, amount: u64) -> TransactionPayload )) } +/// Destroys capabilities from the account, so that the user no longer has access to mint or burn. 
+pub fn managed_coin_destroy_caps(coin_type: TypeTag) -> TransactionPayload { + TransactionPayload::EntryFunction(EntryFunction::new( + ModuleId::new( + AccountAddress::new([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 1, + ]), + ident_str!("managed_coin").to_owned(), + ), + ident_str!("destroy_caps").to_owned(), + vec![coin_type], + vec![], + )) +} + /// Initialize new coin `CoinType` in Aptos Blockchain. /// Mint and Burn Capabilities will be stored under `account` in `Capabilities` resource. pub fn managed_coin_initialize( @@ -3282,6 +3353,69 @@ pub fn multisig_account_create_with_existing_account_and_revoke_auth_key( )) } +/// Private entry function that creates a new multisig account on top of an existing account and immediately rotate +/// the origin auth key to 0x0. +/// +/// Note: If the original account is a resource account, this does not revoke all control over it as if any +/// SignerCapability of the resource account still exists, it can still be used to generate the signer for the +/// account. +pub fn multisig_account_create_with_existing_account_and_revoke_auth_key_call( + owners: Vec, + num_signatures_required: u64, + metadata_keys: Vec>, + metadata_values: Vec>, +) -> TransactionPayload { + TransactionPayload::EntryFunction(EntryFunction::new( + ModuleId::new( + AccountAddress::new([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 1, + ]), + ident_str!("multisig_account").to_owned(), + ), + ident_str!("create_with_existing_account_and_revoke_auth_key_call").to_owned(), + vec![], + vec![ + bcs::to_bytes(&owners).unwrap(), + bcs::to_bytes(&num_signatures_required).unwrap(), + bcs::to_bytes(&metadata_keys).unwrap(), + bcs::to_bytes(&metadata_values).unwrap(), + ], + )) +} + +/// Private entry function that creates a new multisig account on top of an existing account. 
+/// +/// This offers a migration path for an existing account with any type of auth key. +/// +/// Note that this does not revoke auth key-based control over the account. Owners should separately rotate the auth +/// key after they are fully migrated to the new multisig account. Alternatively, they can call +/// create_with_existing_account_and_revoke_auth_key_call instead. +pub fn multisig_account_create_with_existing_account_call( + owners: Vec, + num_signatures_required: u64, + metadata_keys: Vec>, + metadata_values: Vec>, +) -> TransactionPayload { + TransactionPayload::EntryFunction(EntryFunction::new( + ModuleId::new( + AccountAddress::new([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 1, + ]), + ident_str!("multisig_account").to_owned(), + ), + ident_str!("create_with_existing_account_call").to_owned(), + vec![], + vec![ + bcs::to_bytes(&owners).unwrap(), + bcs::to_bytes(&num_signatures_required).unwrap(), + bcs::to_bytes(&metadata_keys).unwrap(), + bcs::to_bytes(&metadata_values).unwrap(), + ], + )) +} + /// Creates a new multisig account with the specified additional owner list and signatures required. 
/// /// @param additional_owners The owner account who calls this function cannot be in the additional_owners and there @@ -5498,6 +5632,16 @@ mod decoder { } } + pub fn managed_coin_destroy_caps(payload: &TransactionPayload) -> Option { + if let TransactionPayload::EntryFunction(script) = payload { + Some(EntryFunctionCall::ManagedCoinDestroyCaps { + coin_type: script.ty_args().get(0)?.clone(), + }) + } else { + None + } + } + pub fn managed_coin_initialize(payload: &TransactionPayload) -> Option { if let TransactionPayload::EntryFunction(script) = payload { Some(EntryFunctionCall::ManagedCoinInitialize { @@ -5666,6 +5810,40 @@ mod decoder { } } + pub fn multisig_account_create_with_existing_account_and_revoke_auth_key_call( + payload: &TransactionPayload, + ) -> Option { + if let TransactionPayload::EntryFunction(script) = payload { + Some( + EntryFunctionCall::MultisigAccountCreateWithExistingAccountAndRevokeAuthKeyCall { + owners: bcs::from_bytes(script.args().get(0)?).ok()?, + num_signatures_required: bcs::from_bytes(script.args().get(1)?).ok()?, + metadata_keys: bcs::from_bytes(script.args().get(2)?).ok()?, + metadata_values: bcs::from_bytes(script.args().get(3)?).ok()?, + }, + ) + } else { + None + } + } + + pub fn multisig_account_create_with_existing_account_call( + payload: &TransactionPayload, + ) -> Option { + if let TransactionPayload::EntryFunction(script) = payload { + Some( + EntryFunctionCall::MultisigAccountCreateWithExistingAccountCall { + owners: bcs::from_bytes(script.args().get(0)?).ok()?, + num_signatures_required: bcs::from_bytes(script.args().get(1)?).ok()?, + metadata_keys: bcs::from_bytes(script.args().get(2)?).ok()?, + metadata_values: bcs::from_bytes(script.args().get(3)?).ok()?, + }, + ) + } else { + None + } + } + pub fn multisig_account_create_with_owners( payload: &TransactionPayload, ) -> Option { @@ -6801,6 +6979,10 @@ static SCRIPT_FUNCTION_DECODER_MAP: once_cell::sync::Lazy AccountAddress::new(m.address.into_bytes()), - _ => 
panic!("script not a dependency"), - }; - PackageDep { - account, - package_name, - } + .flat_map(|(name, unit)| match &unit.unit { + CompiledUnit::Module(m) => { + let package_name = name.as_str().to_string(); + let account = AccountAddress::new(m.address.into_bytes()); + + Some(PackageDep { + account, + package_name, + }) + }, + CompiledUnit::Script(_) => None, }) .collect::>() .into_iter() diff --git a/aptos-move/framework/src/natives/aggregator_natives/aggregator_v2.rs b/aptos-move/framework/src/natives/aggregator_natives/aggregator_v2.rs index 521d62bd6ae22..6a277b36bd864 100644 --- a/aptos-move/framework/src/natives/aggregator_natives/aggregator_v2.rs +++ b/aptos-move/framework/src/natives/aggregator_natives/aggregator_v2.rs @@ -6,7 +6,6 @@ use aptos_aggregator::{ bounded_math::{BoundedMath, SignedU128}, delayed_field_extension::DelayedFieldData, resolver::DelayedFieldResolver, - types::code_invariant_error, }; use aptos_gas_algebra::NumBytes; use aptos_gas_schedule::gas_params::natives::aptos_framework::*; @@ -14,9 +13,12 @@ use aptos_native_interface::{ safely_pop_arg, RawSafeNative, SafeNativeBuilder, SafeNativeContext, SafeNativeError, SafeNativeResult, }; -use aptos_types::delayed_fields::{ - calculate_width_for_constant_string, calculate_width_for_integer_embedded_string, - SnapshotToStringFormula, +use aptos_types::{ + delayed_fields::{ + calculate_width_for_constant_string, calculate_width_for_integer_embedded_string, + SnapshotToStringFormula, + }, + error::code_invariant_error, }; use move_binary_format::errors::PartialVMError; use move_vm_runtime::native_functions::NativeFunction; diff --git a/aptos-move/framework/src/natives/aggregator_natives/helpers_v1.rs b/aptos-move/framework/src/natives/aggregator_natives/helpers_v1.rs index 99d1211a5ea10..a633a3df3d9c8 100644 --- a/aptos-move/framework/src/natives/aggregator_natives/helpers_v1.rs +++ b/aptos-move/framework/src/natives/aggregator_natives/helpers_v1.rs @@ -56,19 +56,17 @@ pub(crate) fn 
unpack_aggregator_struct( let pop_with_err = |vec: &mut Vec, msg: &str| { vec.pop() - .map_or(Err(extension_error(msg)), |v| v.value_as::()) + .map_or_else(|| Err(extension_error(msg)), |v| v.value_as::()) }; let limit = pop_with_err(&mut fields, "unable to pop 'limit' field")?; - let key = fields - .pop() - .map_or(Err(extension_error("unable to pop `handle` field")), |v| { - v.value_as::() - })?; - let handle = fields - .pop() - .map_or(Err(extension_error("unable to pop `handle` field")), |v| { - v.value_as::() - })?; + let key = fields.pop().map_or_else( + || Err(extension_error("unable to pop `handle` field")), + |v| v.value_as::(), + )?; + let handle = fields.pop().map_or_else( + || Err(extension_error("unable to pop `handle` field")), + |v| v.value_as::(), + )?; Ok((TableHandle(handle), key, limit)) } diff --git a/aptos-move/move-examples/scripts/minter/build/Minter/bytecode_scripts/main.mv b/aptos-move/move-examples/scripts/minter/build/Minter/bytecode_scripts/main.mv index 1f982eefd3c12..8176dde7dd676 100644 Binary files a/aptos-move/move-examples/scripts/minter/build/Minter/bytecode_scripts/main.mv and b/aptos-move/move-examples/scripts/minter/build/Minter/bytecode_scripts/main.mv differ diff --git a/aptos-move/mvhashmap/Cargo.toml b/aptos-move/mvhashmap/Cargo.toml index 0ac539ed1b839..3fd3f93b1f68f 100644 --- a/aptos-move/mvhashmap/Cargo.toml +++ b/aptos-move/mvhashmap/Cargo.toml @@ -22,8 +22,6 @@ bytes = { workspace = true } claims = { workspace = true } crossbeam = { workspace = true } dashmap = { workspace = true } -derivative = { workspace = true } -move-binary-format = { workspace = true } move-core-types = { workspace = true } move-vm-types = { workspace = true } serde = { workspace = true } diff --git a/aptos-move/mvhashmap/src/lib.rs b/aptos-move/mvhashmap/src/lib.rs index a9f2ffe8cd3b1..1421fe6921735 100644 --- a/aptos-move/mvhashmap/src/lib.rs +++ b/aptos-move/mvhashmap/src/lib.rs @@ -53,10 +53,10 @@ impl< pub fn new() -> MVHashMap { MVHashMap 
{ - data: VersionedData::new(), - group_data: VersionedGroupData::new(), - delayed_fields: VersionedDelayedFields::new(), - modules: VersionedModules::new(), + data: VersionedData::empty(), + group_data: VersionedGroupData::empty(), + delayed_fields: VersionedDelayedFields::empty(), + modules: VersionedModules::empty(), } } diff --git a/aptos-move/mvhashmap/src/types.rs b/aptos-move/mvhashmap/src/types.rs index 62cc81e30eaa6..d386dd7fe7278 100644 --- a/aptos-move/mvhashmap/src/types.rs +++ b/aptos-move/mvhashmap/src/types.rs @@ -1,19 +1,15 @@ // Copyright © Aptos Foundation // SPDX-License-Identifier: Apache-2.0 -use aptos_aggregator::{ - delta_change_set::DeltaOp, - types::{DelayedFieldsSpeculativeError, PanicOr}, -}; +use aptos_aggregator::{delta_change_set::DeltaOp, types::DelayedFieldsSpeculativeError}; use aptos_crypto::hash::HashValue; use aptos_types::{ + error::PanicOr, executable::ExecutableDescriptor, write_set::{TransactionWrite, WriteOpKind}, }; use aptos_vm_types::resolver::ResourceGroupSize; use bytes::Bytes; -use derivative::Derivative; -use move_binary_format::errors::PartialVMError; use move_core_types::value::MoveTypeLayout; use std::sync::{atomic::AtomicU32, Arc}; @@ -30,14 +26,13 @@ pub struct StorageVersion; // TODO: Find better representations for this, a similar one for TxnIndex. pub type Version = Result<(TxnIndex, Incarnation), StorageVersion>; -#[derive(Clone, Copy, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq)] pub(crate) enum Flag { - Done, - Estimate, + Done = 0, + Estimate = 1, } -#[derive(Debug, Derivative)] -#[derivative(PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq)] pub enum MVGroupError { /// The base group contents are not initialized. Uninitialized, @@ -45,8 +40,6 @@ pub enum MVGroupError { TagNotFound, /// A dependency on other transaction has been found during the read. Dependency(TxnIndex), - /// Tag serialization is needed for group size computation. 
- TagSerializationError(#[derivative(PartialEq = "ignore")] PartialVMError), } /// Returned as Err(..) when failed to read from the multi-version data-structure. @@ -81,14 +74,25 @@ impl GroupReadResult { pub fn into_value(self) -> (Option, Option>) { match self { GroupReadResult::Value(maybe_bytes, maybe_layout) => (maybe_bytes, maybe_layout), - _ => unreachable!("Expected a value"), + GroupReadResult::Size(size) => { + unreachable!("Expected group value, found size {:?}", size) + }, + GroupReadResult::Uninitialized => { + unreachable!("Expected group value, found uninitialized") + }, } } pub fn into_size(self) -> ResourceGroupSize { match self { GroupReadResult::Size(size) => size, - _ => unreachable!("Expected size"), + GroupReadResult::Value(maybe_bytes, maybe_layout) => unreachable!( + "Expected size, found value bytes = {:?}, layout = {:?}", + maybe_bytes, maybe_layout + ), + GroupReadResult::Uninitialized => { + unreachable!("Expected group size, found uninitialized") + }, } } } diff --git a/aptos-move/mvhashmap/src/unit_tests/mod.rs b/aptos-move/mvhashmap/src/unit_tests/mod.rs index 4575e04912158..3f189ca68b189 100644 --- a/aptos-move/mvhashmap/src/unit_tests/mod.rs +++ b/aptos-move/mvhashmap/src/unit_tests/mod.rs @@ -298,7 +298,7 @@ fn create_write_read_placeholder_struct() { fn materialize_delta_shortcut() { use MVDataOutput::*; - let vd: VersionedData>, TestValue> = VersionedData::new(); + let vd: VersionedData>, TestValue> = VersionedData::empty(); let ap = KeyType(b"/foo/b".to_vec()); let limit = 10000; @@ -343,7 +343,7 @@ fn materialize_delta_shortcut() { #[test] #[should_panic] fn aggregator_base_mismatch() { - let vd: VersionedData>, TestValue> = VersionedData::new(); + let vd: VersionedData>, TestValue> = VersionedData::empty(); let ap = KeyType(b"/foo/b".to_vec()); vd.set_base_value( @@ -361,7 +361,7 @@ fn aggregator_base_mismatch() { #[test] #[should_panic] fn commit_without_deltas() { - let vd: VersionedData>, TestValue> = VersionedData::new(); 
+ let vd: VersionedData>, TestValue> = VersionedData::empty(); let ap = KeyType(b"/foo/b".to_vec()); // Must panic as there are no deltas at all. @@ -371,7 +371,7 @@ fn commit_without_deltas() { #[test] #[should_panic] fn commit_without_entry() { - let vd: VersionedData>, TestValue> = VersionedData::new(); + let vd: VersionedData>, TestValue> = VersionedData::empty(); let ap = KeyType(b"/foo/b".to_vec()); vd.add_delta(ap.clone(), 8, delta_add(20, 1000)); diff --git a/aptos-move/mvhashmap/src/unit_tests/proptest_types.rs b/aptos-move/mvhashmap/src/unit_tests/proptest_types.rs index 09c532229d584..9cecda2c82e76 100644 --- a/aptos-move/mvhashmap/src/unit_tests/proptest_types.rs +++ b/aptos-move/mvhashmap/src/unit_tests/proptest_types.rs @@ -13,11 +13,12 @@ use aptos_types::{ state_store::state_value::StateValue, write_set::{TransactionWrite, WriteOpKind}, }; +use aptos_vm_types::resolver::ResourceGroupSize; use bytes::Bytes; use claims::assert_none; use proptest::{collection::vec, prelude::*, sample::Index, strategy::Strategy}; use std::{ - collections::{BTreeMap, HashMap}, + collections::{BTreeMap, HashMap, HashSet}, fmt::Debug, hash::Hash, sync::{ @@ -240,9 +241,21 @@ where let value = Value::new(None); let idx = idx as TxnIndex; if test_group { + map.group_data + .set_raw_base_values(key.clone(), vec![]) + .unwrap(); map.group_data() - .write(key.clone(), idx, 0, vec![(5, (value, None))]); - map.group_data().mark_estimate(&key, idx); + .write( + key.clone(), + idx, + 0, + vec![(5, (value, None))], + ResourceGroupSize::zero_combined(), + HashSet::new(), + ) + .unwrap(); + map.group_data() + .mark_estimate(&key, idx, [5usize].into_iter().collect()); } else { map.data().write(key.clone(), idx, 0, Arc::new(value), None); map.data().mark_estimate(&key, idx); @@ -293,12 +306,12 @@ where assert_value(v); break; }, - Err(MVGroupError::Uninitialized) => { + Err(MVGroupError::Uninitialized) + | Err(MVGroupError::TagNotFound) => { assert_eq!(baseline, 
ExpectedOutput::NotInMap, "{:?}", idx); break; }, Err(MVGroupError::Dependency(_i)) => (), - Err(_) => unreachable!("Unreachable error cases for test"), } } else { match map @@ -350,7 +363,15 @@ where let value = Value::new(None); if test_group { map.group_data() - .write(key, idx as TxnIndex, 1, vec![(5, (value, None))]); + .write( + key, + idx as TxnIndex, + 1, + vec![(5, (value, None))], + ResourceGroupSize::zero_combined(), + HashSet::new(), + ) + .unwrap(); } else { map.data() .write(key, idx as TxnIndex, 1, Arc::new(value), None); @@ -361,7 +382,15 @@ where let value = Value::new(Some(v.clone())); if test_group { map.group_data() - .write(key, idx as TxnIndex, 1, vec![(5, (value, None))]); + .write( + key, + idx as TxnIndex, + 1, + vec![(5, (value, None))], + ResourceGroupSize::zero_combined(), + HashSet::new(), + ) + .unwrap(); } else { map.data() .write(key, idx as TxnIndex, 1, Arc::new(value), None); diff --git a/aptos-move/mvhashmap/src/unsync_map.rs b/aptos-move/mvhashmap/src/unsync_map.rs index f3bbcef5f404d..7a11c73b1d780 100644 --- a/aptos-move/mvhashmap/src/unsync_map.rs +++ b/aptos-move/mvhashmap/src/unsync_map.rs @@ -6,15 +6,15 @@ use crate::{ utils::module_hash, BlockStateStats, }; -use aptos_aggregator::types::{code_invariant_error, DelayedFieldValue}; +use anyhow::anyhow; +use aptos_aggregator::types::DelayedFieldValue; use aptos_crypto::hash::HashValue; use aptos_types::{ - delayed_fields::PanicError, + error::{code_invariant_error, PanicError}, executable::{Executable, ExecutableDescriptor, ModulePath}, write_set::TransactionWrite, }; -use aptos_vm_types::resource_group_adapter::group_size_as_sum; -use move_binary_format::errors::PartialVMResult; +use aptos_vm_types::{resolver::ResourceGroupSize, resource_group_adapter::group_size_as_sum}; use move_core_types::value::MoveTypeLayout; use serde::Serialize; use std::{ @@ -44,7 +44,7 @@ pub struct UnsyncMap< resource_map: RefCell>>, // Optional hash can store the hash of the module to avoid 
re-computations. module_map: RefCell, Option)>>, - group_cache: RefCell>>>>, + group_cache: RefCell>, ResourceGroupSize)>>>, executable_cache: RefCell>>, executable_bytes: RefCell, delayed_field_map: RefCell>, @@ -102,18 +102,31 @@ impl< &self, group_key: K, base_values: impl IntoIterator, - ) { - let base_map = base_values + ) -> anyhow::Result<()> { + let base_map: HashMap> = base_values .into_iter() .map(|(t, v)| (t, ValueWithLayout::RawFromStorage(Arc::new(v)))) .collect(); + let base_size = group_size_as_sum( + base_map + .iter() + .flat_map(|(t, v)| v.bytes_len().map(|s| (t, s))), + ) + .map_err(|e| { + anyhow!( + "Tag serialization error in resource group at {:?}: {:?}", + group_key.clone(), + e + ) + })?; assert!( self.group_cache .borrow_mut() - .insert(group_key, RefCell::new(base_map)) + .insert(group_key, RefCell::new((base_map, base_size))) .is_none(), "UnsyncMap group cache must be empty to provide base values" ); + Ok(()) } pub fn update_tagged_base_value_with_layout( @@ -128,19 +141,15 @@ impl< .get_mut(&group_key) .expect("Unable to fetch the entry for the group key in group_cache") .borrow_mut() + .0 .insert(tag, ValueWithLayout::Exchanged(Arc::new(value), layout)); } - pub fn get_group_size(&self, group_key: &K) -> PartialVMResult { - Ok(match self.group_cache.borrow().get(group_key) { - Some(group_map) => GroupReadResult::Size(group_size_as_sum( - group_map - .borrow() - .iter() - .flat_map(|(t, v)| v.bytes_len().map(|s| (t, s))), - )?), + pub fn get_group_size(&self, group_key: &K) -> GroupReadResult { + match self.group_cache.borrow().get(group_key) { + Some(entry) => GroupReadResult::Size(entry.borrow().1), None => GroupReadResult::Uninitialized, - }) + } } pub fn fetch_group_tagged_data( @@ -153,6 +162,7 @@ impl< |group_map| { group_map .borrow() + .0 .get(value_tag) .cloned() .ok_or(UnsyncGroupError::TagNotFound) @@ -161,17 +171,41 @@ impl< } /// Contains the latest group ops for the given group key. 
- pub fn finalize_group(&self, group_key: &K) -> impl Iterator)> { - self.group_cache - .borrow() + pub fn finalize_group( + &self, + group_key: &K, + ) -> ( + impl Iterator)>, + ResourceGroupSize, + ) { + let binding = self.group_cache.borrow(); + let group = binding .get(group_key) .expect("Resource group must be cached") - .borrow() - .clone() - .into_iter() + .borrow(); + + (group.0.clone().into_iter(), group.1) } - pub fn insert_group_op( + pub fn insert_group_ops( + &self, + group_key: &K, + group_ops: impl IntoIterator>))>, + group_size: ResourceGroupSize, + ) -> Result<(), PanicError> { + for (value_tag, (group_op, maybe_layout)) in group_ops.into_iter() { + self.insert_group_op(group_key, value_tag, group_op, maybe_layout)?; + } + self.group_cache + .borrow_mut() + .get_mut(group_key) + .expect("Resource group must be cached") + .borrow_mut() + .1 = group_size; + Ok(()) + } + + fn insert_group_op( &self, group_key: &K, value_tag: T, @@ -186,6 +220,7 @@ impl< .get_mut(group_key) .expect("Resource group must be cached") .borrow_mut() + .0 .entry(value_tag.clone()), v.write_op_kind(), ) { @@ -199,11 +234,12 @@ impl< entry.insert(ValueWithLayout::Exchanged(Arc::new(v), maybe_layout)); }, (l, r) => { - println!("WriteOp kind {:?} not consistent with previous value at tag {:?}. l: {:?}, r: {:?}", v.write_op_kind(), value_tag, l, r); return Err(code_invariant_error(format!( - "WriteOp kind {:?} not consistent with previous value at tag {:?}", + "WriteOp kind {:?} not consistent with previous value at tag {:?}. 
Existing: {:?}, new: {:?}", v.write_op_kind(), - value_tag + value_tag, + l, + r, ))); }, } @@ -227,6 +263,7 @@ impl< self.group_cache.borrow().get(key).map(|group_map| { group_map .borrow() + .0 .iter() .map(|(tag, value)| (Arc::new(tag.clone()), value.clone())) .collect() @@ -327,7 +364,7 @@ mod test { map: &UnsyncMap>, usize, TestValue, ExecutableTestType, ()>, key: &KeyType>, ) -> HashMap> { - map.finalize_group(key).collect() + map.finalize_group(key).0.collect() } // TODO[agg_v2](test) Add tests with non trivial layout @@ -340,7 +377,8 @@ mod test { ap.clone(), // base tag 1, 2, 3 (1..4).map(|i| (i, TestValue::with_kind(i, true))), - ); + ) + .unwrap(); assert_ok!(map.insert_group_op(&ap, 2, TestValue::with_kind(202, false), None)); assert_ok!(map.insert_group_op(&ap, 3, TestValue::with_kind(203, false), None)); let committed = finalize_group_as_hashmap(&map, &ap); @@ -424,14 +462,14 @@ mod test { let ap = KeyType(b"/foo/f".to_vec()); let map = UnsyncMap::>, usize, TestValue, ExecutableTestType, ()>::new(); - map.set_group_base_values( + assert_ok!(map.set_group_base_values( ap.clone(), (1..4).map(|i| (i, TestValue::with_kind(i, true))), - ); - map.set_group_base_values( + )); + assert_ok!(map.set_group_base_values( ap.clone(), (1..4).map(|i| (i, TestValue::with_kind(i, true))), - ); + )); } #[should_panic] @@ -449,7 +487,7 @@ mod test { let ap = KeyType(b"/foo/b".to_vec()); let map = UnsyncMap::>, usize, TestValue, ExecutableTestType, ()>::new(); - let _ = map.finalize_group(&ap).collect::>(); + let _ = map.finalize_group(&ap).0.collect::>(); } #[test] @@ -457,13 +495,14 @@ mod test { let ap = KeyType(b"/foo/f".to_vec()); let map = UnsyncMap::>, usize, TestValue, ExecutableTestType, ()>::new(); - assert_ok_eq!(map.get_group_size(&ap), GroupReadResult::Uninitialized); + assert_eq!(map.get_group_size(&ap), GroupReadResult::Uninitialized); map.set_group_base_values( ap.clone(), // base tag 1, 2, 3, 4 (1..5).map(|i| (i, TestValue::creation_with_len(1))), - ); + 
) + .unwrap(); let tag: usize = 5; let one_entry_len = TestValue::creation_with_len(1).bytes().unwrap().len(); @@ -471,13 +510,9 @@ mod test { let three_entry_len = TestValue::creation_with_len(3).bytes().unwrap().len(); let four_entry_len = TestValue::creation_with_len(4).bytes().unwrap().len(); - let exp_size = group_size_as_sum(vec![(&tag, one_entry_len); 4].into_iter()).unwrap(); - assert_ok_eq!(map.get_group_size(&ap), GroupReadResult::Size(exp_size)); + let base_size = group_size_as_sum(vec![(&tag, one_entry_len); 4].into_iter()).unwrap(); + assert_eq!(map.get_group_size(&ap), GroupReadResult::Size(base_size)); - assert_err!(map.insert_group_op(&ap, 0, TestValue::modification_with_len(2), None)); - assert_ok!(map.insert_group_op(&ap, 0, TestValue::creation_with_len(2), None)); - assert_err!(map.insert_group_op(&ap, 1, TestValue::creation_with_len(2), None)); - assert_ok!(map.insert_group_op(&ap, 1, TestValue::modification_with_len(2), None)); let exp_size = group_size_as_sum(vec![(&tag, two_entry_len); 2].into_iter().chain(vec![ ( &tag, @@ -486,10 +521,26 @@ mod test { 3 ])) .unwrap(); - assert_ok_eq!(map.get_group_size(&ap), GroupReadResult::Size(exp_size)); + assert_err!(map.insert_group_ops( + &ap, + vec![(0, (TestValue::modification_with_len(2), None))], + exp_size, + )); + assert_err!(map.insert_group_ops( + &ap, + vec![(1, (TestValue::creation_with_len(2), None))], + exp_size, + )); + assert_ok!(map.insert_group_ops( + &ap, + vec![ + (0, (TestValue::creation_with_len(2), None)), + (1, (TestValue::modification_with_len(2), None)) + ], + exp_size + )); + assert_eq!(map.get_group_size(&ap), GroupReadResult::Size(exp_size)); - assert_ok!(map.insert_group_op(&ap, 4, TestValue::modification_with_len(3), None)); - assert_ok!(map.insert_group_op(&ap, 5, TestValue::creation_with_len(3), None)); let exp_size = group_size_as_sum( vec![(&tag, one_entry_len); 2] .into_iter() @@ -497,10 +548,16 @@ mod test { .chain(vec![(&tag, three_entry_len); 2]), ) .unwrap(); - 
assert_ok_eq!(map.get_group_size(&ap), GroupReadResult::Size(exp_size)); + assert_ok!(map.insert_group_ops( + &ap, + vec![ + (4, (TestValue::modification_with_len(3), None)), + (5, (TestValue::creation_with_len(3), None)), + ], + exp_size + )); + assert_eq!(map.get_group_size(&ap), GroupReadResult::Size(exp_size)); - assert_ok!(map.insert_group_op(&ap, 0, TestValue::modification_with_len(4), None)); - assert_ok!(map.insert_group_op(&ap, 1, TestValue::modification_with_len(4), None)); let exp_size = group_size_as_sum( vec![(&tag, one_entry_len); 2] .into_iter() @@ -508,7 +565,15 @@ mod test { .chain(vec![(&tag, four_entry_len); 2]), ) .unwrap(); - assert_ok_eq!(map.get_group_size(&ap), GroupReadResult::Size(exp_size)); + assert_ok!(map.insert_group_ops( + &ap, + vec![ + (0, (TestValue::modification_with_len(4), None)), + (1, (TestValue::modification_with_len(4), None)) + ], + exp_size + )); + assert_eq!(map.get_group_size(&ap), GroupReadResult::Size(exp_size)); } #[test] @@ -526,7 +591,8 @@ mod test { ap.clone(), // base tag 1, 2, 3, 4 (1..5).map(|i| (i, TestValue::creation_with_len(i))), - ); + ) + .unwrap(); for i in 1..5 { assert_ok_eq!( diff --git a/aptos-move/mvhashmap/src/versioned_data.rs b/aptos-move/mvhashmap/src/versioned_data.rs index a9eb066f0617d..aed65b5a9bd40 100644 --- a/aptos-move/mvhashmap/src/versioned_data.rs +++ b/aptos-move/mvhashmap/src/versioned_data.rs @@ -3,7 +3,7 @@ // SPDX-License-Identifier: Apache-2.0 use crate::types::{ - Flag, Incarnation, MVDataError, MVDataOutput, ShiftedTxnIndex, TxnIndex, ValueWithLayout, + Incarnation, MVDataError, MVDataOutput, ShiftedTxnIndex, TxnIndex, ValueWithLayout, }; use anyhow::Result; use aptos_aggregator::delta_change_set::DeltaOp; @@ -17,19 +17,24 @@ use std::{ fmt::Debug, hash::Hash, sync::{ - atomic::{AtomicU64, Ordering}, + atomic::{AtomicBool, AtomicU64, Ordering}, Arc, }, }; +pub(crate) const FLAG_DONE: bool = false; +pub(crate) const FLAG_ESTIMATE: bool = true; + /// Every entry in shared 
multi-version data-structure has an "estimate" flag /// and some content. -struct Entry { +/// TODO: can remove pub(crate) once aggregator V1 is deprecated. +pub(crate) struct Entry { /// Actual contents. - cell: EntryCell, + pub(crate) value: V, - /// Used to mark the entry as a "write estimate". - flag: Flag, + /// Used to mark the entry as a "write estimate". Stored as an atomic so + /// marking an estimate can proceed w. read lock. + flag: AtomicBool, } /// Represents the content of a single entry in multi-version data-structure. @@ -49,7 +54,7 @@ enum EntryCell { /// A versioned value internally is represented as a BTreeMap from indices of /// transactions that update the given access path & the corresponding entries. struct VersionedValue { - versioned_map: BTreeMap>>, + versioned_map: BTreeMap>>>, } /// Maps each key (access path) to an internal versioned value representation. @@ -58,36 +63,39 @@ pub struct VersionedData { total_base_value_size: AtomicU64, } -impl Entry { - fn new_write_from(incarnation: Incarnation, value: ValueWithLayout) -> Entry { - Entry { - cell: EntryCell::Write(incarnation, value), - flag: Flag::Done, - } - } +fn new_write_entry(incarnation: Incarnation, value: ValueWithLayout) -> Entry> { + Entry::new(EntryCell::Write(incarnation, value)) +} - fn new_delta_from(data: DeltaOp) -> Entry { +fn new_delta_entry(data: DeltaOp) -> Entry> { + Entry::new(EntryCell::Delta(data, None)) +} + +impl Entry { + pub(crate) fn new(value: V) -> Entry { Entry { - cell: EntryCell::Delta(data, None), - flag: Flag::Done, + value, + flag: AtomicBool::new(FLAG_DONE), } } - fn flag(&self) -> Flag { - self.flag + pub(crate) fn is_estimate(&self) -> bool { + self.flag.load(Ordering::Relaxed) == FLAG_ESTIMATE } - fn mark_estimate(&mut self) { - self.flag = Flag::Estimate; + pub(crate) fn mark_estimate(&self) { + self.flag.store(FLAG_ESTIMATE, Ordering::Relaxed); } +} +impl Entry> { // The entry must be a delta, will record the provided value as a base value // 
shortcut (the value in storage before block execution). If a value was already // recorded, the new value is asserted for equality. fn record_delta_shortcut(&mut self, value: u128) { use crate::versioned_data::EntryCell::Delta; - self.cell = match self.cell { + self.value = match self.value { Delta(delta_op, maybe_shortcut) => { if let Some(prev_value) = maybe_shortcut { assert_eq!(value, prev_value, "Recording different shortcuts"); @@ -121,14 +129,14 @@ impl VersionedValue { // During traversal, all aggregator deltas have to be accumulated together. let mut accumulator: Option> = None; while let Some((idx, entry)) = iter.next_back() { - if entry.flag() == Flag::Estimate { + if entry.is_estimate() { // Found a dependency. return Err(Dependency( idx.idx().expect("May not depend on storage version"), )); } - match (&entry.cell, accumulator.as_mut()) { + match (&entry.value, accumulator.as_mut()) { (EntryCell::Write(incarnation, data), None) => { // Resolve to the write if no deltas were applied in between. return Ok(Versioned( @@ -214,7 +222,7 @@ impl VersionedValue { } impl VersionedData { - pub(crate) fn new() -> Self { + pub(crate) fn empty() -> Self { Self { values: DashMap::new(), total_base_value_size: AtomicU64::new(0), @@ -233,16 +241,16 @@ impl VersionedData { let mut v = self.values.entry(key).or_default(); v.versioned_map.insert( ShiftedTxnIndex::new(txn_idx), - CachePadded::new(Entry::new_delta_from(delta)), + CachePadded::new(new_delta_entry(delta)), ); } /// Mark an entry from transaction 'txn_idx' at access path 'key' as an estimated write /// (for future incarnation). Will panic if the entry is not in the data-structure. 
pub fn mark_estimate(&self, key: &K, txn_idx: TxnIndex) { - let mut v = self.values.get_mut(key).expect("Path must exist"); + let v = self.values.get(key).expect("Path must exist"); v.versioned_map - .get_mut(&ShiftedTxnIndex::new(txn_idx)) + .get(&ShiftedTxnIndex::new(txn_idx)) .expect("Entry by the txn must exist to mark estimate") .mark_estimate(); } @@ -295,10 +303,10 @@ impl VersionedData { self.total_base_value_size .fetch_add(base_size as u64, Ordering::Relaxed); } - v.insert(CachePadded::new(Entry::new_write_from(0, value))); + v.insert(CachePadded::new(new_write_entry(0, value))); }, Occupied(mut o) => { - if let EntryCell::Write(i, existing_value) = &o.get().cell { + if let EntryCell::Write(i, existing_value) = &o.get().value { assert!(*i == 0); match (existing_value, &value) { (RawFromStorage(ev), RawFromStorage(v)) => { @@ -311,7 +319,7 @@ impl VersionedData { }, (RawFromStorage(_), Exchanged(_, _)) => { // Received more info, update. - o.insert(CachePadded::new(Entry::new_write_from(0, value))); + o.insert(CachePadded::new(new_write_entry(0, value))); }, (Exchanged(ev, e_layout), Exchanged(v, layout)) => { // base value may have already been provided by another transaction @@ -345,7 +353,7 @@ impl VersionedData { let mut v = self.values.entry(key).or_default(); let prev_entry = v.versioned_map.insert( ShiftedTxnIndex::new(txn_idx), - CachePadded::new(Entry::new_write_from( + CachePadded::new(new_write_entry( incarnation, ValueWithLayout::Exchanged(data, maybe_layout), )), @@ -353,7 +361,7 @@ impl VersionedData { // Assert that the previous entry for txn_idx, if present, had lower incarnation. 
assert!(prev_entry.map_or(true, |entry| -> bool { - if let EntryCell::Write(i, _) = entry.cell { + if let EntryCell::Write(i, _) = entry.value { i < incarnation } else { true @@ -376,7 +384,7 @@ impl VersionedData { let mut v = self.values.entry(key).or_default(); let prev_entry = v.versioned_map.insert( ShiftedTxnIndex::new(txn_idx), - CachePadded::new(Entry::new_write_from( + CachePadded::new(new_write_entry( incarnation, ValueWithLayout::Exchanged(arc_data.clone(), None), )), @@ -384,7 +392,7 @@ impl VersionedData { // Changes versioned metadata that was stored. prev_entry.map_or(true, |entry| -> bool { - if let EntryCell::Write(_, existing_v) = &entry.cell { + if let EntryCell::Write(_, existing_v) = &entry.value { arc_data.as_state_value_metadata() != existing_v .extract_value_no_layout() diff --git a/aptos-move/mvhashmap/src/versioned_delayed_fields.rs b/aptos-move/mvhashmap/src/versioned_delayed_fields.rs index 5a5de5f44f09f..042e0024c8dd0 100644 --- a/aptos-move/mvhashmap/src/versioned_delayed_fields.rs +++ b/aptos-move/mvhashmap/src/versioned_delayed_fields.rs @@ -4,9 +4,9 @@ use crate::types::{AtomicTxnIndex, MVDelayedFieldsError, TxnIndex}; use aptos_aggregator::{ delayed_change::{ApplyBase, DelayedApplyEntry, DelayedEntry}, - types::{code_invariant_error, DelayedFieldValue, PanicOr, ReadPosition}, + types::{DelayedFieldValue, ReadPosition}, }; -use aptos_types::delayed_fields::PanicError; +use aptos_types::error::{code_invariant_error, PanicError, PanicOr}; use claims::assert_matches; use crossbeam::utils::CachePadded; use dashmap::DashMap; @@ -201,8 +201,12 @@ impl VersionedValue { } // Given a transaction index which should be committed next, returns the latest value - // below this version, or an error if such a value does not exist. - fn read_latest_committed_value( + // below this version, or if no such value exists, then the delayed field must have been + // created in the same block. 
In this case predict the value in the first (lowest) entry, + // or an error if such an entry cannot be found (must be due to speculation). The lowest + // entry is picked without regards to the indices, as it's for optimistic prediction + // purposes only (better to have some value than error). + fn read_latest_predicted_value( &self, next_idx_to_commit: TxnIndex, ) -> Result { @@ -212,10 +216,15 @@ impl VersionedValue { .range(0..next_idx_to_commit) .next_back() .map_or_else( - || { - self.base_value - .clone() - .ok_or(MVDelayedFieldsError::NotFound) + || match &self.base_value { + Some(value) => Ok(value.clone()), + None => match self.versioned_map.first_key_value() { + Some((_, entry)) => match entry.as_ref().deref() { + Value(v, _) => Ok(v.clone()), + Apply(_) | Estimate(_) => Err(MVDelayedFieldsError::NotFound), + }, + None => Err(MVDelayedFieldsError::NotFound), + }, }, |(_, entry)| match entry.as_ref().deref() { Value(v, _) => Ok(v.clone()), @@ -347,10 +356,12 @@ pub trait TVersionedDelayedFieldView { txn_idx: TxnIndex, ) -> Result>; - /// Returns the committed value from largest transaction index that is - /// smaller than the given current_txn_idx (read_position defined whether - /// inclusively or exclusively from the current transaction itself). - fn read_latest_committed_value( + /// Returns the committed value from largest transaction index that is smaller than the + /// given current_txn_idx (read_position defined whether inclusively or exclusively from + /// the current transaction itself). If such a value does not exist, the value might + /// be created in the current block, and the value from the first (lowest) entry is taken + /// as the prediction. + fn read_latest_predicted_value( &self, id: &K, current_txn_idx: TxnIndex, @@ -391,7 +402,7 @@ impl VersionedDelayedFields { /// Part of the big multi-versioned data-structure, which creates different types of /// versioned maps (including this one for delayed fields), and delegates access. 
Hence, /// new should only be used from the crate. - pub(crate) fn new() -> Self { + pub(crate) fn empty() -> Self { Self { values: DashMap::new(), next_idx_to_commit: CachePadded::new(AtomicTxnIndex::new(0)), @@ -536,7 +547,7 @@ impl VersionedDelayedFields { // remove delta in the commit VersionEntry::Value(v, Some(_)) => Some(v.clone()), VersionEntry::Apply(AggregatorDelta { delta }) => { - let prev_value = versioned_value.read_latest_committed_value(idx_to_commit) + let prev_value = versioned_value.read_latest_predicted_value(idx_to_commit) .map_err(|e| CommitError::CodeInvariantError(format!("Cannot read latest committed value for Apply(AggregatorDelta) during commit: {:?}", e)))?; if let DelayedFieldValue::Aggregator(base) = prev_value { let new_value = delta.apply_to(base).map_err(|e| { @@ -584,7 +595,7 @@ impl VersionedDelayedFields { let prev_value = self.values .get_mut(&base_aggregator) .ok_or_else(|| CommitError::CodeInvariantError("Cannot find base_aggregator for Apply(SnapshotDelta) during commit".to_string()))? - .read_latest_committed_value(idx_to_commit) + .read_latest_predicted_value(idx_to_commit) .map_err(|e| CommitError::CodeInvariantError(format!("Cannot read latest committed value for base aggregator for ApplySnapshotDelta) during commit: {:?}", e)))?; if let DelayedFieldValue::Aggregator(base) = prev_value { @@ -615,7 +626,7 @@ impl VersionedDelayedFields { .get_mut(&base_snapshot) .ok_or_else(|| CommitError::CodeInvariantError("Cannot find base_aggregator for Apply(SnapshotDelta) during commit".to_string()))? 
// Read values committed in this commit - .read_latest_committed_value(idx_to_commit + 1) + .read_latest_predicted_value(idx_to_commit + 1) .map_err(|e| CommitError::CodeInvariantError(format!("Cannot read latest committed value for base aggregator for ApplySnapshotDelta) during commit: {:?}", e)))?; if let DelayedFieldValue::Snapshot(base) = prev_value { @@ -705,7 +716,7 @@ impl TVersionedDelayedFieldView /// Returns the committed value from largest transaction index that is /// smaller than the given current_txn_idx (read_position defined whether /// inclusively or exclusively from the current transaction itself). - fn read_latest_committed_value( + fn read_latest_predicted_value( &self, id: &K, current_txn_idx: TxnIndex, @@ -715,7 +726,7 @@ impl TVersionedDelayedFieldView .get_mut(id) .ok_or(MVDelayedFieldsError::NotFound) .and_then(|v| { - v.read_latest_committed_value( + v.read_latest_predicted_value( match read_position { ReadPosition::BeforeCurrentTxn => current_txn_idx, ReadPosition::AfterCurrentTxn => current_txn_idx + 1, @@ -1194,7 +1205,50 @@ mod test { if let Some(entry) = aggregator_entry(type_index) { v.insert_speculative_value(10, entry).unwrap(); } - let _ = v.read_latest_committed_value(11); + let _ = v.read_latest_predicted_value(11); + } + + #[test_case(APPLY_AGGREGATOR)] + #[test_case(APPLY_SNAPSHOT)] + #[test_case(APPLY_DERIVED)] + fn read_first_entry_not_value(type_index: usize) { + let mut v = VersionedValue::new(None); + assert_matches!( + v.read_latest_predicted_value(11), + Err(MVDelayedFieldsError::NotFound) + ); + + if let Some(entry) = aggregator_entry(type_index) { + v.insert_speculative_value(12, entry).unwrap(); + } + assert_matches!( + v.read_latest_predicted_value(11), + Err(MVDelayedFieldsError::NotFound) + ); + } + + #[test] + fn read_first_entry_value() { + let mut v = VersionedValue::new(None); + v.insert_speculative_value(13, aggregator_entry(APPLY_AGGREGATOR).unwrap()) + .unwrap(); + v.insert_speculative_value(12, 
aggregator_entry(VALUE_AGGREGATOR).unwrap()) + .unwrap(); + + assert_matches!( + v.read_latest_predicted_value(11), + Ok(DelayedFieldValue::Aggregator(10)) + ); + + v.insert_speculative_value( + 9, + VersionEntry::Value(DelayedFieldValue::Aggregator(9), None), + ) + .unwrap(); + assert_matches!( + v.read_latest_predicted_value(11), + Ok(DelayedFieldValue::Aggregator(9)) + ); } #[should_panic] @@ -1204,11 +1258,11 @@ mod test { v.insert_speculative_value(3, aggregator_entry(VALUE_AGGREGATOR).unwrap()) .unwrap(); v.mark_estimate(3); - let _ = v.read_latest_committed_value(11); + let _ = v.read_latest_predicted_value(11); } #[test] - fn read_latest_committed_value() { + fn read_latest_predicted_value() { let mut v = VersionedValue::new(Some(DelayedFieldValue::Aggregator(5))); v.insert_speculative_value(2, aggregator_entry(VALUE_AGGREGATOR).unwrap()) .unwrap(); @@ -1219,15 +1273,15 @@ mod test { .unwrap(); assert_ok_eq!( - v.read_latest_committed_value(5), + v.read_latest_predicted_value(5), DelayedFieldValue::Aggregator(15) ); assert_ok_eq!( - v.read_latest_committed_value(4), + v.read_latest_predicted_value(4), DelayedFieldValue::Aggregator(10) ); assert_ok_eq!( - v.read_latest_committed_value(2), + v.read_latest_predicted_value(2), DelayedFieldValue::Aggregator(5) ); } diff --git a/aptos-move/mvhashmap/src/versioned_group_data.rs b/aptos-move/mvhashmap/src/versioned_group_data.rs index bf728828d64d6..afa857a49a9e1 100644 --- a/aptos-move/mvhashmap/src/versioned_group_data.rs +++ b/aptos-move/mvhashmap/src/versioned_group_data.rs @@ -1,473 +1,259 @@ // Copyright © Aptos Foundation // SPDX-License-Identifier: Apache-2.0 -use crate::types::{ - Flag, Incarnation, MVGroupError, ShiftedTxnIndex, TxnIndex, ValueWithLayout, Version, +use crate::{ + types::{ + Incarnation, MVDataError, MVDataOutput, MVGroupError, ShiftedTxnIndex, TxnIndex, + ValueWithLayout, Version, + }, + versioned_data::Entry as SizeEntry, + VersionedData, +}; +use anyhow::{anyhow, bail}; +use 
aptos_types::{ + error::{code_invariant_error, PanicError}, + write_set::{TransactionWrite, WriteOpKind}, }; -use anyhow::bail; -use aptos_types::write_set::{TransactionWrite, WriteOpKind}; use aptos_vm_types::{resolver::ResourceGroupSize, resource_group_adapter::group_size_as_sum}; -use claims::{assert_matches, assert_none, assert_some}; -use crossbeam::utils::CachePadded; +use claims::assert_some; use dashmap::DashMap; use move_core_types::value::MoveTypeLayout; use serde::Serialize; use std::{ collections::{ - btree_map::{self, BTreeMap}, - HashMap, + btree_map::{BTreeMap, Entry::Vacant}, + HashSet, }, fmt::Debug, hash::Hash, sync::Arc, }; -struct GroupEntry { - incarnation: Incarnation, - // Note: can be a raw pointer (different data-structure holds the value during the - // lifetime), but would require unsafe access. - value: ValueWithLayout, - flag: Flag, -} - -impl GroupEntry { - fn new(incarnation: Incarnation, value: ValueWithLayout) -> Self { - Self { - incarnation, - value, - flag: Flag::Done, - } - } -} - -/// Represents a group value, i.e. a key that does not correspond to a single value, -/// but instead a collection of values each associated with a tag. -/// -/// Implementation note: due to DashMap in VersionedGroupData, the updates are atomic. -/// If this changes, we must maintain invariants on insertion / deletion order among -/// members (e.g. versioned_map then idx_to_update, deletion vice versa). -pub(crate) struct VersionedGroupValue { - /// While versioned_map maps tags to versioned entries for the tag, idx_to_update - /// maps a transaction index to all corresponding group updates. ShiftedTxnIndex is used - /// to dedicated index 0 for base (storage version, prior to block execution) values. - versioned_map: HashMap>>>, - /// Mapping transaction indices to the set of group member updates. 
As it is required - /// to provide base values from storage, and since all versions including storage are - /// represented in the same data-structure, the key set corresponds to all relevant - /// tags (group membership is not fixed, see aip-9). - /// Note: if we do not garbage collect final idx_to_update contents until the end of - /// block execution (lifetime of the data-structure), then we can have other structures - /// hold raw pointers to the values as an optimization. - idx_to_update: BTreeMap>>>, - - /// Group contents corresponding to the latest committed version. - committed_group: HashMap>, - - /// Group size has changed between speculative executions. Useful to know for the best - /// heuristic behavior when reading the group size (e.g. wait on the dependency or not). - size_changed: bool, +#[derive(Default)] +struct VersionedGroupSize { + size_entries: BTreeMap>, + // Determines whether it is safe for size queries to read the value from an entry marked as + // ESTIMATE. The heuristic checks on every write, whether the same size would be returned + // after the respective write took effect. Once set, the flag remains set to true. + // TODO: Handle remove similarly. May want to depend on transaction indices, i.e. if size + // has changed early in the block, it may not have an influence on much later transactions. + size_has_changed: bool, } /// Maps each key (access path) to an internal VersionedValue. pub struct VersionedGroupData { - group_values: DashMap>, + // TODO: Optimize the key represetantion to avoid cloning and concatenation for APIs + // such as get, where only & of the key is needed. + values: VersionedData<(K, T), V>, + // TODO: Once AggregatorV1 is deprecated (no V: TransactionWrite trait bound), + // switch to VersionedVersionedData. + // If an entry exists for a group key in Dashmap, the group is considered initialized. 
+ group_sizes: DashMap, + + // Stores a set of tags for this group, basically a superset of all tags encountered in + // group related APIs. The accesses are synchronized with group size entry (for now), + // but it is stored separately for conflict free read-path for txn materialization + // (as the contents of group_tags are used in preparing finalized group contents). + // Note: The contents of group_tags are non-deterministic, but finalize_group filters + // out tags for which the latest value does not exist. The implementation invariant + // that the contents observed in the multi-versioned map after index is committed + // must correspond to the outputs recorded by the committed transaction incarnations. + // (and the correctness of the outputs is the responsibility of BlockSTM validation). + group_tags: DashMap>, } -impl Default - for VersionedGroupValue +impl< + K: Hash + Clone + Debug + Eq, + T: Hash + Clone + Debug + Eq + Serialize, + V: TransactionWrite, + > VersionedGroupData { - fn default() -> Self { + pub(crate) fn empty() -> Self { Self { - versioned_map: HashMap::new(), - idx_to_update: BTreeMap::new(), - committed_group: HashMap::new(), - size_changed: false, + values: VersionedData::empty(), + group_sizes: DashMap::new(), + group_tags: DashMap::new(), } } -} -impl VersionedGroupValue { - fn set_raw_base_values(&mut self, values: impl Iterator) { - let zero_idx = ShiftedTxnIndex::zero_idx(); - match self.idx_to_update.get(&zero_idx) { - Some(previous) => { - // base value may have already been provided by another transaction - // executed simultaneously and asking for the same resource group. - // Value from storage must be identical, but then delayed field - // identifier exchange could've modified it. - // - // If they are RawFromStorage, they need to be identical. 
- // Assert the length of bytes for efficiency (instead of full equality) - for (tag, v) in values { - let prev_v = previous - .get(&tag) - .expect("Reading twice from storage must be consistent"); - if let ValueWithLayout::RawFromStorage(prev_v) = prev_v { - assert_eq!(v.bytes().map(|b| b.len()), prev_v.bytes().map(|b| b.len())); - } - } - }, - // For base value, incarnation is irrelevant, and is always set to 0. - None => { - self.write( - zero_idx, - 0, - values.map(|(k, v)| (k, ValueWithLayout::RawFromStorage(Arc::new(v)))), + pub(crate) fn num_keys(&self) -> usize { + self.group_sizes.len() + } + + pub fn set_raw_base_values( + &self, + group_key: K, + base_values: Vec<(T, V)>, + ) -> anyhow::Result<()> { + let mut group_sizes = self.group_sizes.entry(group_key.clone()).or_default(); + + if let Vacant(entry) = group_sizes.size_entries.entry(ShiftedTxnIndex::zero_idx()) { + // Perform group size computation if base not already provided. + let group_size = group_size_as_sum::( + base_values + .iter() + .flat_map(|(tag, value)| value.bytes().map(|b| (tag.clone(), b.len()))), + ) + .map_err(|e| { + anyhow!( + "Tag serialization error in resource group at {:?}: {:?}", + group_key.clone(), + e + ) + })?; + + entry.insert(SizeEntry::new(group_size)); + + let mut superset_tags = self.group_tags.entry(group_key.clone()).or_default(); + for (tag, value) in base_values.into_iter() { + superset_tags.insert(tag.clone()); + self.values.set_base_value( + (group_key.clone(), tag), + ValueWithLayout::RawFromStorage(Arc::new(value)), ); - }, + } } + + Ok(()) } - fn update_tagged_base_value_with_layout( - &mut self, + pub fn update_tagged_base_value_with_layout( + &self, + group_key: K, tag: T, value: V, layout: Option>, ) { - let zero_idx = ShiftedTxnIndex::zero_idx(); - let v = ValueWithLayout::Exchanged(Arc::new(value), layout.clone()); - - use btree_map::Entry::*; - match self - .versioned_map - .entry(tag.clone()) - .or_default() - .entry(zero_idx.clone()) - { - 
Occupied(mut o) => { - match &o.get().value { - ValueWithLayout::RawFromStorage(_) => { - o.insert(CachePadded::new(GroupEntry::new(0, v.clone()))); - - assert_matches!( - self.idx_to_update - .get_mut(&zero_idx) - .expect("Base version must exist when updating for exchange") - .insert(tag.clone(), v.clone()), - Some(ValueWithLayout::RawFromStorage(_)) - ); - - let existing = self - .committed_group - .get_mut(&tag) - .expect("Tag must exist in committed when updating for exchange"); - assert_matches!(existing, &mut ValueWithLayout::RawFromStorage(_)); - *existing = v; - }, - ValueWithLayout::Exchanged(_, _) => { - // already exchanged, skipping. - }, - } - }, - Vacant(_) => { - unreachable!("Base version must exist when updating for exchange") - }, - }; + self.values.set_base_value( + (group_key, tag), + ValueWithLayout::Exchanged(Arc::new(value), layout.clone()), + ); } - fn write( - &mut self, - shifted_idx: ShiftedTxnIndex, + /// Writes new resource group values (and size) specified by tag / value pair + /// iterators. Returns true if a new tag is written compared to the previous + /// incarnation (set of previous tags provided as a parameter), or if the size + /// as observed after the new write differs from before the write took place. + /// In these cases the caller (Block-STM) may have to do certain validations. + pub fn write( + &self, + group_key: K, + txn_idx: TxnIndex, incarnation: Incarnation, - values: impl Iterator)>, - ) -> bool { - let zero_idx = ShiftedTxnIndex::zero_idx(); - let at_base_version = shifted_idx == zero_idx; - - // Remove any prior entries. - let mut prev_tag_and_sizes: HashMap> = - self.remove(shifted_idx.clone()).into_iter().collect(); - - // Changes the set of values, or the size of the entries (that might have been - // used even when marked as an estimate, if self.size_changed was still false). - // Note: we can flag if an estimate entry's size was used, or if the group size - // read observed self.size_changed == false. 
Otherwise, as in vanilla Block-STM, - // it would suffice to simply check if the re-execution writes outside of the - // prior (group) write-set. Not implemented (yet), as for this optimization to - // be useful, the group metadata checks also need to be handled similarly. - let mut changes_behavior = false; - - let arc_map = values - .map(|(tag, v)| { - changes_behavior |= prev_tag_and_sizes.remove(&tag) != Some(v.bytes_len()); - - // Update versioned_map. - self.versioned_map.entry(tag.clone()).or_default().insert( - shifted_idx.clone(), - CachePadded::new(GroupEntry::new(incarnation, v.clone())), - ); - - (tag, v) - }) - .collect(); - - if !prev_tag_and_sizes.is_empty() { - changes_behavior = true; - } - - assert_none!( - self.idx_to_update - .insert(shifted_idx, CachePadded::new(arc_map)), - "prev_map previously removed and processed." - ); - - if at_base_version { - // base version is from storage and final - immediately treat as committed. - self.commit_idx(zero_idx, true) - .expect("Marking storage version as committed must succeed"); - } + values: impl IntoIterator>))>, + size: ResourceGroupSize, + mut prev_tags: HashSet, + ) -> Result { + let mut ret = false; + let mut tags_to_write = vec![]; - if changes_behavior && incarnation > 0 { - // Incarnation 0 sets the group contents the first time, but this is not - // considered as changing size between speculative executions - all later - // incarnations, however, are considered. - self.size_changed = true; - } + { + let superset_tags = self.group_tags.get(&group_key).ok_or_else(|| { + // Due to read-before-write. 
+ code_invariant_error("Group (tags) must be initialized to write to") + })?; + + for (tag, (value, layout)) in values.into_iter() { + if !superset_tags.contains(&tag) { + tags_to_write.push(tag.clone()); + } - changes_behavior - } + ret |= !prev_tags.remove(&tag); - fn mark_estimate(&mut self, txn_idx: TxnIndex) { - let shifted_idx = ShiftedTxnIndex::new(txn_idx); - let idx_updates = self - .idx_to_update - .get(&shifted_idx) - .expect("Group updates must exist at the index to mark estimate"); - - // estimate flag lives in GroupEntry, w. value in versioned_map to simplify reading - // based on txn_idx and tag. marking estimates occurs per txn (data MVHashMap exposes - // the interface for txn_idx & key). Hence, we must mark tags individually. - for (tag, _) in idx_updates.iter() { - self.versioned_map - .get_mut(tag) - .expect("Versioned entry must exist for tag") - .get_mut(&shifted_idx) - .expect("Versioned entry must exist") - .flag = Flag::Estimate; + self.values.write( + (group_key.clone(), tag), + txn_idx, + incarnation, + Arc::new(value), + layout, + ); + } } - } - fn remove(&mut self, shifted_idx: ShiftedTxnIndex) -> Vec<(T, Option)> { - // Remove idx updates first, then entries. - let idx_update_tags: Vec<(T, Option)> = self - .idx_to_update - .remove(&shifted_idx) - .map_or(vec![], |map| { - map.into_inner() - .into_iter() - .map(|(tag, v)| (tag, v.bytes_len())) - .collect() - }); - - // Similar to mark_estimate, need to remove an individual entry for each tag. - for (tag, _) in idx_update_tags.iter() { - assert_some!( - self.versioned_map - .get_mut(tag) - .expect("Versioned entry must exist for tag") - .remove(&shifted_idx), - "Entry for tag / idx must exist to be removed" - ); + for prev_tag in prev_tags { + let key = (group_key.clone(), prev_tag); + self.values.remove(&key, txn_idx); } - idx_update_tags - } - - // Records the latest committed op for each tag in the group (removed tags ar excluded). 
- fn commit_idx( - &mut self, - shifted_idx: ShiftedTxnIndex, - allow_new_modification: bool, - ) -> anyhow::Result<()> { - use std::collections::hash_map::Entry::*; - use WriteOpKind::*; - - let idx_updates = self - .idx_to_update - .get(&shifted_idx) - .expect("Group updates must exist at the index to commit"); - for (tag, v) in idx_updates.iter() { - match (self.committed_group.entry(tag.clone()), v.write_op_kind()) { - (Occupied(entry), Deletion) => { - entry.remove(); - }, - (Occupied(mut entry), Modification) => { - entry.insert(v.clone()); - }, - (Vacant(entry), Creation) => { - entry.insert(v.clone()); - }, - (Vacant(entry), Modification) if allow_new_modification => { - entry.insert(v.clone()); - }, - (Occupied(mut entry), Creation) if entry.get().write_op_kind() == Deletion => { - entry.insert(v.clone()); - }, - (e, _) => { - bail!( - "[{shifted_idx:?}] WriteOp kind {:?} not consistent with previous value at tag {tag:?}, value: {e:?}", - v.write_op_kind(), - ); - }, - } + if !tags_to_write.is_empty() { + let mut superset_tags = self + .group_tags + .get_mut(&group_key) + .expect("Group must be initialized"); + superset_tags.extend(tags_to_write); } - Ok(()) - } - - fn get_committed_group(&self) -> Vec<(T, ValueWithLayout)> { - self.committed_group.clone().into_iter().collect() - } - - fn get_latest_tagged_value( - &self, - tag: &T, - txn_idx: TxnIndex, - ) -> Result<(Version, ValueWithLayout), MVGroupError> { - let common_error = || -> MVGroupError { - if self - .idx_to_update - .contains_key(&ShiftedTxnIndex::zero_idx()) - { - MVGroupError::TagNotFound - } else { - MVGroupError::Uninitialized - } - }; - - self.versioned_map - .get(tag) - .ok_or(common_error()) - .and_then(|tree| { - match tree - .range(ShiftedTxnIndex::zero_idx()..ShiftedTxnIndex::new(txn_idx)) - .next_back() - { - Some((idx, entry)) => { - if entry.flag == Flag::Estimate { - Err(MVGroupError::Dependency( - idx.idx() - .expect("Base version cannot be marked as estimate"), - )) - } else { 
- Ok(( - idx.idx().map(|idx| (idx, entry.incarnation)), - entry.value.clone(), - )) - } - }, - None => Err(common_error()), + let mut group_sizes = self.group_sizes.get_mut(&group_key).ok_or_else(|| { + // Due to read-before-write. + code_invariant_error("Group (sizes) must be initialized to write to") + })?; + + if !(group_sizes.size_has_changed && ret) { + let (size_changed, update_flag) = group_sizes + .size_entries + .range(ShiftedTxnIndex::zero_idx()..ShiftedTxnIndex::new(txn_idx + 1)) + .next_back() + .ok_or_else(|| { + code_invariant_error("Initialized group sizes must contain storage version") + }) + .map(|(idx, prev_size)| { + ( + prev_size.value != size, + // Update the size_has_changed flag if the entry isn't the base value + // (which may be non-existent) or if the incarnation > 0. + *idx != ShiftedTxnIndex::zero_idx() || incarnation > 0, + ) + })?; + + if size_changed { + ret = true; + if update_flag { + group_sizes.size_has_changed = true; } - }) - } - - fn get_latest_group_size(&self, txn_idx: TxnIndex) -> Result { - if !self - .idx_to_update - .contains_key(&ShiftedTxnIndex::zero_idx()) - { - return Err(MVGroupError::Uninitialized); - } - - let sizes = self - .versioned_map - .iter() - .flat_map(|(tag, tree)| { - tree.range(ShiftedTxnIndex::zero_idx()..ShiftedTxnIndex::new(txn_idx)) - .next_back() - .and_then(|(idx, entry)| { - // We would like to use the value in an estimated entry if size never changed - // between speculative executions, i.e. to depend on estimates only when the - // size has changed. In this case, execution can wait on a dependency, while - // validation can short circuit to fail. 
- if entry.flag == Flag::Estimate && self.size_changed { - Some(Err(MVGroupError::Dependency( - idx.idx().expect("May not depend on storage version"), - ))) - } else { - entry - .value - .bytes_len() - .map(|bytes_len| Ok((tag, bytes_len))) - } - }) - }) - .collect::, MVGroupError>>()?; - group_size_as_sum(sizes.into_iter()).map_err(MVGroupError::TagSerializationError) - } -} - -impl< - K: Hash + Clone + Debug + Eq, - T: Hash + Clone + Debug + Eq + Serialize, - V: TransactionWrite, - > VersionedGroupData -{ - pub(crate) fn new() -> Self { - Self { - group_values: DashMap::new(), + } } - } - - pub(crate) fn num_keys(&self) -> usize { - self.group_values.len() - } - pub fn set_raw_base_values(&self, key: K, base_values: impl IntoIterator) { - // Incarnation is irrelevant for storage version, set to 0. - self.group_values - .entry(key) - .or_default() - .set_raw_base_values(base_values.into_iter()); - } - - pub fn update_tagged_base_value_with_layout( - &self, - key: K, - tag: T, - value: V, - layout: Option>, - ) { - // Incarnation is irrelevant for storage version, set to 0. - self.group_values - .entry(key) - .or_default() - .update_tagged_base_value_with_layout(tag, value, layout); - } + group_sizes + .size_entries + .insert(ShiftedTxnIndex::new(txn_idx), SizeEntry::new(size)); - pub fn write( - &self, - key: K, - txn_idx: TxnIndex, - incarnation: Incarnation, - values: impl IntoIterator>))>, - ) -> bool { - self.group_values.entry(key).or_default().write( - ShiftedTxnIndex::new(txn_idx), - incarnation, - values - .into_iter() - .map(|(k, (v, l))| (k, ValueWithLayout::Exchanged(Arc::new(v), l))), - ) + Ok(ret) } /// Mark all entry from transaction 'txn_idx' at access path 'key' as an estimated write /// (for future incarnation). Will panic if the entry is not in the data-structure. 
- pub fn mark_estimate(&self, key: &K, txn_idx: TxnIndex) { - self.group_values - .get_mut(key) + pub fn mark_estimate(&self, group_key: &K, txn_idx: TxnIndex, tags: HashSet) { + for tag in tags { + let key = (group_key.clone(), tag); + self.values.mark_estimate(&key, txn_idx); + } + + self.group_sizes + .get(group_key) .expect("Path must exist") - .mark_estimate(txn_idx); + .size_entries + .get(&ShiftedTxnIndex::new(txn_idx)) + .expect("Entry by the txn must exist to mark estimate") + .mark_estimate(); } /// Remove all entries from transaction 'txn_idx' at access path 'key'. - pub fn remove(&self, key: &K, txn_idx: TxnIndex) { - let mut group = self.group_values.get_mut(key).expect("Path must exist"); - let removed = group.remove(ShiftedTxnIndex::new(txn_idx)); - - if !removed.is_empty() { - group.size_changed = true; + pub fn remove(&self, group_key: &K, txn_idx: TxnIndex, tags: HashSet) { + for tag in tags { + let key = (group_key.clone(), tag); + self.values.remove(&key, txn_idx); } + + // TODO: consider setting size_has_changed flag if e.g. the size observed + // after remove is different. + assert_some!( + self.group_sizes + .get_mut(group_key) + .expect("Path must exist") + .size_entries + .remove(&ShiftedTxnIndex::new(txn_idx)), + "Entry for the txn must exist to be deleted" + ); } /// Read the latest value corresponding to a tag at a given group (identified by key). @@ -477,40 +263,60 @@ impl< /// group to the provided layout. 
pub fn fetch_tagged_data( &self, - key: &K, + group_key: &K, tag: &T, txn_idx: TxnIndex, ) -> Result<(Version, ValueWithLayout), MVGroupError> { - match self.group_values.get(key) { - Some(g) => g.get_latest_tagged_value(tag, txn_idx), - None => Err(MVGroupError::Uninitialized), + let key = (group_key.clone(), tag.clone()); + let initialized = self.group_sizes.contains_key(group_key); + + match self.values.fetch_data(&key, txn_idx) { + Ok(MVDataOutput::Versioned(version, value)) => Ok((version, value)), + Err(MVDataError::Uninitialized) => Err(if initialized { + MVGroupError::TagNotFound + } else { + MVGroupError::Uninitialized + }), + Err(MVDataError::Dependency(dep_idx)) => Err(MVGroupError::Dependency(dep_idx)), + Ok(MVDataOutput::Resolved(_)) + | Err(MVDataError::Unresolved(_)) + | Err(MVDataError::DeltaApplicationFailure) => { + unreachable!("Not using aggregatorV1") + }, } } - /// Returns the sum of latest sizes of all group members (and respective tags), collected - /// based on the recorded list of tags. If the latest entry at a tag is marked as estimate - /// and the group size has changed between speculative executions then a dependency is - /// returned. Otherwise, the size is computed including the sizes of estimated entries. - /// This works w. Block-STM, because a validation wave is triggered when any group entry - /// size changes after re-execution (also when an entry is added or removed). 
pub fn get_group_size( &self, - key: &K, + group_key: &K, txn_idx: TxnIndex, ) -> Result { - match self.group_values.get(key) { - Some(g) => g.get_latest_group_size(txn_idx), + match self.group_sizes.get(group_key) { + Some(g) => g + .size_entries + .range(ShiftedTxnIndex::zero_idx()..ShiftedTxnIndex::new(txn_idx)) + .next_back() + .map(|(idx, size)| { + if size.is_estimate() && g.size_has_changed { + Err(MVGroupError::Dependency( + idx.idx().expect("May not depend on storage version"), + )) + } else { + Ok(size.value) + } + }) + .unwrap_or(Err(MVGroupError::Uninitialized)), None => Err(MVGroupError::Uninitialized), } } pub fn validate_group_size( &self, - key: &K, + group_key: &K, txn_idx: TxnIndex, group_size_to_validate: ResourceGroupSize, ) -> bool { - self.get_group_size(key, txn_idx) == Ok(group_size_to_validate) + self.get_group_size(group_key, txn_idx) == Ok(group_size_to_validate) } /// For a given key that corresponds to a group, and an index of a transaction the last @@ -528,21 +334,36 @@ impl< /// modification otherwise). When consistent, the output is Ok(..). 
pub fn finalize_group( &self, - key: &K, + group_key: &K, txn_idx: TxnIndex, - ) -> anyhow::Result)>> { - let mut v = self.group_values.get_mut(key).expect("Path must exist"); - - v.commit_idx(ShiftedTxnIndex::new(txn_idx), false)?; - Ok(v.get_committed_group()) - } - - pub fn get_last_committed_group( - &self, - key: &K, - ) -> anyhow::Result)>> { - let v = self.group_values.get_mut(key).expect("Path must exist"); - Ok(v.get_committed_group()) + ) -> anyhow::Result<(Vec<(T, ValueWithLayout)>, ResourceGroupSize)> { + let superset_tags = self + .group_tags + .get(group_key) + .expect("Group tags must be set") + .clone(); + + let committed_group = superset_tags + .into_iter() + .map( + |tag| match self.fetch_tagged_data(group_key, &tag, txn_idx + 1) { + Ok((_, value)) => Ok((value.write_op_kind() != WriteOpKind::Deletion) + .then(|| (tag, value.clone()))), + Err(MVGroupError::TagNotFound) => Ok(None), + Err(e) => { + bail!("Unexpected error in finalize group fetching value {:?}", e) + }, + }, + ) + .collect::>>()? 
+ .into_iter() + .flatten() + .collect(); + Ok(( + committed_group, + self.get_group_size(group_key, txn_idx + 1) + .map_err(|e| anyhow!("Unexpected error in finalize group get size {:?}", e))?, + )) } } @@ -553,7 +374,10 @@ mod test { test::{KeyType, TestValue}, StorageVersion, }; - use claims::{assert_err, assert_matches, assert_none, assert_ok_eq, assert_some_eq}; + use claims::{ + assert_err, assert_matches, assert_none, assert_ok, assert_ok_eq, assert_some_eq, + }; + use std::collections::HashMap; use test_case::test_case; #[should_panic] @@ -562,14 +386,14 @@ mod test { #[test_case(2)] fn group_no_path_exists(test_idx: usize) { let ap = KeyType(b"/foo/b".to_vec()); - let map = VersionedGroupData::>, usize, TestValue>::new(); + let map = VersionedGroupData::>, usize, TestValue>::empty(); match test_idx { 0 => { - map.mark_estimate(&ap, 1); + map.mark_estimate(&ap, 1, HashSet::new()); }, 1 => { - map.remove(&ap, 2); + map.remove(&ap, 2, HashSet::new()); }, 2 => { let _ = map.finalize_group(&ap, 0); @@ -579,111 +403,247 @@ mod test { } #[test] - fn group_uninitialized() { + fn group_write_behavior_changes() { let ap_0 = KeyType(b"/foo/a".to_vec()); let ap_1 = KeyType(b"/foo/b".to_vec()); - let ap_2 = KeyType(b"/foo/c".to_vec()); + let map = VersionedGroupData::>, usize, TestValue>::empty(); + assert_ok!(map.set_raw_base_values(ap_0.clone(), vec![])); + assert_ok!(map.set_raw_base_values(ap_1.clone(), vec![])); + + let test_values = vec![ + (0usize, (TestValue::creation_with_len(1), None)), + (1usize, (TestValue::creation_with_len(1), None)), + ]; + let test_tags: HashSet = (0..2).collect(); + + // Sizes do need to be accurate with respect to written values for test. 
+ let fake_size = ResourceGroupSize::Combined { + num_tagged_resources: 2, + all_tagged_resources_size: 20, + }; + let fake_changed_size = ResourceGroupSize::Combined { + num_tagged_resources: 3, + all_tagged_resources_size: 20, + }; + + let check_write = |ap: &KeyType>, + idx, + incarnation, + size, + prev_tags, + expected_write_ret, + expected_size_changed| { + assert_ok_eq!( + map.write( + ap.clone(), + idx, + incarnation, + test_values.clone().into_iter(), + size, + prev_tags, + ), + expected_write_ret + ); + + assert_eq!( + map.group_sizes.get(ap).unwrap().size_has_changed, + expected_size_changed, + ); + assert_eq!( + map.group_sizes + .get(ap) + .unwrap() + .size_entries + .get(&ShiftedTxnIndex::new(idx)) + .unwrap() + .value, + size + ); + }; + + // Incarnation 0 changes behavior due to empty prior tags, leading to write returning Ok(false), + // but it should not set the size_changed flag. + check_write(&ap_0, 3, 0, fake_size, HashSet::new(), true, false); + // However, if the first write is by incarnation >0, then size_has_changed will also be set. + check_write(&ap_1, 5, 1, fake_size, HashSet::new(), true, true); + + // Incarnation 1 does not change size. + check_write(&ap_0, 3, 1, fake_size, test_tags.clone(), false, false); + // Even with incarnation > 0, observed size does not change. + check_write(&ap_0, 4, 1, fake_size, HashSet::new(), true, false); + + // Incarnation 2 changes size. + check_write( + &ap_0, + 3, + 2, + fake_changed_size, + test_tags.clone(), + true, + true, + ); + // Once size_changed is set, it stays true. 
+ check_write( + &ap_0, + 3, + 3, + fake_changed_size, + test_tags.clone(), + false, + true, + ); + check_write(&ap_0, 6, 0, fake_changed_size, HashSet::new(), true, true); + } + + #[test] + fn group_initialize_and_write() { + let ap = KeyType(b"/foo/a".to_vec()); + let ap_empty = KeyType(b"/foo/b".to_vec()); - let map = VersionedGroupData::>, usize, TestValue>::new(); + let map = VersionedGroupData::>, usize, TestValue>::empty(); + assert_matches!(map.get_group_size(&ap, 3), Err(MVGroupError::Uninitialized)); assert_matches!( - map.get_group_size(&ap_0, 3), + map.fetch_tagged_data(&ap, &1, 3), Err(MVGroupError::Uninitialized) ); - map.write( - ap_1.clone(), + // Does not need to be accurate. + let idx_3_size = ResourceGroupSize::Combined { + num_tagged_resources: 2, + all_tagged_resources_size: 20, + }; + // Write should fail because group is not initialized (R before W, where + // and read causes the base values/size to be set). + assert_err!(map.write( + ap.clone(), + 3, + 1, + (0..2).map(|i| (i, (TestValue::creation_with_len(1), None))), + idx_3_size, + HashSet::new(), + )); + assert_ok!(map.set_raw_base_values(ap.clone(), vec![])); + // Write should now succeed. + assert_ok!(map.write( + ap.clone(), 3, 1, // tags 0, 1, 2. (0..2).map(|i| (i, (TestValue::creation_with_len(1), None))), + idx_3_size, + HashSet::new(), + )); + + // Check sizes. + assert_ok_eq!(map.get_group_size(&ap, 4), idx_3_size); + assert_ok_eq!( + map.get_group_size(&ap, 3), + ResourceGroupSize::zero_combined() ); - // Size should be uninitialized even if the output of lower txn is stored - // (as long as the base isn't set). + // Check values. assert_matches!( - map.get_group_size(&ap_1, 3), - Err(MVGroupError::Uninitialized) - ); - assert_matches!( - map.get_group_size(&ap_1, 4), - Err(MVGroupError::Uninitialized) + map.fetch_tagged_data(&ap, &1, 3), + Err(MVGroupError::TagNotFound) ); - // for reading a tag at ap_1, w.o. returning size, idx = 3 is Uninitialized. 
assert_matches!( - map.fetch_tagged_data(&ap_1, &1, 3), - Err(MVGroupError::Uninitialized) + map.fetch_tagged_data(&ap, &3, 4), + Err(MVGroupError::TagNotFound) ); // ... but idx = 4 should find the previously stored value. assert_eq!( - map.fetch_tagged_data(&ap_1, &1, 4).unwrap(), - // Arc compares by value, no return size, incarnation. + map.fetch_tagged_data(&ap, &1, 4).unwrap(), ( Ok((3, 1)), ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(1)), None) ) ); - // ap_0 should still be uninitialized. + + // ap_empty should still be uninitialized. + assert_matches!( + map.fetch_tagged_data(&ap_empty, &1, 3), + Err(MVGroupError::Uninitialized) + ); assert_matches!( - map.fetch_tagged_data(&ap_0, &1, 3), + map.get_group_size(&ap_empty, 3), Err(MVGroupError::Uninitialized) ); + } - map.write( - ap_2.clone(), + #[test] + fn group_base_and_write() { + let ap = KeyType(b"/foo/a".to_vec()); + let map = VersionedGroupData::>, usize, TestValue>::empty(); + + // base tags 0, 1. + let base_values = vec![ + (0usize, TestValue::creation_with_len(1)), + (1usize, TestValue::creation_with_len(2)), + ]; + assert_ok!(map.set_raw_base_values(ap.clone(), base_values)); + + assert_ok!(map.write( + ap.clone(), 4, 0, // tags 1, 2. (1..3).map(|i| (i, (TestValue::creation_with_len(4), None))), - ); - assert_matches!( - map.fetch_tagged_data(&ap_2, &2, 4), - Err(MVGroupError::Uninitialized) - ); - map.set_raw_base_values( - ap_2.clone(), - // base tags 0, 1. - (0..2).map(|i| (i, TestValue::creation_with_len(2))), - ); + ResourceGroupSize::zero_combined(), + HashSet::new(), + )); - // Tag not found vs not initialized, assert_matches!( - map.fetch_tagged_data(&ap_2, &2, 4), + map.fetch_tagged_data(&ap, &2, 4), Err(MVGroupError::TagNotFound) ); assert_matches!( - map.fetch_tagged_data(&ap_2, &4, 5), + map.fetch_tagged_data(&ap, &3, 5), Err(MVGroupError::TagNotFound) ); - // vs finding a versioned entry from txn 4, vs from storage. 
assert_eq!( - map.fetch_tagged_data(&ap_2, &2, 5).unwrap(), + map.fetch_tagged_data(&ap, &2, 5).unwrap(), ( Ok((4, 0)), ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(4)), None) ) ); assert_eq!( - map.fetch_tagged_data(&ap_2, &0, 5).unwrap(), + map.fetch_tagged_data(&ap, &1, 4).unwrap(), ( Err(StorageVersion), ValueWithLayout::RawFromStorage(Arc::new(TestValue::creation_with_len(2))) ) ); + assert_eq!( + map.fetch_tagged_data(&ap, &0, 6).unwrap(), + ( + Err(StorageVersion), + ValueWithLayout::RawFromStorage(Arc::new(TestValue::creation_with_len(1))) + ) + ); } #[test] fn group_read_write_estimate() { use MVGroupError::*; let ap = KeyType(b"/foo/f".to_vec()); - let map = VersionedGroupData::>, usize, TestValue>::new(); + let map = VersionedGroupData::>, usize, TestValue>::empty(); - map.write( + let idx_5_size = ResourceGroupSize::Combined { + num_tagged_resources: 2, + all_tagged_resources_size: 20, + }; + + assert_ok!(map.set_raw_base_values(ap.clone(), vec![])); + assert_ok!(map.write( ap.clone(), 5, 3, // tags 0, 1, values are derived from [txn_idx, incarnation] seed. (0..2).map(|i| (i, (TestValue::new(vec![5, 3]), None))), - ); + idx_5_size, + HashSet::new(), + )); assert_eq!( map.fetch_tagged_data(&ap, &1, 12).unwrap(), ( @@ -691,13 +651,15 @@ mod test { ValueWithLayout::Exchanged(Arc::new(TestValue::new(vec![5, 3])), None) ) ); - map.write( + assert_ok!(map.write( ap.clone(), 10, 1, // tags 1, 2, values are derived from [txn_idx, incarnation] seed. 
(1..3).map(|i| (i, (TestValue::new(vec![10, 1]), None))), - ); + ResourceGroupSize::zero_combined(), + HashSet::new(), + )); assert_eq!( map.fetch_tagged_data(&ap, &1, 12).unwrap(), ( @@ -706,10 +668,10 @@ mod test { ) ); - map.mark_estimate(&ap, 10); + map.mark_estimate(&ap, 10, (1..3).collect()); assert_matches!(map.fetch_tagged_data(&ap, &1, 12), Err(Dependency(10))); assert_matches!(map.fetch_tagged_data(&ap, &2, 12), Err(Dependency(10))); - assert_matches!(map.fetch_tagged_data(&ap, &3, 12), Err(Uninitialized)); + assert_matches!(map.fetch_tagged_data(&ap, &3, 12), Err(TagNotFound)); assert_eq!( map.fetch_tagged_data(&ap, &0, 12).unwrap(), ( @@ -717,8 +679,9 @@ mod test { ValueWithLayout::Exchanged(Arc::new(TestValue::new(vec![5, 3])), None) ) ); + assert_matches!(map.get_group_size(&ap, 12), Err(Dependency(10))); - map.remove(&ap, 10); + map.remove(&ap, 10, (1..3).collect()); assert_eq!( map.fetch_tagged_data(&ap, &0, 12).unwrap(), ( @@ -733,43 +696,20 @@ mod test { ValueWithLayout::Exchanged(Arc::new(TestValue::new(vec![5, 3])), None) ) ); + + // Size should also be removed at 10. 
+ assert_ok_eq!(map.get_group_size(&ap, 12), idx_5_size); } #[test] - fn latest_group_size() { - use MVGroupError::*; + fn group_size_changed_dependency() { let ap = KeyType(b"/foo/f".to_vec()); - let map = VersionedGroupData::>, usize, TestValue>::new(); - - map.write( - ap.clone(), - 5, - 3, - // tags 0, 1 - (0..2).map(|i| (i, (TestValue::creation_with_len(2), None))), - ); - - map.write( - ap.clone(), - 5, - 3, - // tags 0, 1 - (0..2).map(|i| (i, (TestValue::creation_with_len(2), None))), - ); - assert_matches!(map.get_group_size(&ap, 12), Err(Uninitialized)); - - map.set_raw_base_values( - ap.clone(), - // base tag 1, 2, 3, 4 - (1..5).map(|i| (i, TestValue::creation_with_len(1))), - ); + let map = VersionedGroupData::>, usize, TestValue>::empty(); let tag: usize = 5; let one_entry_len = TestValue::creation_with_len(1).bytes().unwrap().len(); let two_entry_len = TestValue::creation_with_len(2).bytes().unwrap().len(); - let three_entry_len = TestValue::creation_with_len(3).bytes().unwrap().len(); - let four_entry_len = TestValue::creation_with_len(4).bytes().unwrap().len(); - let exp_size = group_size_as_sum(vec![(&tag, two_entry_len); 2].into_iter().chain(vec![ + let idx_5_size = group_size_as_sum(vec![(&tag, two_entry_len); 2].into_iter().chain(vec![ ( &tag, one_entry_len @@ -777,363 +717,339 @@ mod test { 3 ])) .unwrap(); - assert_ok_eq!(map.get_group_size(&ap, 12), exp_size); - - map.write( - ap.clone(), - 10, - 1, - // tags 4, 5 - (4..6).map(|i| (i, (TestValue::creation_with_len(3), None))), - ); - let exp_size_12 = group_size_as_sum( - vec![(&tag, one_entry_len); 2] - .into_iter() - .chain(vec![(&tag, two_entry_len); 2]) - .chain(vec![(&tag, three_entry_len); 2]), - ) - .unwrap(); - assert_ok_eq!(map.get_group_size(&ap, 12), exp_size_12); - assert_ok_eq!(map.get_group_size(&ap, 10), exp_size); - - map.mark_estimate(&ap, 5); - assert_matches!(map.get_group_size(&ap, 12), Err(Dependency(5))); - let exp_size_4 = group_size_as_sum(vec![(&tag, one_entry_len); 
4].into_iter()).unwrap(); - - assert_ok_eq!(map.get_group_size(&ap, 4), exp_size_4); + let base_size = group_size_as_sum(vec![(&tag, one_entry_len); 4].into_iter()).unwrap(); + let idx_5_size_with_ones = + group_size_as_sum(vec![(&tag, one_entry_len); 5].into_iter()).unwrap(); - map.write( + assert_ok!(map.set_raw_base_values( ap.clone(), - 6, - 1, - (0..2).map(|i| (i, (TestValue::creation_with_len(4), None))), - ); - let exp_size_7 = group_size_as_sum(vec![(&tag, one_entry_len); 3].into_iter().chain(vec![ - ( - &tag, - four_entry_len - ); - 2 - ])) - .unwrap(); - - assert_ok_eq!(map.get_group_size(&ap, 7), exp_size_7); - assert_matches!(map.get_group_size(&ap, 6), Err(Dependency(5))); - - map.remove(&ap, 5); - assert_ok_eq!(map.get_group_size(&ap, 6), exp_size_4); - } - - #[test] - fn size_changed_dependency() { - let ap = KeyType(b"/foo/f".to_vec()); - let map = VersionedGroupData::>, usize, TestValue>::new(); - - map.write( + // base tag 1, 2, 3, 4 + (1..5) + .map(|i| (i, TestValue::creation_with_len(1))) + .collect(), + )); + assert_ok!(map.write( ap.clone(), 5, 0, // tags 0, 1 (0..2).map(|i| (i, (TestValue::creation_with_len(2), None))), - ); + idx_5_size, + HashSet::new(), + )); - map.set_raw_base_values( - ap.clone(), - // base tag 1, 2, 3, 4 - (1..5).map(|i| (i, TestValue::creation_with_len(1))), - ); // Incarnation 0 and base values should not affect size_changed flag. 
- assert!(!map.group_values.get(&ap).unwrap().size_changed); + assert!(!map.group_sizes.get(&ap).unwrap().size_has_changed); - let tag: usize = 5; - let one_entry_len = TestValue::creation_with_len(1).bytes().unwrap().len(); - let two_entry_len = TestValue::creation_with_len(2).bytes().unwrap().len(); - let exp_size = group_size_as_sum(vec![(&tag, two_entry_len); 2].into_iter().chain(vec![ - ( - &tag, - one_entry_len - ); - 3 - ])) - .unwrap(); - let exp_size_with_ones = - group_size_as_sum(vec![(&tag, one_entry_len); 5].into_iter()).unwrap(); + assert_ok_eq!(map.get_group_size(&ap, 5), base_size); + assert!(map.validate_group_size(&ap, 4, base_size)); + assert!(!map.validate_group_size(&ap, 5, idx_5_size)); + assert_ok_eq!(map.get_group_size(&ap, 6), idx_5_size); // Despite estimates, should still return size. - map.mark_estimate(&ap, 5); - assert_ok_eq!(map.get_group_size(&ap, 12), exp_size); - assert!(map.validate_group_size(&ap, 12, exp_size)); - assert!(!map.validate_group_size(&ap, 12, exp_size_with_ones)); - - // Same write again won't change size. - map.write( - ap.clone(), - 5, - 1, - (0..2).map(|i| (i, (TestValue::creation_with_len(2), None))), + map.mark_estimate(&ap, 5, (0..2).collect()); + assert_ok_eq!(map.get_group_size(&ap, 12), idx_5_size); + assert!(map.validate_group_size(&ap, 12, idx_5_size)); + assert!(!map.validate_group_size(&ap, 12, ResourceGroupSize::zero_combined())); + + // Different write, same size again. + assert_ok_eq!( + map.write( + ap.clone(), + 5, + 1, + (0..3).map(|i| (i, (TestValue::creation_with_len(2), None))), + idx_5_size, + (0..2).collect(), + ), + true ); - assert!(!map.group_values.get(&ap).unwrap().size_changed); - map.mark_estimate(&ap, 5); - assert_ok_eq!(map.get_group_size(&ap, 12), exp_size); - assert!(map.validate_group_size(&ap, 12, exp_size)); - assert!(!map.validate_group_size(&ap, 12, exp_size_with_ones)); - - // Removing nothing won't change size. 
- map.remove(&ap, 6); - assert!(!map.group_values.get(&ap).unwrap().size_changed); - - map.write( + assert!(!map.group_sizes.get(&ap).unwrap().size_has_changed); + map.mark_estimate(&ap, 5, (0..2).collect()); + assert_ok_eq!(map.get_group_size(&ap, 12), idx_5_size); + assert!(map.validate_group_size(&ap, 12, idx_5_size)); + assert!(!map.validate_group_size(&ap, 12, ResourceGroupSize::zero_concrete())); + + // Remove currently does not affect size_has_changed. + map.remove(&ap, 5, (0..3).collect()); + assert!(!map.group_sizes.get(&ap).unwrap().size_has_changed); + assert_ok_eq!(map.get_group_size(&ap, 4), base_size); + assert!(map.validate_group_size(&ap, 6, base_size)); + + assert_ok!(map.write( ap.clone(), 5, 2, - (0..2).map(|i| (i, (TestValue::creation_with_len(1), None))), - ); + (0..3).map(|i| (i, (TestValue::creation_with_len(1), None))), + idx_5_size_with_ones, + (0..2).collect(), + )); // Size has changed between speculative writes. - assert!(map.group_values.get(&ap).unwrap().size_changed); - assert_ok_eq!(map.get_group_size(&ap, 12), exp_size_with_ones); - assert!(map.validate_group_size(&ap, 12, exp_size_with_ones)); - assert!(!map.validate_group_size(&ap, 12, exp_size)); + assert!(map.group_sizes.get(&ap).unwrap().size_has_changed); + assert_ok_eq!(map.get_group_size(&ap, 10), idx_5_size_with_ones); + assert!(map.validate_group_size(&ap, 10, idx_5_size_with_ones)); + assert!(!map.validate_group_size(&ap, 10, idx_5_size)); + assert_ok_eq!(map.get_group_size(&ap, 3), base_size); - map.mark_estimate(&ap, 5); + map.mark_estimate(&ap, 5, (0..3).collect()); assert_matches!( map.get_group_size(&ap, 12), Err(MVGroupError::Dependency(5)) ); - assert!(!map.validate_group_size(&ap, 12, exp_size_with_ones)); - assert!(!map.validate_group_size(&ap, 12, exp_size)); - - // Next check that size change gets properly set w. differing set of writes. 
- let ap_1 = KeyType(b"/foo/1".to_vec()); - let ap_2 = KeyType(b"/foo/2".to_vec()); - let ap_3 = KeyType(b"/foo/3".to_vec()); - - map.write( - ap_1.clone(), - 5, - 0, - // tags 0, 1 - (0..2).map(|i| (i, (TestValue::creation_with_len(2), None))), - ); - assert!(!map.group_values.get(&ap_1).unwrap().size_changed); - map.write( - ap_1.clone(), - 5, - 1, - // tags 0, 1 - (0..1).map(|i| (i, (TestValue::creation_with_len(2), None))), - ); - assert!(map.group_values.get(&ap_1).unwrap().size_changed); + assert!(!map.validate_group_size(&ap, 12, idx_5_size_with_ones)); + assert!(!map.validate_group_size(&ap, 12, idx_5_size)); + } - map.write( - ap_2.clone(), - 5, - 0, - // tags 0, 1 - (0..2).map(|i| (i, (TestValue::creation_with_len(2), None))), + #[test] + fn group_write_tags_change_behavior() { + let ap = KeyType(b"/foo/1".to_vec()); + + let map = VersionedGroupData::>, usize, TestValue>::empty(); + assert_ok!(map.set_raw_base_values(ap.clone(), vec![],)); + + assert_ok_eq!( + map.write( + ap.clone(), + 5, + 0, + // tags 0, 1 + (0..2).map(|i| (i, (TestValue::creation_with_len(2), None))), + ResourceGroupSize::zero_combined(), + HashSet::new(), + ), + true, ); - assert!(!map.group_values.get(&ap_2).unwrap().size_changed); - map.write( - ap_2.clone(), - 5, - 1, - // tags 0, 1 - (1..3).map(|i| (i, (TestValue::creation_with_len(2), None))), + // Write changes behavior (requiring re-validation) because of tags only when + // the new tags are not contained in the old tags. Not when a tag is no longer + // written. This is because no information about a resource in a group is + // validated by equality (group size and metadata are stored separately) - + // and in this sense resources in group are like normal resources. 
+ assert_ok_eq!( + map.write( + ap.clone(), + 5, + 1, + // tags 0 - contained among {0, 1} + (0..1).map(|i| (i, (TestValue::creation_with_len(2), None))), + ResourceGroupSize::zero_combined(), + (0..2).collect(), + ), + false ); - assert!(map.group_values.get(&ap_2).unwrap().size_changed); - - map.write( - ap_3.clone(), - 5, - 0, - // tags 0, 1 - (0..2).map(|i| (i, (TestValue::creation_with_len(2), None))), + assert_ok_eq!( + map.write( + ap.clone(), + 5, + 2, + // tags 0, 1 - not contained among {0} + (0..2).map(|i| (i, (TestValue::creation_with_len(2), None))), + ResourceGroupSize::zero_combined(), + (0..1).collect(), + ), + true ); - assert!(!map.group_values.get(&ap_3).unwrap().size_changed); - map.remove(&ap_3, 5); - assert!(map.group_values.get(&ap_3).unwrap().size_changed); } fn finalize_group_as_hashmap( map: &VersionedGroupData>, usize, TestValue>, key: &KeyType>, idx: TxnIndex, - ) -> HashMap> { - map.finalize_group(key, idx).unwrap().into_iter().collect() + ) -> ( + HashMap>, + ResourceGroupSize, + ) { + let (group, size) = map.finalize_group(key, idx).unwrap(); + + (group.into_iter().collect(), size) } #[test] - fn group_commit_idx() { + fn group_finalize() { let ap = KeyType(b"/foo/f".to_vec()); - let map = VersionedGroupData::>, usize, TestValue>::new(); + let map = VersionedGroupData::>, usize, TestValue>::empty(); + + let base_values: Vec<_> = (1..4) + .map(|i| (i, TestValue::creation_with_len(i))) + .collect(); - map.set_raw_base_values( + assert_ok!(map.set_raw_base_values( ap.clone(), // base tag 1, 2, 3 - (1..4).map(|i| (i, TestValue::with_kind(i, true))), - ); - map.write( + base_values.clone(), + )); + let base_size = group_size_as_sum( + base_values + .into_iter() + .map(|(tag, value)| (tag, value.bytes().unwrap().len())), + ) + .unwrap(); + + // Does not need to be accurate. 
+ let idx_3_size = ResourceGroupSize::Combined { + num_tagged_resources: 2, + all_tagged_resources_size: 20, + }; + let idx_5_size = ResourceGroupSize::Combined { + num_tagged_resources: 5, + all_tagged_resources_size: 50, + }; + let idx_7_size = ResourceGroupSize::Combined { + num_tagged_resources: 7, + all_tagged_resources_size: 70, + }; + let idx_8_size = ResourceGroupSize::Combined { + num_tagged_resources: 8, + all_tagged_resources_size: 80, + }; + + assert_ok!(map.write( ap.clone(), 7, 3, // insert at 0, remove at 1. vec![ - (0, (TestValue::with_kind(100, true), None)), + (0, (TestValue::creation_with_len(100), None)), (1, (TestValue::deletion(), None)), ], - ); - map.write( + idx_7_size, + HashSet::new(), + )); + assert_ok!(map.write( ap.clone(), 3, 0, // tags 2, 3 - (2..4).map(|i| (i, (TestValue::with_kind(200 + i, false), None))), - ); - let committed_3 = finalize_group_as_hashmap(&map, &ap, 3); + (2..4).map(|i| (i, (TestValue::creation_with_len(200 + i), None))), + idx_3_size, + HashSet::new(), + )); + + let (finalized_3, size_3) = finalize_group_as_hashmap(&map, &ap, 3); + // Finalize returns size recorded by txn 3, while get_group_size at txn index + // 3 must return the size recorded below it. + assert_eq!(size_3, idx_3_size); + assert_ok_eq!(map.get_group_size(&ap, 3), base_size,); + // The value at tag 1 is from base, while 2 and 3 are from txn 3. 
// (Arc compares with value equality) - assert_eq!(committed_3.len(), 3); + assert_eq!(finalized_3.len(), 3); assert_some_eq!( - committed_3.get(&1), - &ValueWithLayout::RawFromStorage(Arc::new(TestValue::with_kind(1, true))) + finalized_3.get(&1), + &ValueWithLayout::RawFromStorage(Arc::new(TestValue::creation_with_len(1))) ); assert_some_eq!( - committed_3.get(&2), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(202, false)), None) + finalized_3.get(&2), + &ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(202)), None) ); assert_some_eq!( - committed_3.get(&3), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(203, false)), None) + finalized_3.get(&3), + &ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(203)), None) ); - map.write(ap.clone(), 5, 3, vec![ - (3, (TestValue::with_kind(303, false), None)), - (4, (TestValue::with_kind(304, true), None)), - ]); - let committed_5 = finalize_group_as_hashmap(&map, &ap, 5); - assert_eq!(committed_5.len(), 4); + assert_ok!(map.write( + ap.clone(), + 5, + 3, + vec![ + (3, (TestValue::creation_with_len(303), None)), + (4, (TestValue::creation_with_len(304), None)), + ], + idx_5_size, + HashSet::new(), + )); + // Finalize should work even for indices without writes. 
+ let (finalized_6, size_6) = finalize_group_as_hashmap(&map, &ap, 6); + assert_eq!(size_6, idx_5_size); + assert_eq!(finalized_6.len(), 4); assert_some_eq!( - committed_5.get(&1), - &ValueWithLayout::RawFromStorage(Arc::new(TestValue::with_kind(1, true))) + finalized_6.get(&1), + &ValueWithLayout::RawFromStorage(Arc::new(TestValue::creation_with_len(1))) ); assert_some_eq!( - committed_5.get(&2), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(202, false)), None) + finalized_6.get(&2), + &ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(202)), None) ); assert_some_eq!( - committed_5.get(&3), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(303, false)), None) + finalized_6.get(&3), + &ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(303)), None) ); assert_some_eq!( - committed_5.get(&4), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(304, true)), None) + finalized_6.get(&4), + &ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(304)), None) ); - let committed_7 = finalize_group_as_hashmap(&map, &ap, 7); - assert_eq!(committed_7.len(), 4); + let (finalized_7, size_7) = finalize_group_as_hashmap(&map, &ap, 7); + assert_eq!(size_7, idx_7_size); + assert_eq!(finalized_7.len(), 4); assert_some_eq!( - committed_7.get(&0), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(100, true)), None) + finalized_7.get(&0), + &ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(100)), None) ); - assert_none!(committed_7.get(&1)); + assert_none!(finalized_7.get(&1)); assert_some_eq!( - committed_7.get(&2), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(202, false)), None) + finalized_7.get(&2), + &ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(202)), None) ); assert_some_eq!( - committed_7.get(&3), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(303, false)), None) + finalized_7.get(&3), + 
&ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(303)), None) ); assert_some_eq!( - committed_7.get(&4), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(304, true)), None) + finalized_7.get(&4), + &ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(304)), None) ); - map.write( + assert_ok!(map.write( ap.clone(), 8, 0, // re-insert at 1, remove everything else vec![ (0, (TestValue::deletion(), None)), - (1, (TestValue::with_kind(400, true), None)), + (1, (TestValue::creation_with_len(400), None)), (2, (TestValue::deletion(), None)), (3, (TestValue::deletion(), None)), (4, (TestValue::deletion(), None)), ], - ); - let committed_8 = finalize_group_as_hashmap(&map, &ap, 8); - assert_eq!(committed_8.len(), 1); + idx_8_size, + HashSet::new(), + )); + let (finalized_8, size_8) = finalize_group_as_hashmap(&map, &ap, 8); + assert_eq!(size_8, idx_8_size); + assert_eq!(finalized_8.len(), 1); assert_some_eq!( - committed_8.get(&1), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(400, true)), None) + finalized_8.get(&1), + &ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(400)), None) ); } // TODO[agg_v2](test) Test with non trivial layout. #[test] - fn group_commit_op_kind_checks() { + fn group_base_layout() { let ap = KeyType(b"/foo/f".to_vec()); - let map = VersionedGroupData::>, usize, TestValue>::new(); - - map.set_raw_base_values( - ap.clone(), - // base tag 1, 2, 3 - (1..4).map(|i| (i, TestValue::with_kind(i, true))), - ); - map.write( - ap.clone(), - 3, - 2, - // remove at 0, must fail commit. - vec![(0, (TestValue::deletion(), None))], - ); - assert_err!(map.finalize_group(&ap, 3)); + let map = VersionedGroupData::>, usize, TestValue>::empty(); - map.write( - ap.clone(), - 3, - 2, - // modify at 0, must fail commit. 
- vec![(0, (TestValue::with_kind(100, false), None))], - ); - assert_err!(map.finalize_group(&ap, 3)); - - map.write( - ap.clone(), - 3, - 2, - // create at 1, must fail commit - vec![(1, (TestValue::with_kind(101, true), None))], + assert_ok!(map.set_raw_base_values(ap.clone(), vec![(1, TestValue::creation_with_len(1))],)); + assert_eq!( + map.fetch_tagged_data(&ap, &1, 6).unwrap(), + ( + Err(StorageVersion), + ValueWithLayout::RawFromStorage(Arc::new(TestValue::creation_with_len(1))) + ) ); - assert_err!(map.finalize_group(&ap, 3)); - // sanity check the commit succeeds with proper kind. - map.write( + map.update_tagged_base_value_with_layout( ap.clone(), - 3, - 2, - // modify at 0, must fail commit. - vec![ - (0, (TestValue::with_kind(100, true), None)), - (1, (TestValue::with_kind(101, false), None)), - ], - ); - let committed = finalize_group_as_hashmap(&map, &ap, 3); - assert_some_eq!( - committed.get(&0), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(100, true)), None) - ); - assert_some_eq!( - committed.get(&1), - &ValueWithLayout::Exchanged(Arc::new(TestValue::with_kind(101, false)), None) - ); - assert_some_eq!( - committed.get(&2), - &ValueWithLayout::RawFromStorage(Arc::new(TestValue::with_kind(2, true))) + 1, + TestValue::creation_with_len(1), + None, ); - assert_some_eq!( - committed.get(&3), - &ValueWithLayout::RawFromStorage(Arc::new(TestValue::with_kind(3, true))) + assert_eq!( + map.fetch_tagged_data(&ap, &1, 6).unwrap(), + ( + Err(StorageVersion), + ValueWithLayout::Exchanged(Arc::new(TestValue::creation_with_len(1)), None) + ) ); } } diff --git a/aptos-move/mvhashmap/src/versioned_modules.rs b/aptos-move/mvhashmap/src/versioned_modules.rs index edab94b933fdd..828677c52e4e8 100644 --- a/aptos-move/mvhashmap/src/versioned_modules.rs +++ b/aptos-move/mvhashmap/src/versioned_modules.rs @@ -101,7 +101,7 @@ impl Default for VersionedValue { } impl VersionedModules { - pub(crate) fn new() -> Self { + pub(crate) fn empty() -> Self { Self { 
values: DashMap::new(), } diff --git a/aptos-node/src/lib.rs b/aptos-node/src/lib.rs index 93acbfd7e7e44..abf6e340e6c4d 100644 --- a/aptos-node/src/lib.rs +++ b/aptos-node/src/lib.rs @@ -26,7 +26,7 @@ use aptos_logger::{prelude::*, telemetry_log_writer::TelemetryLog, Level, Logger use aptos_state_sync_driver::driver_factory::StateSyncRuntimes; use aptos_types::{chain_id::ChainId, on_chain_config::OnChainJWKConsensusConfig}; use clap::Parser; -use futures::channel::mpsc; +use futures::channel::{mpsc, oneshot}; use hex::{FromHex, FromHexError}; use rand::{rngs::StdRng, SeedableRng}; use std::{ @@ -209,11 +209,21 @@ pub struct AptosHandle { _indexer_db_runtime: Option, } -/// Start an Aptos node pub fn start( config: NodeConfig, log_file: Option, create_global_rayon_pool: bool, +) -> anyhow::Result<()> { + start_and_report_ports(config, log_file, create_global_rayon_pool, None, None) +} + +/// Start an Aptos node +pub fn start_and_report_ports( + config: NodeConfig, + log_file: Option, + create_global_rayon_pool: bool, + api_port_tx: Option>, + indexer_grpc_port_tx: Option>, ) -> anyhow::Result<()> { // Setup panic handler aptos_crash_handler::setup_panic_handler(); @@ -252,8 +262,13 @@ pub fn start( } // Set up the node environment and start it - let _node_handle = - setup_environment_and_start_node(config, remote_log_receiver, Some(logger_filter_update))?; + let _node_handle = setup_environment_and_start_node( + config, + remote_log_receiver, + Some(logger_filter_update), + api_port_tx, + indexer_grpc_port_tx, + )?; let term = Arc::new(AtomicBool::new(false)); while !term.load(Ordering::Acquire) { thread::park(); @@ -563,7 +578,7 @@ where let config = OnChainJWKConsensusConfig::default_enabled(); println!("Flag `INITIALIZE_JWK_CONSENSUS` detected, will enable JWK Consensus for all default OIDC providers in genesis: {:?}", config); Some(config) - }, + } _ => None, }; }))) @@ -597,6 +612,8 @@ pub fn setup_environment_and_start_node( mut node_config: NodeConfig, 
remote_log_rx: Option>, logger_filter_update_job: Option, + api_port_tx: Option>, + indexer_grpc_port_tx: Option>, ) -> anyhow::Result { // Log the node config at node startup node_config.log_all_configs(); @@ -687,14 +704,20 @@ pub fn setup_environment_and_start_node( indexer_runtime, indexer_grpc_runtime, internal_indexer_db_runtime, + mempool_client_sender, ) = services::bootstrap_api_and_indexer( &node_config, db_rw.clone(), chain_id, indexer_db_opt, update_receiver, + api_port_tx, + indexer_grpc_port_tx, )?; + // Set mempool client sender in order to enable the Mempool API in the admin service + admin_service.set_mempool_client_sender(mempool_client_sender); + // Create mempool and get the consensus to mempool sender let (mempool_runtime, consensus_to_mempool_sender) = services::start_mempool_runtime_and_get_consensus_sender( diff --git a/aptos-node/src/services.rs b/aptos-node/src/services.rs index 2a686806ae360..9537f4ea3ef9f 100644 --- a/aptos-node/src/services.rs +++ b/aptos-node/src/services.rs @@ -19,7 +19,9 @@ use aptos_indexer_grpc_table_info::runtime::{ bootstrap as bootstrap_indexer_table_info, bootstrap_internal_indexer_db, }; use aptos_logger::{debug, telemetry_log_writer::TelemetryLog, LoggerFilterUpdater}; -use aptos_mempool::{network::MempoolSyncMsg, MempoolClientRequest, QuorumStoreRequest}; +use aptos_mempool::{ + network::MempoolSyncMsg, MempoolClientRequest, MempoolClientSender, QuorumStoreRequest, +}; use aptos_mempool_notifications::MempoolNotificationListener; use aptos_network::application::{interface::NetworkClientInterface, storage::PeersAndMetadata}; use aptos_network_benchmark::{run_netbench_service, NetbenchMessage}; @@ -32,7 +34,7 @@ use aptos_storage_interface::{DbReader, DbReaderWriter}; use aptos_time_service::TimeService; use aptos_types::{chain_id::ChainId, indexer::indexer_db_reader::IndexerReader}; use aptos_validator_transaction_pool::VTxnPoolState; -use futures::channel::{mpsc, mpsc::Sender}; +use futures::channel::{mpsc, 
mpsc::Sender, oneshot}; use std::{sync::Arc, time::Instant}; use tokio::{ runtime::{Handle, Runtime}, @@ -50,6 +52,8 @@ pub fn bootstrap_api_and_indexer( chain_id: ChainId, internal_indexer_db: Option, update_receiver: Option>, + api_port_tx: Option>, + indexer_grpc_port_tx: Option>, ) -> anyhow::Result<( Receiver, Option, @@ -57,6 +61,7 @@ pub fn bootstrap_api_and_indexer( Option, Option, Option, + MempoolClientSender, )> { // Create the mempool client and sender let (mempool_client_sender, mempool_client_receiver) = @@ -97,6 +102,7 @@ pub fn bootstrap_api_and_indexer( db_rw.reader.clone(), mempool_client_sender.clone(), indexer_reader.clone(), + api_port_tx, )?) } else { None @@ -109,6 +115,7 @@ pub fn bootstrap_api_and_indexer( db_rw.reader.clone(), mempool_client_sender.clone(), indexer_reader, + indexer_grpc_port_tx, ); // Create the indexer runtime @@ -116,7 +123,7 @@ pub fn bootstrap_api_and_indexer( node_config, chain_id, db_rw.reader.clone(), - mempool_client_sender, + mempool_client_sender.clone(), )?; Ok(( @@ -126,6 +133,7 @@ pub fn bootstrap_api_and_indexer( indexer_runtime, indexer_grpc, db_indexer_runtime, + mempool_client_sender, )) } diff --git a/config/src/config/consensus_config.rs b/config/src/config/consensus_config.rs index 021edf0b365b4..f907072edfb67 100644 --- a/config/src/config/consensus_config.rs +++ b/config/src/config/consensus_config.rs @@ -78,6 +78,7 @@ pub struct ConsensusConfig { // must match one of the CHAIN_HEALTH_WINDOW_SIZES values. 
pub window_for_chain_health: usize, pub chain_health_backoff: Vec, + // Deprecated pub qc_aggregator_type: QcAggregatorType, // Max blocks allowed for block retrieval requests pub max_blocks_per_sending_request: u64, @@ -89,8 +90,9 @@ pub struct ConsensusConfig { pub rand_rb_config: ReliableBroadcastConfig, pub num_bounded_executor_tasks: u64, pub enable_pre_commit: bool, - pub max_pending_rounds_in_commit_vote_cache: u64, + pub optimistic_sig_verification: bool, + pub enable_round_timeout_msg: bool, } /// Deprecated @@ -301,7 +303,6 @@ impl Default for ConsensusConfig { backoff_proposal_delay_ms: 300, }, ], - qc_aggregator_type: QcAggregatorType::default(), // This needs to fit into the network message size, so with quorum store it can be much bigger max_blocks_per_sending_request: 10, @@ -320,6 +321,8 @@ impl Default for ConsensusConfig { num_bounded_executor_tasks: 16, enable_pre_commit: true, max_pending_rounds_in_commit_vote_cache: 100, + optimistic_sig_verification: false, + enable_round_timeout_msg: false, } } } diff --git a/config/src/config/consensus_observer_config.rs b/config/src/config/consensus_observer_config.rs index 02d8572134950..0ca55c31d50e9 100644 --- a/config/src/config/consensus_observer_config.rs +++ b/config/src/config/consensus_observer_config.rs @@ -9,8 +9,8 @@ use serde::{Deserialize, Serialize}; use serde_yaml::Value; // Useful constants for enabling consensus observer on different node types -const ENABLE_ON_VALIDATORS: bool = true; -const ENABLE_ON_VALIDATOR_FULLNODES: bool = true; +const ENABLE_ON_VALIDATORS: bool = false; +const ENABLE_ON_VALIDATOR_FULLNODES: bool = false; const ENABLE_ON_PUBLIC_FULLNODES: bool = false; #[derive(Clone, Copy, Debug, Deserialize, PartialEq, Serialize)] diff --git a/config/src/config/node_config_loader.rs b/config/src/config/node_config_loader.rs index b5235620ddf58..62a694f9bfacd 100644 --- a/config/src/config/node_config_loader.rs +++ b/config/src/config/node_config_loader.rs @@ -159,9 +159,9 @@ fn 
get_chain_id(node_config: &NodeConfig) -> Result { // TODO: can we make this less hacky? // Load the genesis transaction from disk - let genesis_txn = get_genesis_txn(node_config).ok_or(Error::InvariantViolation( - "The genesis transaction was not found!".to_string(), - ))?; + let genesis_txn = get_genesis_txn(node_config).ok_or_else(|| { + Error::InvariantViolation("The genesis transaction was not found!".to_string()) + })?; // Extract the chain ID from the genesis transaction match genesis_txn { diff --git a/config/src/config/secure_backend_config.rs b/config/src/config/secure_backend_config.rs index 62bf3b0c041e6..b0db79659caa5 100644 --- a/config/src/config/secure_backend_config.rs +++ b/config/src/config/secure_backend_config.rs @@ -79,7 +79,7 @@ impl VaultConfig { let path = self .ca_certificate .as_ref() - .ok_or(Error::Missing("ca_certificate"))?; + .ok_or_else(|| Error::Missing("ca_certificate"))?; read_file(path) } } diff --git a/config/src/config/state_sync_config.rs b/config/src/config/state_sync_config.rs index 2966a42e2e10f..1004bd48b5479 100644 --- a/config/src/config/state_sync_config.rs +++ b/config/src/config/state_sync_config.rs @@ -11,7 +11,7 @@ use serde::{Deserialize, Serialize}; use serde_yaml::Value; // The maximum message size per state sync message -const MAX_MESSAGE_SIZE: usize = 8 * 1024 * 1024; /* 8 MiB */ +const MAX_MESSAGE_SIZE: usize = 10 * 1024 * 1024; /* 10 MiB */ // The maximum chunk sizes for data client requests and response const MAX_EPOCH_CHUNK_SIZE: u64 = 200; diff --git a/consensus/consensus-types/src/common.rs b/consensus/consensus-types/src/common.rs index 7dbc1888b7203..db20a4fd9f3fd 100644 --- a/consensus/consensus-types/src/common.rs +++ b/consensus/consensus-types/src/common.rs @@ -6,7 +6,6 @@ use crate::{ payload::{OptQuorumStorePayload, PayloadExecutionLimit}, proof_of_store::{BatchInfo, ProofCache, ProofOfStore}, }; -use anyhow::bail; use aptos_crypto::{ hash::{CryptoHash, CryptoHasher}, HashValue, @@ -520,8 +519,7 
@@ impl Payload { (true, Payload::OptQuorumStore(opt_quorum_store)) => { let proof_with_data = opt_quorum_store.proof_with_data(); Self::verify_with_cache(&proof_with_data.batch_summary, validator, proof_cache)?; - // TODO(ibalajiarun): Remove this log when OptQS is enabled. - bail!("OptQuorumStore Payload is not expected yet"); + Ok(()) }, (_, _) => Err(anyhow::anyhow!( "Wrong payload type. Expected Payload::InQuorumStore {} got {} ", diff --git a/consensus/consensus-types/src/lib.rs b/consensus/consensus-types/src/lib.rs index bc70a1ad942f2..27ca8b6f92874 100644 --- a/consensus/consensus-types/src/lib.rs +++ b/consensus/consensus-types/src/lib.rs @@ -13,6 +13,7 @@ pub mod order_vote; pub mod order_vote_msg; pub mod order_vote_proposal; pub mod payload; +pub mod payload_pull_params; pub mod pipeline; pub mod pipeline_execution_result; pub mod pipelined_block; @@ -22,6 +23,7 @@ pub mod proposal_msg; pub mod quorum_cert; pub mod randomness; pub mod request_response; +pub mod round_timeout; pub mod safety_data; pub mod sync_info; pub mod timeout_2chain; diff --git a/consensus/consensus-types/src/order_vote.rs b/consensus/consensus-types/src/order_vote.rs index c2a257cdf83c5..b487f9a1668d8 100644 --- a/consensus/consensus-types/src/order_vote.rs +++ b/consensus/consensus-types/src/order_vote.rs @@ -6,7 +6,10 @@ use crate::common::Author; use anyhow::{ensure, Context}; use aptos_crypto::{bls12381, HashValue}; use aptos_short_hex_str::AsShortHexStr; -use aptos_types::{ledger_info::LedgerInfo, validator_verifier::ValidatorVerifier}; +use aptos_types::{ + ledger_info::{LedgerInfo, SignatureWithStatus}, + validator_verifier::ValidatorVerifier, +}; use serde::{Deserialize, Serialize}; use std::fmt::{Debug, Display, Formatter}; @@ -16,8 +19,8 @@ pub struct OrderVote { author: Author, /// LedgerInfo of a block that is going to be ordered in case this vote gathers QC. ledger_info: LedgerInfo, - /// Signature of the LedgerInfo. 
- signature: bls12381::Signature, + /// Signature on the LedgerInfo along with a status on whether the signature is verified. + signature: SignatureWithStatus, } impl Display for OrderVote { @@ -48,7 +51,7 @@ impl OrderVote { Self { author, ledger_info, - signature, + signature: SignatureWithStatus::from(signature), } } @@ -61,9 +64,25 @@ impl OrderVote { } pub fn signature(&self) -> &bls12381::Signature { + self.signature.signature() + } + + // Question: SignatureWithStatus has interior mutability. Is it okay to expose this? + pub fn signature_with_status(&self) -> &SignatureWithStatus { &self.signature } + pub fn is_verified(&self) -> bool { + self.signature.is_verified() + } + + /// Only the verify method in validator verifier can set the signature status verified. + /// This method additionally lets the tests to set the status to verified. + #[cfg(any(test, feature = "fuzzing"))] + pub fn set_verified(&self) { + self.signature.set_verified(); + } + pub fn epoch(&self) -> u64 { self.ledger_info.epoch() } @@ -75,7 +94,7 @@ impl OrderVote { "Failed to verify OrderVote. 
Consensus data hash is not Zero" ); validator - .verify(self.author(), &self.ledger_info, &self.signature) + .optimistic_verify(self.author(), &self.ledger_info, &self.signature) .context("Failed to verify OrderVote")?; Ok(()) diff --git a/consensus/consensus-types/src/payload_pull_params.rs b/consensus/consensus-types/src/payload_pull_params.rs new file mode 100644 index 0000000000000..682f9b2185194 --- /dev/null +++ b/consensus/consensus-types/src/payload_pull_params.rs @@ -0,0 +1,91 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::{Author, PayloadFilter}, + utils::PayloadTxnsSize, +}; +use std::{collections::HashSet, time::Duration}; + +#[derive(Clone)] +pub struct OptQSPayloadPullParams { + pub exclude_authors: HashSet, + pub minimum_batch_age_usecs: u64, +} + +pub struct PayloadPullParameters { + pub max_poll_time: Duration, + pub max_txns: PayloadTxnsSize, + pub max_txns_after_filtering: u64, + pub soft_max_txns_after_filtering: u64, + pub max_inline_txns: PayloadTxnsSize, + pub user_txn_filter: PayloadFilter, + pub pending_ordering: bool, + pub pending_uncommitted_blocks: usize, + pub recent_max_fill_fraction: f32, + pub block_timestamp: Duration, + pub maybe_optqs_payload_pull_params: Option, +} + +impl std::fmt::Debug for OptQSPayloadPullParams { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("OptQSPayloadPullParams") + .field("exclude_authors", &self.exclude_authors) + .field("minimum_batch_age_useds", &self.minimum_batch_age_usecs) + .finish() + } +} + +impl PayloadPullParameters { + pub fn new_for_test( + max_poll_time: Duration, + max_txns: u64, + max_txns_bytes: u64, + max_txns_after_filtering: u64, + soft_max_txns_after_filtering: u64, + max_inline_txns: u64, + max_inline_txns_bytes: u64, + user_txn_filter: PayloadFilter, + pending_ordering: bool, + pending_uncommitted_blocks: usize, + recent_max_fill_fraction: f32, + block_timestamp: Duration, + ) -> Self 
{ + Self { + max_poll_time, + max_txns: PayloadTxnsSize::new(max_txns, max_txns_bytes), + max_txns_after_filtering, + soft_max_txns_after_filtering, + max_inline_txns: PayloadTxnsSize::new(max_inline_txns, max_inline_txns_bytes), + user_txn_filter, + pending_ordering, + pending_uncommitted_blocks, + recent_max_fill_fraction, + block_timestamp, + maybe_optqs_payload_pull_params: None, + } + } +} + +impl std::fmt::Debug for PayloadPullParameters { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("PayloadPullParameters") + .field("max_poll_time", &self.max_poll_time) + .field("max_items", &self.max_txns) + .field("max_unique_items", &self.max_txns_after_filtering) + .field( + "soft_max_txns_after_filtering", + &self.soft_max_txns_after_filtering, + ) + .field("max_inline_items", &self.max_inline_txns) + .field("pending_ordering", &self.pending_ordering) + .field( + "pending_uncommitted_blocks", + &self.pending_uncommitted_blocks, + ) + .field("recent_max_fill_fraction", &self.recent_max_fill_fraction) + .field("block_timestamp", &self.block_timestamp) + .field("optqs_params", &self.maybe_optqs_payload_pull_params) + .finish() + } +} diff --git a/consensus/consensus-types/src/pipeline/commit_vote.rs b/consensus/consensus-types/src/pipeline/commit_vote.rs index a7ab26d07b271..96a6bd7fde85d 100644 --- a/consensus/consensus-types/src/pipeline/commit_vote.rs +++ b/consensus/consensus-types/src/pipeline/commit_vote.rs @@ -7,7 +7,9 @@ use anyhow::Context; use aptos_crypto::{bls12381, CryptoMaterialError}; use aptos_short_hex_str::AsShortHexStr; use aptos_types::{ - block_info::BlockInfo, ledger_info::LedgerInfo, validator_signer::ValidatorSigner, + block_info::BlockInfo, + ledger_info::{LedgerInfo, SignatureWithStatus}, + validator_signer::ValidatorSigner, validator_verifier::ValidatorVerifier, }; use serde::{Deserialize, Serialize}; @@ -17,7 +19,8 @@ use std::fmt::{Debug, Display, Formatter}; pub struct CommitVote { author: Author, 
ledger_info: LedgerInfo, - signature: bls12381::Signature, + /// Signature on the LedgerInfo along with a status on whether the signature is verified. + signature: SignatureWithStatus, } // this is required by structured log @@ -62,7 +65,7 @@ impl CommitVote { Self { author, ledger_info, - signature, + signature: SignatureWithStatus::from(signature), } } @@ -78,6 +81,13 @@ impl CommitVote { /// Return the signature of the vote pub fn signature(&self) -> &bls12381::Signature { + self.signature.signature() + } + + /// Returns the signature along with the verification status of the signature. + // Note: SignatureWithStatus has interior mutability for verification status. + // Need to make sure the verification status is set to true only the verification is successful. + pub fn signature_with_status(&self) -> &SignatureWithStatus { &self.signature } @@ -93,7 +103,7 @@ impl CommitVote { /// and then verifies the signature. pub fn verify(&self, validator: &ValidatorVerifier) -> anyhow::Result<()> { validator - .verify(self.author(), &self.ledger_info, &self.signature) + .optimistic_verify(self.author(), &self.ledger_info, &self.signature) .context("Failed to verify Commit Vote") } diff --git a/consensus/consensus-types/src/request_response.rs b/consensus/consensus-types/src/request_response.rs index c650141e7878a..f10e35285e532 100644 --- a/consensus/consensus-types/src/request_response.rs +++ b/consensus/consensus-types/src/request_response.rs @@ -3,6 +3,7 @@ use crate::{ common::{Payload, PayloadFilter}, + payload_pull_params::OptQSPayloadPullParams, utils::PayloadTxnsSize, }; use anyhow::Result; @@ -16,8 +17,8 @@ pub struct GetPayloadRequest { pub max_txns_after_filtering: u64, // soft max number of transactions after filtering in the block (i.e. 
include one that crosses it) pub soft_max_txns_after_filtering: u64, - // target txns with opt batches in max_txns as pct - pub opt_batch_txns_pct: u8, + // opt payload pull params + pub maybe_optqs_payload_pull_params: Option, // max number of inline transactions (transactions without a proof of store) pub max_inline_txns: PayloadTxnsSize, // return non full diff --git a/consensus/consensus-types/src/round_timeout.rs b/consensus/consensus-types/src/round_timeout.rs new file mode 100644 index 0000000000000..e16d718f7dd38 --- /dev/null +++ b/consensus/consensus-types/src/round_timeout.rs @@ -0,0 +1,184 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::{Author, Round}, + sync_info::SyncInfo, + timeout_2chain::TwoChainTimeout, +}; +use anyhow::{ensure, Context}; +use aptos_bitvec::BitVec; +use aptos_crypto::bls12381; +use aptos_short_hex_str::AsShortHexStr; +use aptos_types::validator_verifier::ValidatorVerifier; +use serde::{Deserialize, Serialize}; + +#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, Hash, Debug)] +pub enum RoundTimeoutReason { + Unknown, + ProposalNotReceived, + PayloadUnavailable { missing_authors: BitVec }, + NoQC, +} + +impl std::fmt::Display for RoundTimeoutReason { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + RoundTimeoutReason::Unknown => write!(f, "Unknown"), + RoundTimeoutReason::ProposalNotReceived => write!(f, "ProposalNotReceived"), + RoundTimeoutReason::PayloadUnavailable { .. 
} => { + write!(f, "PayloadUnavailable",) + }, + RoundTimeoutReason::NoQC => write!(f, "NoQC"), + } + } +} + +#[derive(Deserialize, Serialize, Clone, PartialEq, Eq)] +pub struct RoundTimeout { + // The timeout + timeout: TwoChainTimeout, + author: Author, + reason: RoundTimeoutReason, + /// Signature on the Timeout + signature: bls12381::Signature, +} + +// this is required by structured log +impl std::fmt::Debug for RoundTimeout { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "{}", self) + } +} + +impl std::fmt::Display for RoundTimeout { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!( + f, + "RoundTimeoutV2: [timeout: {}, author: {}, reason: {}]", + self.timeout, + self.author.short_str(), + self.reason + ) + } +} + +impl RoundTimeout { + pub fn new( + timeout: TwoChainTimeout, + author: Author, + reason: RoundTimeoutReason, + signature: bls12381::Signature, + ) -> Self { + Self { + timeout, + author, + reason, + signature, + } + } + + pub fn epoch(&self) -> u64 { + self.timeout.epoch() + } + + pub fn round(&self) -> Round { + self.timeout.round() + } + + pub fn two_chain_timeout(&self) -> &TwoChainTimeout { + &self.timeout + } + + pub fn author(&self) -> Author { + self.author + } + + pub fn verify(&self, validator: &ValidatorVerifier) -> anyhow::Result<()> { + self.timeout.verify(validator)?; + validator + .verify( + self.author(), + &self.timeout.signing_format(), + &self.signature, + ) + .context("Failed to verify 2-chain timeout signature")?; + Ok(()) + } + + pub fn reason(&self) -> &RoundTimeoutReason { + &self.reason + } + + pub fn signature(&self) -> &bls12381::Signature { + &self.signature + } +} + +#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Eq)] +pub struct RoundTimeoutMsg { + /// The container for the vote (VoteData, LedgerInfo, Signature) + round_timeout: RoundTimeout, + /// Sync info carries information about highest QC, TC and LedgerInfo + sync_info: SyncInfo, +} + +impl 
std::fmt::Display for RoundTimeoutMsg { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!( + f, + "RoundTimeoutV2Msg: [{}], SyncInfo: [{}]", + self.round_timeout, self.sync_info + ) + } +} + +impl RoundTimeoutMsg { + pub fn new(round_timeout: RoundTimeout, sync_info: SyncInfo) -> Self { + Self { + round_timeout, + sync_info, + } + } + + /// SyncInfo of the given vote message + pub fn sync_info(&self) -> &SyncInfo { + &self.sync_info + } + + pub fn epoch(&self) -> u64 { + self.round_timeout.epoch() + } + + pub fn verify(&self, validator: &ValidatorVerifier) -> anyhow::Result<()> { + ensure!( + self.round_timeout.epoch() == self.sync_info.epoch(), + "RoundTimeoutV2Msg has different epoch" + ); + ensure!( + self.round_timeout.round() > self.sync_info.highest_round(), + "Timeout Round should be higher than SyncInfo" + ); + ensure!( + self.round_timeout.two_chain_timeout().hqc_round() + <= self.sync_info.highest_certified_round(), + "2-chain Timeout hqc should be less or equal than the sync info hqc" + ); + // We're not verifying SyncInfo here yet: we are going to verify it only in case we need + // it. This way we avoid verifying O(n) SyncInfo messages while aggregating the votes + // (O(n^2) signature verifications). 
+ self.round_timeout.verify(validator) + } + + pub fn round(&self) -> u64 { + self.round_timeout.round() + } + + pub fn author(&self) -> Author { + self.round_timeout.author() + } + + pub fn timeout(&self) -> RoundTimeout { + self.round_timeout.clone() + } +} diff --git a/consensus/consensus-types/src/sync_info.rs b/consensus/consensus-types/src/sync_info.rs index f4eddf3d50ccf..0c8171016e610 100644 --- a/consensus/consensus-types/src/sync_info.rs +++ b/consensus/consensus-types/src/sync_info.rs @@ -36,13 +36,13 @@ impl Display for SyncInfo { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { write!( f, - "SyncInfo[certified_round: {}, ordered_round: {}, timeout round: {}, committed_round: {},\n hqc: {:?},\n hoc: {:?},\n hcc: {:?}]", + "SyncInfo[certified_round: {}, ordered_round: {}, timeout round: {}, committed_round: {},\n hqc: {},\n hoc: {},\n hcc: {}]", self.highest_certified_round(), self.highest_ordered_round(), self.highest_timeout_round(), self.highest_commit_round(), self.highest_quorum_cert, - self.highest_ordered_cert.as_ref().map_or("None".to_string(), |cert| cert.to_string()), + self.highest_ordered_cert.as_ref().map_or_else(|| "None".to_string(), |cert| cert.to_string()), self.highest_commit_cert, ) } diff --git a/consensus/consensus-types/src/vote.rs b/consensus/consensus-types/src/vote.rs index 8d5868a06c3b1..20400be9a92c8 100644 --- a/consensus/consensus-types/src/vote.rs +++ b/consensus/consensus-types/src/vote.rs @@ -9,7 +9,8 @@ use anyhow::{ensure, Context}; use aptos_crypto::{bls12381, hash::CryptoHash, CryptoMaterialError}; use aptos_short_hex_str::AsShortHexStr; use aptos_types::{ - ledger_info::LedgerInfo, validator_signer::ValidatorSigner, + ledger_info::{LedgerInfo, SignatureWithStatus}, + validator_signer::ValidatorSigner, validator_verifier::ValidatorVerifier, }; use serde::{Deserialize, Serialize}; @@ -21,14 +22,14 @@ use std::fmt::{Debug, Display, Formatter}; /// is gathers QuorumCertificate (see the detailed explanation in the 
comments of `LedgerInfo`). #[derive(Deserialize, Serialize, Clone, PartialEq, Eq)] pub struct Vote { - /// The data of the vote + /// The data of the vote. vote_data: VoteData, /// The identity of the voter. author: Author, /// LedgerInfo of a block that is going to be committed in case this vote gathers QC. ledger_info: LedgerInfo, - /// Signature of the LedgerInfo - signature: bls12381::Signature, + /// Signature on the LedgerInfo along with a status on whether the signature is verified. + signature: SignatureWithStatus, /// The 2-chain timeout and corresponding signature. two_chain_timeout: Option<(TwoChainTimeout, bls12381::Signature)>, } @@ -83,7 +84,7 @@ impl Vote { vote_data, author, ledger_info, - signature, + signature: SignatureWithStatus::from(signature), two_chain_timeout: None, } } @@ -109,9 +110,25 @@ impl Vote { /// Return the signature of the vote pub fn signature(&self) -> &bls12381::Signature { + self.signature.signature() + } + + pub fn signature_with_status(&self) -> &SignatureWithStatus { &self.signature } + /// Returns whether the signature is verified + pub fn is_verified(&self) -> bool { + self.signature.is_verified() + } + + /// Only the verify method in validator verifier can set the signature status verified. + /// This method additionally lets the tests to set the status to verified. + #[cfg(any(test, feature = "fuzzing"))] + pub fn set_verified(&self) { + self.signature.set_verified(); + } + /// Returns the 2-chain timeout. pub fn generate_2chain_timeout(&self, qc: QuorumCert) -> TwoChainTimeout { TwoChainTimeout::new( @@ -140,12 +157,14 @@ impl Vote { /// Verifies that the consensus data hash of LedgerInfo corresponds to the vote info, /// and then verifies the signature. pub fn verify(&self, validator: &ValidatorVerifier) -> anyhow::Result<()> { + // TODO(ibalajiarun): Ensure timeout is None if RoundTimeoutMsg is enabled. 
+ ensure!( self.ledger_info.consensus_data_hash() == self.vote_data.hash(), "Vote's hash mismatch with LedgerInfo" ); validator - .verify(self.author(), &self.ledger_info, &self.signature) + .optimistic_verify(self.author(), &self.ledger_info, &self.signature) .context("Failed to verify Vote")?; if let Some((timeout, signature)) = &self.two_chain_timeout { ensure!( diff --git a/consensus/consensus-types/src/wrapped_ledger_info.rs b/consensus/consensus-types/src/wrapped_ledger_info.rs index ee254af17304b..e0c745053948c 100644 --- a/consensus/consensus-types/src/wrapped_ledger_info.rs +++ b/consensus/consensus-types/src/wrapped_ledger_info.rs @@ -28,11 +28,7 @@ pub struct WrappedLedgerInfo { impl Display for WrappedLedgerInfo { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { - write!( - f, - "WrappedLedgerInfo: [{}, {}]", - self.vote_data, self.signed_ledger_info - ) + write!(f, "WrappedLedgerInfo: [{}]", self.signed_ledger_info) } } diff --git a/consensus/safety-rules/src/fuzzing_utils.rs b/consensus/safety-rules/src/fuzzing_utils.rs index 0ed1f9a0b6735..411bb984afc5f 100644 --- a/consensus/safety-rules/src/fuzzing_utils.rs +++ b/consensus/safety-rules/src/fuzzing_utils.rs @@ -204,10 +204,10 @@ prop_compose! 
{ validator_infos, ); if include_epoch_state { - Some(EpochState { + Some(EpochState::new( epoch, verifier - }) + )) } else { None } diff --git a/consensus/safety-rules/src/tests/suite.rs b/consensus/safety-rules/src/tests/suite.rs index 5ecc32190f748..c6cff3135ee58 100644 --- a/consensus/safety-rules/src/tests/suite.rs +++ b/consensus/safety-rules/src/tests/suite.rs @@ -596,7 +596,7 @@ fn test_validator_not_in_set(safety_rules: &Callback) { next_epoch_state.epoch = 1; let rand_signer = ValidatorSigner::random([0xFu8; 32]); next_epoch_state.verifier = - ValidatorVerifier::new_single(rand_signer.author(), rand_signer.public_key()); + ValidatorVerifier::new_single(rand_signer.author(), rand_signer.public_key()).into(); let a2 = test_utils::make_proposal_with_parent_and_overrides( Payload::empty(false, true), round + 2, @@ -634,7 +634,7 @@ fn test_key_not_in_store(safety_rules: &Callback) { next_epoch_state.epoch = 1; let rand_signer = ValidatorSigner::random([0xFu8; 32]); next_epoch_state.verifier = - ValidatorVerifier::new_single(signer.author(), rand_signer.public_key()); + ValidatorVerifier::new_single(signer.author(), rand_signer.public_key()).into(); let a2 = test_utils::make_proposal_with_parent_and_overrides( Payload::empty(false, true), round + 2, diff --git a/consensus/src/block_storage/block_store.rs b/consensus/src/block_storage/block_store.rs index 8670e161602da..f56dfa80c23eb 100644 --- a/consensus/src/block_storage/block_store.rs +++ b/consensus/src/block_storage/block_store.rs @@ -18,6 +18,7 @@ use crate::{ util::time_service::TimeService, }; use anyhow::{bail, ensure, format_err, Context}; +use aptos_bitvec::BitVec; use aptos_consensus_types::{ block::Block, common::Round, @@ -472,18 +473,19 @@ impl BlockStore { self.pending_blocks.clone() } - pub async fn wait_for_payload(&self, block: &Block) -> anyhow::Result<()> { - tokio::time::timeout( - Duration::from_secs(1), - self.payload_manager.get_transactions(block), - ) - .await??; + pub async fn 
wait_for_payload(&self, block: &Block, deadline: Duration) -> anyhow::Result<()> { + let duration = deadline.saturating_sub(self.time_service.get_current_timestamp()); + tokio::time::timeout(duration, self.payload_manager.get_transactions(block)).await??; Ok(()) } - pub fn check_payload(&self, proposal: &Block) -> bool { + pub fn check_payload(&self, proposal: &Block) -> Result<(), BitVec> { self.payload_manager.check_payload_availability(proposal) } + + pub fn get_block_for_round(&self, round: Round) -> Option> { + self.inner.read().get_block_for_round(round) + } } impl BlockReader for BlockStore { diff --git a/consensus/src/block_storage/block_store_test.rs b/consensus/src/block_storage/block_store_test.rs index 41def8f1c322d..09513e648bca5 100644 --- a/consensus/src/block_storage/block_store_test.rs +++ b/consensus/src/block_storage/block_store_test.rs @@ -300,6 +300,7 @@ async fn test_insert_vote() { voter, ) .unwrap(); + vote.set_verified(); let vote_res = pending_votes.insert_vote(&vote, &validator_verifier); // first vote of an author is accepted @@ -329,6 +330,7 @@ async fn test_insert_vote() { final_voter, ) .unwrap(); + vote.set_verified(); match pending_votes.insert_vote(&vote, &validator_verifier) { VoteReceptionResult::NewQuorumCertificate(qc) => { assert_eq!(qc.certified_block().id(), block.id()); diff --git a/consensus/src/block_storage/block_tree.rs b/consensus/src/block_storage/block_tree.rs index 0edb607579c72..5d1df54149cbf 100644 --- a/consensus/src/block_storage/block_tree.rs +++ b/consensus/src/block_storage/block_tree.rs @@ -15,10 +15,13 @@ use aptos_consensus_types::{ }; use aptos_crypto::HashValue; use aptos_logger::prelude::*; -use aptos_types::{block_info::BlockInfo, ledger_info::LedgerInfoWithSignatures}; +use aptos_types::{ + block_info::{BlockInfo, Round}, + ledger_info::LedgerInfoWithSignatures, +}; use mirai_annotations::{checked_verify_eq, precondition}; use std::{ - collections::{vec_deque::VecDeque, HashMap, HashSet}, + 
collections::{vec_deque::VecDeque, BTreeMap, HashMap, HashSet}, sync::Arc, }; @@ -89,6 +92,9 @@ pub struct BlockTree { pruned_block_ids: VecDeque, /// Num pruned blocks to keep in memory. max_pruned_blocks_in_mem: usize, + + /// Round to Block index. We expect only one block per round. + round_to_ids: BTreeMap, } impl BlockTree { @@ -108,6 +114,8 @@ impl BlockTree { let root_id = root.id(); let mut id_to_block = HashMap::new(); + let mut round_to_ids = BTreeMap::new(); + round_to_ids.insert(root.round(), root_id); id_to_block.insert(root_id, LinkableBlock::new(root)); counters::NUM_BLOCKS_IN_TREE.set(1); @@ -132,6 +140,7 @@ impl BlockTree { pruned_block_ids, max_pruned_blocks_in_mem, highest_2chain_timeout_cert, + round_to_ids, } } @@ -165,7 +174,10 @@ impl BlockTree { fn remove_block(&mut self, block_id: HashValue) { // Remove the block from the store - self.id_to_block.remove(&block_id); + if let Some(block) = self.id_to_block.remove(&block_id) { + let round = block.executed_block().round(); + self.round_to_ids.remove(&round); + }; self.id_to_quorum_cert.remove(&block_id); } @@ -178,6 +190,12 @@ impl BlockTree { .map(|lb| lb.executed_block().clone()) } + pub(super) fn get_block_for_round(&self, round: Round) -> Option> { + self.round_to_ids + .get(&round) + .and_then(|block_id| self.get_block(block_id)) + } + pub(super) fn ordered_root(&self) -> Arc { self.get_block(&self.ordered_root_id) .expect("Root must exist") @@ -241,6 +259,16 @@ impl BlockTree { let linkable_block = LinkableBlock::new(block); let arc_block = Arc::clone(linkable_block.executed_block()); assert!(self.id_to_block.insert(block_id, linkable_block).is_none()); + // Note: the assumption is that we have/enforce unequivocal proposer election. + if let Some(old_block_id) = self.round_to_ids.get(&arc_block.round()) { + warn!( + "Multiple blocks received for round {}. 
Previous block id: {}", + arc_block.round(), + old_block_id + ); + } else { + self.round_to_ids.insert(arc_block.round(), block_id); + } counters::NUM_BLOCKS_IN_TREE.inc(); Ok(arc_block) } diff --git a/consensus/src/consensus_observer/network/observer_message.rs b/consensus/src/consensus_observer/network/observer_message.rs index 1905d162b0354..7b5dc2c7c60c2 100644 --- a/consensus/src/consensus_observer/network/observer_message.rs +++ b/consensus/src/consensus_observer/network/observer_message.rs @@ -998,7 +998,7 @@ mod test { 100, ); let validator_verifier = ValidatorVerifier::new(vec![validator_consensus_info]); - let epoch_state = EpochState::new(current_epoch, validator_verifier.clone()); + let epoch_state = EpochState::new(current_epoch, validator_verifier); // Verify the commit proof and ensure it fails (the signature set is insufficient) let error = commit_decision @@ -1130,7 +1130,7 @@ mod test { 100, ); let validator_verifier = ValidatorVerifier::new(vec![validator_consensus_info]); - let epoch_state = EpochState::new(current_epoch, validator_verifier.clone()); + let epoch_state = EpochState::new(current_epoch, validator_verifier); // Verify the ordered proof and ensure it fails (the signature set is insufficient) let error = ordered_block @@ -1345,7 +1345,7 @@ mod test { 100, ); let validator_verifier = ValidatorVerifier::new(vec![validator_consensus_info]); - let epoch_state = EpochState::new(current_epoch, validator_verifier.clone()); + let epoch_state = EpochState::new(current_epoch, validator_verifier); // Verify the block payload signatures and ensure it fails (the signature set is insufficient) let error = block_payload diff --git a/consensus/src/consensus_observer/observer/active_state.rs b/consensus/src/consensus_observer/observer/active_state.rs index f162fab553e15..bcf0462c75f5c 100644 --- a/consensus/src/consensus_observer/observer/active_state.rs +++ b/consensus/src/consensus_observer/observer/active_state.rs @@ -209,10 +209,10 @@ async fn 
extract_on_chain_configs( let validator_set: ValidatorSet = on_chain_configs .get() .expect("Failed to get the validator set from the on-chain configs!"); - let epoch_state = Arc::new(EpochState { - epoch: on_chain_configs.epoch(), - verifier: (&validator_set).into(), - }); + let epoch_state = Arc::new(EpochState::new( + on_chain_configs.epoch(), + (&validator_set).into(), + )); // Extract the consensus config (or use the default if it's missing) let onchain_consensus_config: anyhow::Result = on_chain_configs.get(); diff --git a/consensus/src/consensus_observer/observer/payload_store.rs b/consensus/src/consensus_observer/observer/payload_store.rs index 59859ec0b82ea..c75ea87fa4322 100644 --- a/consensus/src/consensus_observer/observer/payload_store.rs +++ b/consensus/src/consensus_observer/observer/payload_store.rs @@ -981,8 +981,11 @@ mod test { validator_signer.public_key(), 100, ); - let validator_verifier = ValidatorVerifier::new(vec![validator_consensus_info]); - let epoch_state = EpochState::new(next_epoch, validator_verifier.clone()); + let validator_verifier = Arc::new(ValidatorVerifier::new(vec![validator_consensus_info])); + let epoch_state = EpochState { + epoch: next_epoch, + verifier: validator_verifier.clone(), + }; // Verify the block payload signatures (for this epoch) block_payload_store.verify_payload_signatures(&epoch_state); @@ -997,7 +1000,10 @@ mod test { ); // Create an epoch state for the future epoch (with a non-empty verifier) - let epoch_state = EpochState::new(future_epoch, validator_verifier); + let epoch_state = EpochState { + epoch: future_epoch, + verifier: validator_verifier.clone(), + }; // Verify the block payload signatures (for the future epoch) block_payload_store.verify_payload_signatures(&epoch_state); diff --git a/consensus/src/counters.rs b/consensus/src/counters.rs index 1af6f4f8c6da1..5f655d5a69909 100644 --- a/consensus/src/counters.rs +++ b/consensus/src/counters.rs @@ -955,6 +955,14 @@ pub static 
PENDING_STATE_SYNC_NOTIFICATION: Lazy = Lazy::new(|| { .unwrap() }); +pub static PENDING_COMMIT_NOTIFICATION: Lazy = Lazy::new(|| { + register_int_gauge!( + "aptos_consensus_pending_commit_notification", + "Count of the pending commit notification" + ) + .unwrap() +}); + /// Count of the pending quorum store commit notification. pub static PENDING_QUORUM_STORE_COMMIT_NOTIFICATION: Lazy = Lazy::new(|| { register_int_gauge!( @@ -1311,3 +1319,14 @@ pub static CONSENSUS_PROPOSAL_PAYLOAD_FETCH_DURATION: Lazy = Lazy: ) .unwrap() }); + +pub static CONSENSUS_PROPOSAL_PAYLOAD_BATCH_AVAILABILITY_IN_QS: Lazy = Lazy::new( + || { + register_int_counter_vec!( + "aptos_consensus_proposal_payload_batch_availability", + "The number of batches in payload that are available and missing locally by batch author", + &["author", "is_proof", "state"] + ) + .unwrap() + }, +); diff --git a/consensus/src/dag/adapter.rs b/consensus/src/dag/adapter.rs index 36bd615345b04..36941b875cbdb 100644 --- a/consensus/src/dag/adapter.rs +++ b/consensus/src/dag/adapter.rs @@ -297,11 +297,13 @@ impl StorageAdapter { usize::try_from(*index) .map_err(|_err| anyhow!("index {} out of bounds", index)) .and_then(|index| { - validators.get(index).cloned().ok_or(anyhow!( - "index {} is larger than number of validators {}", - index, - validators.len() - )) + validators.get(index).cloned().ok_or_else(|| { + anyhow!( + "index {} is larger than number of validators {}", + index, + validators.len() + ) + }) }) }) .collect() diff --git a/consensus/src/dag/dag_driver.rs b/consensus/src/dag/dag_driver.rs index 2395ba30ef264..fa0caee1faa8a 100644 --- a/consensus/src/dag/dag_driver.rs +++ b/consensus/src/dag/dag_driver.rs @@ -21,13 +21,14 @@ use crate::{ }, DAGRpcResult, RpcHandler, }, - payload_client::{PayloadClient, PayloadPullParameters}, + payload_client::PayloadClient, }; use anyhow::{bail, ensure}; use aptos_collections::BoundedVecDeque; use aptos_config::config::DagPayloadConfig; use aptos_consensus_types::{ 
common::{Author, Payload, PayloadFilter}, + payload_pull_params::PayloadPullParameters, utils::PayloadTxnsSize, }; use aptos_crypto::hash::CryptoHash; @@ -266,7 +267,7 @@ impl DagDriver { max_txns_after_filtering: max_txns, soft_max_txns_after_filtering: max_txns, max_inline_txns: PayloadTxnsSize::new(100, 100 * 1024), - opt_batch_txns_pct: 0, + maybe_optqs_payload_pull_params: None, user_txn_filter: payload_filter, pending_ordering: false, pending_uncommitted_blocks: 0, diff --git a/consensus/src/dag/tests/dag_driver_tests.rs b/consensus/src/dag/tests/dag_driver_tests.rs index 4e8ace01b5a04..5519a873fb477 100644 --- a/consensus/src/dag/tests/dag_driver_tests.rs +++ b/consensus/src/dag/tests/dag_driver_tests.rs @@ -137,7 +137,7 @@ fn setup( ) -> DagDriver { let epoch_state = Arc::new(EpochState { epoch: 1, - verifier: validator_verifier, + verifier: validator_verifier.into(), }); let mock_ledger_info = LedgerInfo::mock_genesis(None); diff --git a/consensus/src/dag/tests/dag_state_sync_tests.rs b/consensus/src/dag/tests/dag_state_sync_tests.rs index 90d7d246a2e5d..fe046b2cb62fc 100644 --- a/consensus/src/dag/tests/dag_state_sync_tests.rs +++ b/consensus/src/dag/tests/dag_state_sync_tests.rs @@ -157,7 +157,7 @@ async fn test_dag_state_sync() { let validators = validator_verifier.get_ordered_account_addresses(); let epoch_state = Arc::new(EpochState { epoch: 1, - verifier: validator_verifier, + verifier: validator_verifier.into(), }); let storage = Arc::new(MockStorage::new()); diff --git a/consensus/src/dag/tests/dag_test.rs b/consensus/src/dag/tests/dag_test.rs index c050b81c55cbf..f45162618b8b6 100644 --- a/consensus/src/dag/tests/dag_test.rs +++ b/consensus/src/dag/tests/dag_test.rs @@ -138,7 +138,7 @@ fn setup() -> ( let (signers, validator_verifier) = random_validator_verifier(4, None, false); let epoch_state = Arc::new(EpochState { epoch: 1, - verifier: validator_verifier, + verifier: validator_verifier.into(), }); let storage = Arc::new(MockStorage::new()); 
let payload_manager = Arc::new(MockPayloadManager {}); diff --git a/consensus/src/dag/tests/fetcher_test.rs b/consensus/src/dag/tests/fetcher_test.rs index 713c3bb60fabc..b442d8f15db6e 100644 --- a/consensus/src/dag/tests/fetcher_test.rs +++ b/consensus/src/dag/tests/fetcher_test.rs @@ -18,7 +18,7 @@ async fn test_dag_fetcher_receiver() { let (signers, validator_verifier) = random_validator_verifier(4, None, false); let epoch_state = Arc::new(EpochState { epoch: 1, - verifier: validator_verifier, + verifier: validator_verifier.into(), }); let storage = Arc::new(MockStorage::new()); let dag = Arc::new(DagStore::new( diff --git a/consensus/src/dag/tests/helpers.rs b/consensus/src/dag/tests/helpers.rs index ff19b6876e2db..dab407099c303 100644 --- a/consensus/src/dag/tests/helpers.rs +++ b/consensus/src/dag/tests/helpers.rs @@ -8,6 +8,7 @@ use crate::{ }, payload_manager::TPayloadManager, }; +use aptos_bitvec::BitVec; use aptos_consensus_types::{ block::Block, common::{Author, Payload, Round}, @@ -26,7 +27,7 @@ impl TPayloadManager for MockPayloadManager { fn notify_commit(&self, _block_timestamp: u64, _payloads: Vec) {} - fn check_payload_availability(&self, _block: &Block) -> bool { + fn check_payload_availability(&self, _block: &Block) -> Result<(), BitVec> { unimplemented!() } diff --git a/consensus/src/dag/tests/integration_tests.rs b/consensus/src/dag/tests/integration_tests.rs index 2a19bcc3e88b7..f44c8af90d0e7 100644 --- a/consensus/src/dag/tests/integration_tests.rs +++ b/consensus/src/dag/tests/integration_tests.rs @@ -67,10 +67,7 @@ impl DagBootstrapUnit { >, all_signers: Vec, ) -> (Self, UnboundedReceiver) { - let epoch_state = Arc::new(EpochState { - epoch, - verifier: storage.get_validator_set().into(), - }); + let epoch_state = Arc::new(EpochState::new(epoch, storage.get_validator_set().into())); let ledger_info = generate_ledger_info_with_sig(&all_signers, storage.get_ledger_info()); let dag_storage = 
dag_test::MockStorage::new_with_ledger_info(ledger_info, epoch_state.clone()); @@ -136,7 +133,7 @@ fn create_network( playground: &mut NetworkPlayground, id: usize, author: Author, - validators: ValidatorVerifier, + validators: Arc, ) -> ( NetworkSender, Box< @@ -178,6 +175,7 @@ fn bootstrap_nodes( validators: ValidatorVerifier, ) -> (Vec, Vec>) { let peers_and_metadata = playground.peer_protocols(); + let validators = Arc::new(validators); let (nodes, ordered_node_receivers) = signers .iter() .enumerate() @@ -194,7 +192,7 @@ fn bootstrap_nodes( .insert_connection_metadata(peer_network_id, conn_meta) .unwrap(); - let (_, storage) = MockStorage::start_for_testing((&validators).into()); + let (_, storage) = MockStorage::start_for_testing((&*validators).into()); let (network, network_events) = create_network(playground, id, signer.author(), validators.clone()); diff --git a/consensus/src/dag/tests/order_rule_tests.rs b/consensus/src/dag/tests/order_rule_tests.rs index a46ed9b7f0499..8dbdd1a7c7b06 100644 --- a/consensus/src/dag/tests/order_rule_tests.rs +++ b/consensus/src/dag/tests/order_rule_tests.rs @@ -132,7 +132,7 @@ proptest! 
{ let nodes = generate_dag_nodes(&dag, &validators); let epoch_state = Arc::new(EpochState { epoch: 1, - verifier: validator_verifier, + verifier: validator_verifier.into(), }); let mut dag = InMemDag::new_empty(epoch_state.clone(), 0, TEST_DAG_WINDOW); for round_nodes in &nodes { @@ -219,7 +219,7 @@ fn test_order_rule_basic() { let nodes = generate_dag_nodes(&dag, &validators); let epoch_state = Arc::new(EpochState { epoch: 1, - verifier: validator_verifier, + verifier: validator_verifier.into(), }); let mut dag = InMemDag::new_empty(epoch_state.clone(), 0, TEST_DAG_WINDOW); for round_nodes in &nodes { diff --git a/consensus/src/dag/tests/rb_handler_tests.rs b/consensus/src/dag/tests/rb_handler_tests.rs index 8d3c9b2b42932..97d58e31aba1d 100644 --- a/consensus/src/dag/tests/rb_handler_tests.rs +++ b/consensus/src/dag/tests/rb_handler_tests.rs @@ -43,7 +43,7 @@ async fn test_node_broadcast_receiver_succeed() { let (signers, validator_verifier) = random_validator_verifier(4, None, false); let epoch_state = Arc::new(EpochState { epoch: 1, - verifier: validator_verifier.clone(), + verifier: validator_verifier.into(), }); let signers: Vec<_> = signers.into_iter().map(Arc::new).collect(); @@ -101,6 +101,7 @@ async fn test_node_broadcast_receiver_succeed() { #[tokio::test] async fn test_node_broadcast_receiver_failure() { let (signers, validator_verifier) = random_validator_verifier(4, None, false); + let validator_verifier = Arc::new(validator_verifier); let epoch_state = Arc::new(EpochState { epoch: 1, verifier: validator_verifier.clone(), @@ -197,7 +198,7 @@ async fn test_node_broadcast_receiver_storage() { let signers: Vec<_> = signers.into_iter().map(Arc::new).collect(); let epoch_state = Arc::new(EpochState { epoch: 1, - verifier: validator_verifier, + verifier: validator_verifier.into(), }); let storage = Arc::new(MockStorage::new()); diff --git a/consensus/src/epoch_manager.rs b/consensus/src/epoch_manager.rs index a6c43221f9a0d..9dea39ef66045 100644 --- 
a/consensus/src/epoch_manager.rs +++ b/consensus/src/epoch_manager.rs @@ -21,6 +21,7 @@ use crate::{ proposal_generator::{ ChainHealthBackoffConfig, PipelineBackpressureConfig, ProposalGenerator, }, + proposal_status_tracker::{ExponentialWindowFailureTracker, OptQSPullParamsProvider}, proposer_election::ProposerElection, rotating_proposer_election::{choose_leader, RotatingProposer}, round_proposer_election::RoundProposer, @@ -826,6 +827,15 @@ impl EpochManager

{ self.pending_blocks.clone(), )); + let failures_tracker = Arc::new(Mutex::new(ExponentialWindowFailureTracker::new( + 100, + epoch_state.verifier.get_ordered_account_addresses(), + ))); + let opt_qs_payload_param_provider = Arc::new(OptQSPullParamsProvider::new( + self.config.quorum_store.enable_opt_quorum_store, + failures_tracker.clone(), + )); + info!(epoch = epoch, "Create ProposalGenerator"); // txn manager is required both by proposal generator (to pull the proposers) // and by event processor (to update their status). @@ -854,6 +864,7 @@ impl EpochManager

{ self.config .quorum_store .allow_batches_without_pos_in_proposal, + opt_qs_payload_param_provider, ); let (round_manager_tx, round_manager_rx) = aptos_channel::new( QueueStyle::KLAST, @@ -887,6 +898,7 @@ impl EpochManager

{ onchain_randomness_config, onchain_jwk_consensus_config, fast_rand_config, + failures_tracker, ); round_manager.init(last_vote).await; @@ -1063,9 +1075,12 @@ impl EpochManager

{ let validator_set: ValidatorSet = payload .get() .expect("failed to get ValidatorSet from payload"); + let mut verifier: ValidatorVerifier = (&validator_set).into(); + verifier.set_optimistic_sig_verification_flag(self.config.optimistic_sig_verification); + let epoch_state = Arc::new(EpochState { epoch: payload.epoch(), - verifier: (&validator_set).into(), + verifier: verifier.into(), }); self.epoch_state = Some(epoch_state.clone()); @@ -1080,15 +1095,15 @@ impl EpochManager

{ let dkg_state = payload.get::(); if let Err(error) = &onchain_consensus_config { - error!("Failed to read on-chain consensus config {}", error); + warn!("Failed to read on-chain consensus config {}", error); } if let Err(error) = &onchain_execution_config { - error!("Failed to read on-chain execution config {}", error); + warn!("Failed to read on-chain execution config {}", error); } if let Err(error) = &randomness_config_move_struct { - error!("Failed to read on-chain randomness config {}", error); + warn!("Failed to read on-chain randomness config {}", error); } self.epoch_state = Some(epoch_state.clone()); @@ -1484,6 +1499,7 @@ impl EpochManager

{ ConsensusMsg::ProposalMsg(_) | ConsensusMsg::SyncInfo(_) | ConsensusMsg::VoteMsg(_) + | ConsensusMsg::RoundTimeoutMsg(_) | ConsensusMsg::OrderVoteMsg(_) | ConsensusMsg::CommitVoteMsg(_) | ConsensusMsg::CommitDecisionMsg(_) diff --git a/consensus/src/execution_pipeline.rs b/consensus/src/execution_pipeline.rs index 2c5c4ae94e14c..8743ae9fbd06f 100644 --- a/consensus/src/execution_pipeline.rs +++ b/consensus/src/execution_pipeline.rs @@ -14,7 +14,7 @@ use aptos_consensus_types::{block::Block, pipeline_execution_result::PipelineExe use aptos_crypto::HashValue; use aptos_executor_types::{ state_checkpoint_output::StateCheckpointOutput, BlockExecutorTrait, ExecutorError, - ExecutorResult, + ExecutorResult, StateComputeResult, }; use aptos_experimental_runtimes::thread_manager::optimal_min_len; use aptos_logger::{debug, warn}; @@ -35,6 +35,12 @@ use std::{ }; use tokio::sync::{mpsc, oneshot}; +pub type PreCommitHook = Box< + dyn 'static + + FnOnce(&[SignedTransaction], &StateComputeResult) -> BoxFuture<'static, ()> + + Send, +>; + #[allow(clippy::unwrap_used)] pub static SIG_VERIFY_POOL: Lazy> = Lazy::new(|| { Arc::new( @@ -88,6 +94,7 @@ impl ExecutionPipeline { parent_block_id: HashValue, txn_generator: BlockPreparer, block_executor_onchain_config: BlockExecutorConfigFromOnchain, + pre_commit_hook: PreCommitHook, lifetime_guard: CountedRequest<()>, ) -> StateComputeResultFut { let (result_tx, result_rx) = oneshot::channel(); @@ -101,6 +108,7 @@ impl ExecutionPipeline { block_preparer: txn_generator, result_tx, command_creation_time: Instant::now(), + pre_commit_hook, lifetime_guard, }) .expect("Failed to send block to execution pipeline."); @@ -127,6 +135,7 @@ impl ExecutionPipeline { block_executor_onchain_config, parent_block_id, block_preparer, + pre_commit_hook, result_tx, command_creation_time, lifetime_guard, @@ -163,6 +172,7 @@ impl ExecutionPipeline { block: (block.id(), sig_verified_txns).into(), parent_block_id, block_executor_onchain_config, + 
pre_commit_hook, result_tx, command_creation_time: Instant::now(), lifetime_guard, @@ -196,6 +206,7 @@ impl ExecutionPipeline { block, parent_block_id, block_executor_onchain_config, + pre_commit_hook, result_tx, command_creation_time, lifetime_guard, @@ -232,6 +243,7 @@ impl ExecutionPipeline { block_id, parent_block_id, state_checkpoint_output, + pre_commit_hook, result_tx, command_creation_time: Instant::now(), lifetime_guard, @@ -252,6 +264,7 @@ impl ExecutionPipeline { block_id, parent_block_id, state_checkpoint_output: execution_result, + pre_commit_hook, result_tx, command_creation_time, lifetime_guard, @@ -274,6 +287,7 @@ impl ExecutionPipeline { } .await; let pipeline_res = res.map(|(output, execution_duration)| { + let pre_commit_hook_fut = pre_commit_hook(&input_txns, &output); let pre_commit_fut: BoxFuture<'static, ExecutorResult<()>> = if output.epoch_state().is_some() || !enable_pre_commit { // hack: it causes issue if pre-commit is finished at an epoch ending, and @@ -285,7 +299,9 @@ impl ExecutionPipeline { executor.pre_commit_block(block_id, parent_block_id) }) .await - .expect("failed to spawn_blocking") + .expect("failed to spawn_blocking")?; + pre_commit_hook_fut.await; + Ok(()) }) } else { // kick off pre-commit right away @@ -295,6 +311,7 @@ impl ExecutionPipeline { .send(PreCommitCommand { block_id, parent_block_id, + pre_commit_hook_fut, result_tx: pre_commit_result_tx, lifetime_guard, }) @@ -322,6 +339,7 @@ impl ExecutionPipeline { while let Some(PreCommitCommand { block_id, parent_block_id, + pre_commit_hook_fut, result_tx, lifetime_guard, }) = block_rx.recv().await @@ -336,7 +354,9 @@ impl ExecutionPipeline { }) ) .await - .expect("Failed to spawn_blocking().") + .expect("Failed to spawn_blocking().")?; + pre_commit_hook_fut.await; + Ok(()) } .await; result_tx @@ -355,6 +375,7 @@ struct PrepareBlockCommand { // The parent block id. 
parent_block_id: HashValue, block_preparer: BlockPreparer, + pre_commit_hook: PreCommitHook, result_tx: oneshot::Sender>, command_creation_time: Instant, lifetime_guard: CountedRequest<()>, @@ -365,6 +386,7 @@ struct ExecuteBlockCommand { block: ExecutableBlock, parent_block_id: HashValue, block_executor_onchain_config: BlockExecutorConfigFromOnchain, + pre_commit_hook: PreCommitHook, result_tx: oneshot::Sender>, command_creation_time: Instant, lifetime_guard: CountedRequest<()>, @@ -375,6 +397,7 @@ struct LedgerApplyCommand { block_id: HashValue, parent_block_id: HashValue, state_checkpoint_output: ExecutorResult<(StateCheckpointOutput, Duration)>, + pre_commit_hook: PreCommitHook, result_tx: oneshot::Sender>, command_creation_time: Instant, lifetime_guard: CountedRequest<()>, @@ -383,6 +406,7 @@ struct LedgerApplyCommand { struct PreCommitCommand { block_id: HashValue, parent_block_id: HashValue, + pre_commit_hook_fut: BoxFuture<'static, ()>, result_tx: oneshot::Sender>, lifetime_guard: CountedRequest<()>, } diff --git a/consensus/src/lib.rs b/consensus/src/lib.rs index f8545073966bd..3660afb3b49f5 100644 --- a/consensus/src/lib.rs +++ b/consensus/src/lib.rs @@ -31,6 +31,8 @@ mod network_tests; mod payload_client; mod pending_order_votes; mod pending_votes; +#[cfg(test)] +mod pending_votes_test; pub mod persistent_liveness_storage; mod pipeline; pub mod quorum_store; diff --git a/consensus/src/liveness/leader_reputation.rs b/consensus/src/liveness/leader_reputation.rs index dfeabed8181bd..ccd82776d98da 100644 --- a/consensus/src/liveness/leader_reputation.rs +++ b/consensus/src/liveness/leader_reputation.rs @@ -283,11 +283,13 @@ impl NewBlockEventAggregation { usize::try_from(*index) .map_err(|_err| format!("index {} out of bounds", index)) .and_then(|index| { - validators.get(index).ok_or(format!( - "index {} is larger than number of validators {}", - index, - validators.len() - )) + validators.get(index).ok_or_else(|| { + format!( + "index {} is larger than 
number of validators {}", + index, + validators.len() + ) + }) }) }) .collect() diff --git a/consensus/src/liveness/leader_reputation_test.rs b/consensus/src/liveness/leader_reputation_test.rs index 06ba20faa77c7..39bc23c688a06 100644 --- a/consensus/src/liveness/leader_reputation_test.rs +++ b/consensus/src/liveness/leader_reputation_test.rs @@ -655,7 +655,8 @@ fn test_extract_epoch_to_proposers_impl() { .iter() .map(|author| ValidatorConsensusInfo::new(*author, public_key.clone(), 1)) .collect::>(), - ), + ) + .into(), } } diff --git a/consensus/src/liveness/mod.rs b/consensus/src/liveness/mod.rs index f7e8f11bceb05..effa52291246f 100644 --- a/consensus/src/liveness/mod.rs +++ b/consensus/src/liveness/mod.rs @@ -5,6 +5,7 @@ pub(crate) mod cached_proposer_election; pub(crate) mod leader_reputation; pub(crate) mod proposal_generator; +pub(crate) mod proposal_status_tracker; pub(crate) mod proposer_election; pub(crate) mod rotating_proposer_election; pub(crate) mod round_proposer_election; diff --git a/consensus/src/liveness/proposal_generator.rs b/consensus/src/liveness/proposal_generator.rs index 334b0a76fbf4e..47bdea4c9ce95 100644 --- a/consensus/src/liveness/proposal_generator.rs +++ b/consensus/src/liveness/proposal_generator.rs @@ -2,7 +2,9 @@ // Parts of the project are originally copyright © Meta Platforms, Inc. 
// SPDX-License-Identifier: Apache-2.0 -use super::proposer_election::ProposerElection; +use super::{ + proposal_status_tracker::TOptQSPullParamsProvider, proposer_election::ProposerElection, +}; use crate::{ block_storage::BlockReader, counters::{ @@ -12,7 +14,7 @@ use crate::{ PROPOSER_MAX_BLOCK_TXNS_TO_EXECUTE, PROPOSER_PENDING_BLOCKS_COUNT, PROPOSER_PENDING_BLOCKS_FILL_FRACTION, }, - payload_client::{PayloadClient, PayloadPullParameters}, + payload_client::PayloadClient, util::time_service::TimeService, }; use anyhow::{bail, ensure, format_err, Context}; @@ -23,6 +25,7 @@ use aptos_consensus_types::{ block::Block, block_data::BlockData, common::{Author, Payload, PayloadFilter, Round}, + payload_pull_params::PayloadPullParameters, pipelined_block::ExecutionSummary, quorum_cert::QuorumCert, utils::PayloadTxnsSize, @@ -267,6 +270,7 @@ pub struct ProposalGenerator { vtxn_config: ValidatorTxnConfig, allow_batches_without_pos_in_proposal: bool, + opt_qs_payload_param_provider: Arc, } impl ProposalGenerator { @@ -287,6 +291,7 @@ impl ProposalGenerator { quorum_store_enabled: bool, vtxn_config: ValidatorTxnConfig, allow_batches_without_pos_in_proposal: bool, + opt_qs_payload_param_provider: Arc, ) -> Self { Self { author, @@ -305,6 +310,7 @@ impl ProposalGenerator { quorum_store_enabled, vtxn_config, allow_batches_without_pos_in_proposal, + opt_qs_payload_param_provider, } } @@ -353,6 +359,7 @@ impl ProposalGenerator { bail!("Already proposed in the round {}", round); } } + let maybe_optqs_payload_pull_params = self.opt_qs_payload_param_provider.get_params(); let hqc = self.ensure_highest_quorum_cert(round)?; @@ -456,7 +463,7 @@ impl ProposalGenerator { soft_max_txns_after_filtering: max_txns_from_block_to_execute .unwrap_or(max_block_txns_after_filtering), max_inline_txns: self.max_inline_txns, - opt_batch_txns_pct: 0, + maybe_optqs_payload_pull_params, user_txn_filter: payload_filter, pending_ordering, pending_uncommitted_blocks: pending_blocks.len(), diff --git 
a/consensus/src/liveness/proposal_generator_test.rs b/consensus/src/liveness/proposal_generator_test.rs index aae56dc864644..5aa907d7fe672 100644 --- a/consensus/src/liveness/proposal_generator_test.rs +++ b/consensus/src/liveness/proposal_generator_test.rs @@ -8,6 +8,7 @@ use crate::{ proposal_generator::{ ChainHealthBackoffConfig, PipelineBackpressureConfig, ProposalGenerator, }, + proposal_status_tracker::TOptQSPullParamsProvider, rotating_proposer_election::RotatingProposer, unequivocal_proposer_election::UnequivocalProposerElection, }, @@ -17,6 +18,7 @@ use crate::{ use aptos_consensus_types::{ block::{block_test_utils::certificate_for_genesis, Block}, common::Author, + payload_pull_params::OptQSPayloadPullParams, utils::PayloadTxnsSize, }; use aptos_types::{on_chain_config::ValidatorTxnConfig, validator_signer::ValidatorSigner}; @@ -27,6 +29,14 @@ fn empty_callback() -> BoxFuture<'static, ()> { async move {}.boxed() } +struct MockOptQSPayloadProvider {} + +impl TOptQSPullParamsProvider for MockOptQSPayloadProvider { + fn get_params(&self) -> Option { + None + } +} + #[tokio::test] async fn test_proposal_generation_empty_tree() { let signer = ValidatorSigner::random(None); @@ -47,6 +57,7 @@ async fn test_proposal_generation_empty_tree() { false, ValidatorTxnConfig::default_disabled(), true, + Arc::new(MockOptQSPayloadProvider {}), ); let proposer_election = Arc::new(UnequivocalProposerElection::new(Arc::new( RotatingProposer::new(vec![signer.author()], 1), @@ -92,6 +103,7 @@ async fn test_proposal_generation_parent() { false, ValidatorTxnConfig::default_disabled(), true, + Arc::new(MockOptQSPayloadProvider {}), ); let proposer_election = Arc::new(UnequivocalProposerElection::new(Arc::new( RotatingProposer::new(vec![inserter.signer().author()], 1), @@ -167,6 +179,7 @@ async fn test_old_proposal_generation() { false, ValidatorTxnConfig::default_disabled(), true, + Arc::new(MockOptQSPayloadProvider {}), ); let proposer_election = 
Arc::new(UnequivocalProposerElection::new(Arc::new( RotatingProposer::new(vec![inserter.signer().author()], 1), @@ -207,6 +220,7 @@ async fn test_correct_failed_authors() { false, ValidatorTxnConfig::default_disabled(), true, + Arc::new(MockOptQSPayloadProvider {}), ); let proposer_election = Arc::new(UnequivocalProposerElection::new(Arc::new( RotatingProposer::new(vec![author, peer1, peer2], 1), diff --git a/consensus/src/liveness/proposal_status_tracker.rs b/consensus/src/liveness/proposal_status_tracker.rs new file mode 100644 index 0000000000000..23f635e260fb8 --- /dev/null +++ b/consensus/src/liveness/proposal_status_tracker.rs @@ -0,0 +1,210 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use super::round_state::NewRoundReason; +use aptos_collections::BoundedVecDeque; +use aptos_consensus_types::{ + common::Author, payload_pull_params::OptQSPayloadPullParams, round_timeout::RoundTimeoutReason, +}; +use aptos_infallible::Mutex; +use std::{collections::HashSet, sync::Arc}; + +pub trait TPastProposalStatusTracker: Send + Sync { + fn push(&self, status: NewRoundReason); +} + +pub trait TOptQSPullParamsProvider: Send + Sync { + fn get_params(&self) -> Option; +} + +/// A exponential window based algorithm to decide whether to go optimistic or not, based on +/// configurable number of past proposal statuses +/// +/// Initialize the window at 2. +/// - For each proposal failure, double the window up to a MAX size +/// - If there are no failures within the window, then propose optimistic batch +/// - If there are no failures up to MAX proposals, reset the window to 2. 
+pub struct ExponentialWindowFailureTracker { + window: usize, + max_window: usize, + past_round_statuses: BoundedVecDeque, + last_consecutive_success_count: usize, + ordered_authors: Vec, +} + +impl ExponentialWindowFailureTracker { + pub(crate) fn new(max_window: usize, ordered_authors: Vec) -> Self { + Self { + window: 2, + max_window, + past_round_statuses: BoundedVecDeque::new(max_window), + last_consecutive_success_count: 0, + ordered_authors, + } + } + + pub(crate) fn push(&mut self, status: NewRoundReason) { + self.past_round_statuses.push_back(status); + self.compute_failure_window(); + } + + fn last_consecutive_statuses_matching(&self, matcher: F) -> usize + where + F: Fn(&NewRoundReason) -> bool, + { + self.past_round_statuses + .iter() + .rev() + .take_while(|reason| matcher(reason)) + .count() + } + + fn compute_failure_window(&mut self) { + self.last_consecutive_success_count = self.last_consecutive_statuses_matching(|reason| { + !matches!( + reason, + NewRoundReason::Timeout(RoundTimeoutReason::PayloadUnavailable { .. 
}) + ) + }); + if self.last_consecutive_success_count == 0 { + self.window *= 2; + self.window = self.window.min(self.max_window); + } else if self.last_consecutive_success_count == self.past_round_statuses.len() { + self.window = 2; + } + } + + fn get_exclude_authors(&self) -> HashSet { + let mut exclude_authors = HashSet::new(); + + let limit = self.window; + for round_reason in self.past_round_statuses.iter().rev().take(limit) { + if let NewRoundReason::Timeout(RoundTimeoutReason::PayloadUnavailable { + missing_authors, + }) = round_reason + { + for author_idx in missing_authors.iter_ones() { + if let Some(author) = self.ordered_authors.get(author_idx) { + exclude_authors.insert(*author); + } + } + } + } + + exclude_authors + } +} + +impl TPastProposalStatusTracker for Mutex { + fn push(&self, status: NewRoundReason) { + self.lock().push(status) + } +} + +pub struct OptQSPullParamsProvider { + enable_opt_qs: bool, + failure_tracker: Arc>, +} + +impl OptQSPullParamsProvider { + pub fn new( + enable_opt_qs: bool, + failure_tracker: Arc>, + ) -> Self { + Self { + enable_opt_qs, + failure_tracker, + } + } +} + +impl TOptQSPullParamsProvider for OptQSPullParamsProvider { + fn get_params(&self) -> Option { + if !self.enable_opt_qs { + return None; + } + + let tracker = self.failure_tracker.lock(); + + if tracker.last_consecutive_success_count < tracker.window { + return None; + } + + let exclude_authors = tracker.get_exclude_authors(); + Some(OptQSPayloadPullParams { + exclude_authors, + minimum_batch_age_usecs: 50_000_000, + }) + } +} + +#[cfg(test)] +mod tests { + use super::ExponentialWindowFailureTracker; + use crate::liveness::round_state::NewRoundReason; + use aptos_bitvec::BitVec; + use aptos_consensus_types::round_timeout::RoundTimeoutReason; + use aptos_types::validator_verifier::random_validator_verifier; + + #[test] + fn test_exponential_window_failure_tracker() { + let (_signers, verifier) = random_validator_verifier(4, None, false); + let mut tracker = + 
ExponentialWindowFailureTracker::new(100, verifier.get_ordered_account_addresses()); + assert_eq!(tracker.max_window, 100); + + tracker.push(NewRoundReason::QCReady); + assert_eq!(tracker.window, 2); + assert_eq!(tracker.last_consecutive_success_count, 1); + + tracker.push(NewRoundReason::QCReady); + assert_eq!(tracker.window, 2); + assert_eq!(tracker.last_consecutive_success_count, 2); + + tracker.push(NewRoundReason::QCReady); + assert_eq!(tracker.window, 2); + assert_eq!(tracker.last_consecutive_success_count, 3); + + tracker.push(NewRoundReason::Timeout( + RoundTimeoutReason::ProposalNotReceived, + )); + assert_eq!(tracker.window, 2); + assert_eq!(tracker.last_consecutive_success_count, 4); + + tracker.push(NewRoundReason::Timeout(RoundTimeoutReason::NoQC)); + assert_eq!(tracker.window, 2); + assert_eq!(tracker.last_consecutive_success_count, 5); + + tracker.push(NewRoundReason::Timeout(RoundTimeoutReason::Unknown)); + assert_eq!(tracker.window, 2); + assert_eq!(tracker.last_consecutive_success_count, 6); + + tracker.push(NewRoundReason::Timeout( + RoundTimeoutReason::PayloadUnavailable { + missing_authors: BitVec::with_num_bits(4), + }, + )); + assert_eq!(tracker.window, 4); + assert_eq!(tracker.last_consecutive_success_count, 0); + + tracker.push(NewRoundReason::QCReady); + assert_eq!(tracker.window, 4); + assert_eq!(tracker.last_consecutive_success_count, 1); + + // Check that the window does not grow beyond max_window + for _ in 0..10 { + tracker.push(NewRoundReason::Timeout( + RoundTimeoutReason::PayloadUnavailable { + missing_authors: BitVec::with_num_bits(4), + }, + )); + } + assert_eq!(tracker.window, tracker.max_window); + + for _ in 0..tracker.max_window { + tracker.push(NewRoundReason::QCReady); + } + assert_eq!(tracker.window, 2); + assert_eq!(tracker.last_consecutive_success_count, tracker.max_window); + } +} diff --git a/consensus/src/liveness/round_state.rs b/consensus/src/liveness/round_state.rs index 74e78e9c9f024..bef7b31b44fe3 100644 --- 
a/consensus/src/liveness/round_state.rs +++ b/consensus/src/liveness/round_state.rs @@ -4,34 +4,35 @@ use crate::{ counters, - pending_votes::{PendingVotes, VoteReceptionResult}, + pending_votes::{PendingVotes, VoteReceptionResult, VoteStatus}, util::time_service::{SendTask, TimeService}, }; use aptos_consensus_types::{ - common::Round, sync_info::SyncInfo, timeout_2chain::TwoChainTimeoutWithPartialSignatures, + common::Round, + round_timeout::{RoundTimeout, RoundTimeoutReason}, + sync_info::SyncInfo, + timeout_2chain::TwoChainTimeoutWithPartialSignatures, vote::Vote, }; use aptos_crypto::HashValue; use aptos_logger::{prelude::*, Schema}; -use aptos_types::{ - ledger_info::LedgerInfoWithVerifiedSignatures, validator_verifier::ValidatorVerifier, -}; +use aptos_types::validator_verifier::ValidatorVerifier; use futures::future::AbortHandle; use serde::Serialize; use std::{fmt, sync::Arc, time::Duration}; /// A reason for starting a new round: introduced for monitoring / debug purposes. -#[derive(Serialize, Debug, PartialEq, Eq)] +#[derive(Serialize, Debug, PartialEq, Eq, Clone)] pub enum NewRoundReason { QCReady, - Timeout, + Timeout(RoundTimeoutReason), } impl fmt::Display for NewRoundReason { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { NewRoundReason::QCReady => write!(f, "QCReady"), - NewRoundReason::Timeout => write!(f, "TCReady"), + NewRoundReason::Timeout(_) => write!(f, "TCReady"), } } } @@ -45,7 +46,7 @@ pub struct NewRoundEvent { pub round: Round, pub reason: NewRoundReason, pub timeout: Duration, - pub prev_round_votes: Vec<(HashValue, LedgerInfoWithVerifiedSignatures)>, + pub prev_round_votes: Vec<(HashValue, VoteStatus)>, pub prev_round_timeout_votes: Option, } @@ -159,6 +160,8 @@ pub struct RoundState { pending_votes: PendingVotes, // Vote sent locally for the current round. vote_sent: Option, + // Timeout sent locally for the current round. 
+ timeout_sent: Option, // The handle to cancel previous timeout task when moving to next round. abort_handle: Option, } @@ -206,13 +209,14 @@ impl RoundState { timeout_sender, pending_votes, vote_sent: None, + timeout_sent: None, abort_handle: None, } } /// Return if already voted for timeout - pub fn is_vote_timeout(&self) -> bool { - self.vote_sent.as_ref().map_or(false, |v| v.is_timeout()) + pub fn is_timeout_sent(&self) -> bool { + self.vote_sent.as_ref().map_or(false, |v| v.is_timeout()) || self.timeout_sent.is_some() } /// Return the current round. @@ -239,7 +243,11 @@ impl RoundState { /// Notify the RoundState about the potentially new QC, TC, and highest ordered round. /// Note that some of these values might not be available by the caller. - pub fn process_certificates(&mut self, sync_info: SyncInfo) -> Option { + pub fn process_certificates( + &mut self, + sync_info: SyncInfo, + verifier: &ValidatorVerifier, + ) -> Option { if sync_info.highest_ordered_round() > self.highest_ordered_round { self.highest_ordered_round = sync_info.highest_ordered_round(); } @@ -251,14 +259,23 @@ impl RoundState { self.current_round = new_round; self.pending_votes = PendingVotes::new(); self.vote_sent = None; + self.timeout_sent = None; let timeout = self.setup_timeout(1); + + let (prev_round_timeout_votes, prev_round_timeout_reason) = prev_round_timeout_votes + .map(|votes| votes.unpack_aggregate(verifier)) + .unzip(); + // The new round reason is QCReady in case both QC.round + 1 == new_round, otherwise // it's Timeout and TC.round + 1 == new_round. 
let new_round_reason = if sync_info.highest_certified_round() + 1 == new_round { NewRoundReason::QCReady } else { - NewRoundReason::Timeout + let prev_round_timeout_reason = + prev_round_timeout_reason.unwrap_or(RoundTimeoutReason::Unknown); + NewRoundReason::Timeout(prev_round_timeout_reason) }; + let new_round_event = NewRoundEvent { round: self.current_round, reason: new_round_reason, @@ -275,10 +292,10 @@ impl RoundState { pub fn insert_vote( &mut self, vote: &Vote, - verifier: &ValidatorVerifier, + validator_verifier: &ValidatorVerifier, ) -> VoteReceptionResult { if vote.vote_data().proposed().round() == self.current_round { - self.pending_votes.insert_vote(vote, verifier) + self.pending_votes.insert_vote(vote, validator_verifier) } else { VoteReceptionResult::UnexpectedRound( vote.vote_data().proposed().round(), @@ -287,16 +304,38 @@ impl RoundState { } } + pub fn insert_round_timeout( + &mut self, + timeout: &RoundTimeout, + verifier: &ValidatorVerifier, + ) -> VoteReceptionResult { + if timeout.round() == self.current_round { + self.pending_votes.insert_round_timeout(timeout, verifier) + } else { + VoteReceptionResult::UnexpectedRound(timeout.round(), self.current_round) + } + } + pub fn record_vote(&mut self, vote: Vote) { if vote.vote_data().proposed().round() == self.current_round { self.vote_sent = Some(vote); } } + pub fn record_round_timeout(&mut self, timeout: RoundTimeout) { + if timeout.round() == self.current_round { + self.timeout_sent = Some(timeout) + } + } + pub fn vote_sent(&self) -> Option { self.vote_sent.clone() } + pub fn timeout_sent(&self) -> Option { + self.timeout_sent.clone() + } + /// Setup the timeout task and return the duration of the current timeout fn setup_timeout(&mut self, multiplier: u32) -> Duration { let timeout_sender = self.timeout_sender.clone(); @@ -337,7 +376,7 @@ impl RoundState { round = self.current_round, "{:?} passed since the previous deadline.", now.checked_sub(self.current_round_deadline) - .map_or("0 
ms".to_string(), |v| format!("{:?}", v)) + .map_or_else(|| "0 ms".to_string(), |v| format!("{:?}", v)) ); debug!( round = self.current_round, diff --git a/consensus/src/liveness/round_state_test.rs b/consensus/src/liveness/round_state_test.rs index ad2eec8809e53..10027e86c351e 100644 --- a/consensus/src/liveness/round_state_test.rs +++ b/consensus/src/liveness/round_state_test.rs @@ -11,6 +11,7 @@ use crate::{ use aptos_consensus_types::{ common::Round, quorum_cert::QuorumCert, + round_timeout::RoundTimeoutReason, sync_info::SyncInfo, timeout_2chain::{TwoChainTimeout, TwoChainTimeoutCertificate}, vote_data::VoteData, @@ -20,6 +21,7 @@ use aptos_types::{ aggregate_signature::AggregateSignature, block_info::BlockInfo, ledger_info::{LedgerInfo, LedgerInfoWithSignatures}, + validator_verifier::random_validator_verifier, }; use futures::StreamExt; use std::{sync::Arc, time::Duration}; @@ -40,10 +42,11 @@ fn test_round_time_interval() { #[tokio::test] /// Verify that RoundState properly outputs local timeout events upon timeout async fn test_basic_timeout() { + let (_, verifier) = random_validator_verifier(1, None, false); let (mut pm, mut timeout_rx) = make_round_state(); // jump start the round_state - pm.process_certificates(generate_sync_info(Some(0), None, None)); + pm.process_certificates(generate_sync_info(Some(0), None, None), &verifier); for _ in 0..2 { let round = timeout_rx.next().await.unwrap(); // Here we just test timeout send retry, @@ -55,30 +58,31 @@ async fn test_basic_timeout() { #[test] fn test_round_event_generation() { + let (_, verifier) = random_validator_verifier(1, None, false); let (mut pm, _) = make_round_state(); // Happy path with new QC expect_qc( 2, - pm.process_certificates(generate_sync_info(Some(1), None, None)), + pm.process_certificates(generate_sync_info(Some(1), None, None), &verifier), ); // Old QC does not generate anything assert!(pm - .process_certificates(generate_sync_info(Some(1), None, None)) + 
.process_certificates(generate_sync_info(Some(1), None, None), &verifier) .is_none()); // A TC for a higher round expect_timeout( 3, - pm.process_certificates(generate_sync_info(None, Some(2), None)), + pm.process_certificates(generate_sync_info(None, Some(2), None), &verifier), ); // In case both QC and TC are present choose the one with the higher value expect_timeout( 4, - pm.process_certificates(generate_sync_info(Some(2), Some(3), None)), + pm.process_certificates(generate_sync_info(Some(2), Some(3), None), &verifier), ); // In case both QC and TC are present with the same value, choose QC expect_qc( 5, - pm.process_certificates(generate_sync_info(Some(4), Some(4), None)), + pm.process_certificates(generate_sync_info(Some(4), Some(4), None), &verifier), ); } @@ -101,7 +105,10 @@ fn expect_qc(round: Round, event: Option) { fn expect_timeout(round: Round, event: Option) { let event = event.unwrap(); assert_eq!(round, event.round); - assert_eq!(event.reason, NewRoundReason::Timeout); + assert_eq!( + event.reason, + NewRoundReason::Timeout(RoundTimeoutReason::Unknown) + ); } fn generate_sync_info( diff --git a/consensus/src/logging.rs b/consensus/src/logging.rs index 27cba58a3280a..a6eb19cfc658b 100644 --- a/consensus/src/logging.rs +++ b/consensus/src/logging.rs @@ -40,6 +40,7 @@ pub enum LogEvent { ReceiveProposal, ReceiveSyncInfo, ReceiveVote, + ReceiveRoundTimeout, ReceiveOrderVote, RetrieveBlock, StateSync, diff --git a/consensus/src/network.rs b/consensus/src/network.rs index 517c01fce472c..99b437f69c957 100644 --- a/consensus/src/network.rs +++ b/consensus/src/network.rs @@ -29,6 +29,7 @@ use aptos_consensus_types::{ pipeline::{commit_decision::CommitDecision, commit_vote::CommitVote}, proof_of_store::{ProofOfStore, ProofOfStoreMsg, SignedBatchInfo, SignedBatchInfoMsg}, proposal_msg::ProposalMsg, + round_timeout::RoundTimeoutMsg, sync_info::SyncInfo, vote_msg::VoteMsg, }; @@ -198,7 +199,7 @@ pub struct NetworkSender { // Self sender and self receivers 
provide a shortcut for sending the messages to itself. // (self sending is not supported by the networking API). self_sender: aptos_channels::UnboundedSender>, - validators: ValidatorVerifier, + validators: Arc, time_service: aptos_time_service::TimeService, } @@ -207,7 +208,7 @@ impl NetworkSender { author: Author, consensus_network_client: ConsensusNetworkClient>, self_sender: aptos_channels::UnboundedSender>, - validators: ValidatorVerifier, + validators: Arc, ) -> Self { NetworkSender { author, @@ -317,6 +318,8 @@ impl NetworkSender { } pub fn broadcast_without_self(&self, msg: ConsensusMsg) { + fail_point!("consensus::send::any", |_| ()); + let self_author = self.author; let mut other_validators: Vec<_> = self .validators @@ -405,6 +408,12 @@ impl NetworkSender { self.broadcast(msg).await } + pub async fn broadcast_round_timeout(&self, round_timeout: RoundTimeoutMsg) { + fail_point!("consensus::send::round_timeout", |_| ()); + let msg = ConsensusMsg::RoundTimeoutMsg(Box::new(round_timeout)); + self.broadcast(msg).await + } + pub async fn broadcast_order_vote(&self, order_vote_msg: OrderVoteMsg) { fail_point!("consensus::send::order_vote", |_| ()); let msg = ConsensusMsg::OrderVoteMsg(Box::new(order_vote_msg)); @@ -749,6 +758,7 @@ impl NetworkTask { }, consensus_msg @ (ConsensusMsg::ProposalMsg(_) | ConsensusMsg::VoteMsg(_) + | ConsensusMsg::RoundTimeoutMsg(_) | ConsensusMsg::OrderVoteMsg(_) | ConsensusMsg::SyncInfo(_) | ConsensusMsg::EpochRetrievalRequest(_) diff --git a/consensus/src/network_interface.rs b/consensus/src/network_interface.rs index ea5b8646074f0..297b66ea7cfaf 100644 --- a/consensus/src/network_interface.rs +++ b/consensus/src/network_interface.rs @@ -18,6 +18,7 @@ use aptos_consensus_types::{ pipeline::{commit_decision::CommitDecision, commit_vote::CommitVote}, proof_of_store::{ProofOfStoreMsg, SignedBatchInfoMsg}, proposal_msg::ProposalMsg, + round_timeout::RoundTimeoutMsg, sync_info::SyncInfo, vote_msg::VoteMsg, }; @@ -80,6 +81,8 @@ pub enum 
ConsensusMsg { /// OrderVoteMsg is the struct that is broadcasted by a validator on receiving quorum certificate /// on a block. OrderVoteMsg(Box), + /// RoundTimeoutMsg is broadcasted by a validator once it decides to timeout the current round. + RoundTimeoutMsg(Box), } /// Network type for consensus @@ -107,6 +110,7 @@ impl ConsensusMsg { ConsensusMsg::CommitMessage(_) => "CommitMessage", ConsensusMsg::RandGenMessage(_) => "RandGenMessage", ConsensusMsg::BatchResponseV2(_) => "BatchResponseV2", + ConsensusMsg::RoundTimeoutMsg(_) => "RoundTimeoutV2", } } } diff --git a/consensus/src/network_tests.rs b/consensus/src/network_tests.rs index f1d18d90d6e7c..10d33f3b87a63 100644 --- a/consensus/src/network_tests.rs +++ b/consensus/src/network_tests.rs @@ -625,6 +625,7 @@ mod tests { let mut playground = NetworkPlayground::new(runtime.handle().clone()); let mut nodes = Vec::new(); let (signers, validator_verifier) = random_validator_verifier(num_nodes, None, false); + let validator_verifier = Arc::new(validator_verifier); let peers: Vec<_> = signers.iter().map(|signer| signer.author()).collect(); let peers_and_metadata = PeersAndMetadata::new(&[NetworkId::Validator]); @@ -741,6 +742,7 @@ mod tests { let mut playground = NetworkPlayground::new(runtime.handle().clone()); let mut nodes = Vec::new(); let (signers, validator_verifier) = random_validator_verifier(num_nodes, None, false); + let validator_verifier = Arc::new(validator_verifier); let peers: Vec<_> = signers.iter().map(|signer| signer.author()).collect(); let peers_and_metadata = PeersAndMetadata::new(&[NetworkId::Validator]); diff --git a/consensus/src/payload_client/mixed.rs b/consensus/src/payload_client/mixed.rs index 5e35b5aff6bae..afc981ab4a3b8 100644 --- a/consensus/src/payload_client/mixed.rs +++ b/consensus/src/payload_client/mixed.rs @@ -1,12 +1,13 @@ // Copyright © Aptos Foundation // SPDX-License-Identifier: Apache-2.0 -use super::PayloadPullParameters; use crate::{ error::QuorumStoreError, 
payload_client::{user::UserPayloadClient, PayloadClient}, }; -use aptos_consensus_types::{common::Payload, utils::PayloadTxnsSize}; +use aptos_consensus_types::{ + common::Payload, payload_pull_params::PayloadPullParameters, utils::PayloadTxnsSize, +}; use aptos_logger::debug; use aptos_types::{on_chain_config::ValidatorTxnConfig, validator_txn::ValidatorTransaction}; use aptos_validator_transaction_pool::TransactionFilter; @@ -112,9 +113,11 @@ impl PayloadClient for MixedPayloadClient { mod tests { use crate::payload_client::{ mixed::MixedPayloadClient, user, validator::DummyValidatorTxnClient, PayloadClient, - PayloadPullParameters, }; - use aptos_consensus_types::common::{Payload, PayloadFilter}; + use aptos_consensus_types::{ + common::{Payload, PayloadFilter}, + payload_pull_params::PayloadPullParameters, + }; use aptos_types::{on_chain_config::ValidatorTxnConfig, validator_txn::ValidatorTransaction}; use aptos_validator_transaction_pool as vtxn_pool; use std::{collections::HashSet, sync::Arc, time::Duration}; diff --git a/consensus/src/payload_client/mod.rs b/consensus/src/payload_client/mod.rs index 1b769faa9c36a..e38ba3194329f 100644 --- a/consensus/src/payload_client/mod.rs +++ b/consensus/src/payload_client/mod.rs @@ -2,88 +2,15 @@ // SPDX-License-Identifier: Apache-2.0 use crate::error::QuorumStoreError; -use aptos_consensus_types::{ - common::{Payload, PayloadFilter}, - utils::PayloadTxnsSize, -}; +use aptos_consensus_types::{common::Payload, payload_pull_params::PayloadPullParameters}; use aptos_types::validator_txn::ValidatorTransaction; use aptos_validator_transaction_pool::TransactionFilter; -use core::fmt; use futures::future::BoxFuture; -use std::time::Duration; pub mod mixed; pub mod user; pub mod validator; -pub struct PayloadPullParameters { - pub max_poll_time: Duration, - pub max_txns: PayloadTxnsSize, - pub max_txns_after_filtering: u64, - pub soft_max_txns_after_filtering: u64, - pub max_inline_txns: PayloadTxnsSize, - pub 
opt_batch_txns_pct: u8, - pub user_txn_filter: PayloadFilter, - pub pending_ordering: bool, - pub pending_uncommitted_blocks: usize, - pub recent_max_fill_fraction: f32, - pub block_timestamp: Duration, -} - -impl PayloadPullParameters { - #[cfg(test)] - fn new_for_test( - max_poll_time: Duration, - max_txns: u64, - max_txns_bytes: u64, - max_txns_after_filtering: u64, - soft_max_txns_after_filtering: u64, - max_inline_txns: u64, - max_inline_txns_bytes: u64, - user_txn_filter: PayloadFilter, - pending_ordering: bool, - pending_uncommitted_blocks: usize, - recent_max_fill_fraction: f32, - block_timestamp: Duration, - ) -> Self { - Self { - max_poll_time, - max_txns: PayloadTxnsSize::new(max_txns, max_txns_bytes), - max_txns_after_filtering, - soft_max_txns_after_filtering, - max_inline_txns: PayloadTxnsSize::new(max_inline_txns, max_inline_txns_bytes), - opt_batch_txns_pct: 0, - user_txn_filter, - pending_ordering, - pending_uncommitted_blocks, - recent_max_fill_fraction, - block_timestamp, - } - } -} - -impl fmt::Debug for PayloadPullParameters { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("PayloadPullParameters") - .field("max_poll_time", &self.max_poll_time) - .field("max_items", &self.max_txns) - .field("max_unique_items", &self.max_txns_after_filtering) - .field( - "soft_max_txns_after_filtering", - &self.soft_max_txns_after_filtering, - ) - .field("max_inline_items", &self.max_inline_txns) - .field("pending_ordering", &self.pending_ordering) - .field( - "pending_uncommitted_blocks", - &self.pending_uncommitted_blocks, - ) - .field("recent_max_fill_fraction", &self.recent_max_fill_fraction) - .field("block_timestamp", &self.block_timestamp) - .finish() - } -} - #[async_trait::async_trait] pub trait PayloadClient: Send + Sync { async fn pull_payload( diff --git a/consensus/src/payload_client/user/mod.rs b/consensus/src/payload_client/user/mod.rs index 9d6cafbed2322..e3f2ca8acba43 100644 --- 
a/consensus/src/payload_client/user/mod.rs +++ b/consensus/src/payload_client/user/mod.rs @@ -1,9 +1,8 @@ // Copyright © Aptos Foundation // SPDX-License-Identifier: Apache-2.0 -use super::PayloadPullParameters; use crate::error::QuorumStoreError; -use aptos_consensus_types::common::Payload; +use aptos_consensus_types::{common::Payload, payload_pull_params::PayloadPullParameters}; #[cfg(test)] use aptos_types::transaction::SignedTransaction; use futures::future::BoxFuture; diff --git a/consensus/src/payload_client/user/quorum_store_client.rs b/consensus/src/payload_client/user/quorum_store_client.rs index b145ba1f76f61..c8c541208c863 100644 --- a/consensus/src/payload_client/user/quorum_store_client.rs +++ b/consensus/src/payload_client/user/quorum_store_client.rs @@ -2,13 +2,12 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{ - counters::WAIT_FOR_FULL_BLOCKS_TRIGGERED, - error::QuorumStoreError, - monitor, - payload_client::{user::UserPayloadClient, PayloadPullParameters}, + counters::WAIT_FOR_FULL_BLOCKS_TRIGGERED, error::QuorumStoreError, monitor, + payload_client::user::UserPayloadClient, }; use aptos_consensus_types::{ common::{Payload, PayloadFilter}, + payload_pull_params::{OptQSPayloadPullParams, PayloadPullParameters}, request_response::{GetPayloadCommand, GetPayloadRequest, GetPayloadResponse}, utils::PayloadTxnsSize, }; @@ -52,7 +51,7 @@ impl QuorumStoreClient { max_txns_after_filtering: u64, soft_max_txns_after_filtering: u64, max_inline_txns: PayloadTxnsSize, - txns_with_proofs_pct: u8, + maybe_optqs_payload_pull_params: Option, return_non_full: bool, exclude_payloads: PayloadFilter, block_timestamp: Duration, @@ -62,7 +61,7 @@ impl QuorumStoreClient { max_txns, max_txns_after_filtering, soft_max_txns_after_filtering, - opt_batch_txns_pct: txns_with_proofs_pct, + maybe_optqs_payload_pull_params, max_inline_txns, filter: exclude_payloads, return_non_full, @@ -119,7 +118,7 @@ impl UserPayloadClient for QuorumStoreClient { 
params.max_txns_after_filtering, params.soft_max_txns_after_filtering, params.max_inline_txns, - params.opt_batch_txns_pct, + params.maybe_optqs_payload_pull_params.clone(), return_non_full || return_empty || done, params.user_txn_filter.clone(), params.block_timestamp, diff --git a/consensus/src/payload_manager.rs b/consensus/src/payload_manager.rs index c2e7c580fb9b3..16f3e305ea585 100644 --- a/consensus/src/payload_manager.rs +++ b/consensus/src/payload_manager.rs @@ -10,6 +10,7 @@ use crate::{ counters, quorum_store::{batch_store::BatchReader, quorum_store_coordinator::CoordinatorCommand}, }; +use aptos_bitvec::BitVec; use aptos_consensus_types::{ block::Block, common::{DataStatus, Payload, ProofWithData, Round}, @@ -26,8 +27,9 @@ use aptos_logger::prelude::*; use aptos_types::{transaction::SignedTransaction, PeerId}; use async_trait::async_trait; use futures::{channel::mpsc::Sender, FutureExt}; +use itertools::Itertools; use std::{ - collections::{btree_map::Entry, BTreeMap}, + collections::{btree_map::Entry, BTreeMap, HashMap}, ops::Deref, sync::Arc, }; @@ -48,7 +50,7 @@ pub trait TPayloadManager: Send + Sync { /// Check if the transactions corresponding are available. This is specific to payload /// manager implementations. For optimistic quorum store, we only check if optimistic /// batches are available locally. - fn check_payload_availability(&self, block: &Block) -> bool; + fn check_payload_availability(&self, block: &Block) -> Result<(), BitVec>; /// Get the transactions in a block's payload. This function returns a vector of transactions. 
async fn get_transactions( @@ -72,8 +74,8 @@ impl TPayloadManager for DirectMempoolPayloadManager { fn prefetch_payload_data(&self, _payload: &Payload, _timestamp: u64) {} - fn check_payload_availability(&self, _block: &Block) -> bool { - true + fn check_payload_availability(&self, _block: &Block) -> Result<(), BitVec> { + Ok(()) } async fn get_transactions( @@ -103,6 +105,7 @@ pub struct QuorumStorePayloadManager { coordinator_tx: Sender, maybe_consensus_publisher: Option>, ordered_authors: Vec, + address_to_validator_index: HashMap, } impl QuorumStorePayloadManager { @@ -111,12 +114,14 @@ impl QuorumStorePayloadManager { coordinator_tx: Sender, maybe_consensus_publisher: Option>, ordered_authors: Vec, + address_to_validator_index: HashMap, ) -> Self { Self { batch_reader, coordinator_tx, maybe_consensus_publisher, ordered_authors, + address_to_validator_index, } } @@ -294,25 +299,81 @@ impl TPayloadManager for QuorumStorePayloadManager { }; } - fn check_payload_availability(&self, block: &Block) -> bool { + fn check_payload_availability(&self, block: &Block) -> Result<(), BitVec> { let Some(payload) = block.payload() else { - return true; + return Ok(()); }; match payload { Payload::DirectMempool(_) => { unreachable!("QuorumStore doesn't support DirectMempool payload") }, - Payload::InQuorumStore(_) => true, - Payload::InQuorumStoreWithLimit(_) => true, - Payload::QuorumStoreInlineHybrid(_, _, _) => true, + Payload::InQuorumStore(_) => Ok(()), + Payload::InQuorumStoreWithLimit(_) => Ok(()), + Payload::QuorumStoreInlineHybrid(inline_batches, proofs, _) => { + fn update_availability_metrics<'a>( + batch_reader: &Arc, + is_proof_label: &str, + batch_infos: impl Iterator, + ) { + for (author, chunk) in &batch_infos.chunk_by(|info| info.author()) { + let (available_count, missing_count) = chunk + .map(|info| batch_reader.exists(info.digest())) + .fold((0, 0), |(available_count, missing_count), item| { + if item.is_some() { + (available_count + 1, missing_count) + } 
else { + (available_count, missing_count + 1) + } + }); + counters::CONSENSUS_PROPOSAL_PAYLOAD_BATCH_AVAILABILITY_IN_QS + .with_label_values(&[ + &author.to_hex_literal(), + is_proof_label, + "available", + ]) + .inc_by(available_count as u64); + counters::CONSENSUS_PROPOSAL_PAYLOAD_BATCH_AVAILABILITY_IN_QS + .with_label_values(&[ + &author.to_hex_literal(), + is_proof_label, + "missing", + ]) + .inc_by(missing_count as u64); + } + } + + update_availability_metrics( + &self.batch_reader, + "false", + inline_batches.iter().map(|(batch_info, _)| batch_info), + ); + update_availability_metrics( + &self.batch_reader, + "true", + proofs.proofs.iter().map(|proof| proof.info()), + ); + + // The payload is considered available because it contains only proofs that guarantee network availability + // or inlined transactions. + Ok(()) + }, Payload::OptQuorumStore(opt_qs_payload) => { + let mut missing_authors = BitVec::with_num_bits(self.ordered_authors.len() as u16); for batch in opt_qs_payload.opt_batches().deref() { if self.batch_reader.exists(batch.digest()).is_none() { - return false; + let index = *self + .address_to_validator_index + .get(&batch.author()) + .expect("Payload author should have been verified"); + missing_authors.set(index as u16); } } - true + if missing_authors.all_zeros() { + Ok(()) + } else { + Err(missing_authors) + } }, } } @@ -402,7 +463,7 @@ impl TPayloadManager for QuorumStorePayloadManager { ) .await?; let inline_batch_txns = opt_qs_payload.inline_batches().transactions(); - let all_txns = [opt_batch_txns, proof_batch_txns, inline_batch_txns].concat(); + let all_txns = [proof_batch_txns, opt_batch_txns, inline_batch_txns].concat(); BlockTransactionPayload::new_opt_quorum_store( all_txns, opt_qs_payload.proof_with_data().deref().clone(), @@ -685,7 +746,7 @@ impl TPayloadManager for ConsensusObserverPayloadManager { // noop } - fn check_payload_availability(&self, _block: &Block) -> bool { + fn check_payload_availability(&self, _block: &Block) -> 
Result<(), BitVec> { unreachable!("this method isn't used in ConsensusObserver") } diff --git a/consensus/src/pending_order_votes.rs b/consensus/src/pending_order_votes.rs index 46cf23cfe2b90..58aaf8b90a852 100644 --- a/consensus/src/pending_order_votes.rs +++ b/consensus/src/pending_order_votes.rs @@ -2,12 +2,12 @@ // Parts of the project are originally copyright © Meta Platforms, Inc. // SPDX-License-Identifier: Apache-2.0 +use crate::counters; use aptos_consensus_types::{common::Author, order_vote::OrderVote, quorum_cert::QuorumCert}; use aptos_crypto::{hash::CryptoHash, HashValue}; use aptos_logger::prelude::*; use aptos_types::{ - aggregate_signature::PartialSignatures, - ledger_info::{LedgerInfo, LedgerInfoWithSignatures, LedgerInfoWithVerifiedSignatures}, + ledger_info::{LedgerInfo, LedgerInfoWithSignatures, LedgerInfoWithUnverifiedSignatures}, validator_verifier::{ValidatorVerifier, VerifyError}, }; use std::{collections::HashMap, sync::Arc}; @@ -33,12 +33,12 @@ pub enum OrderVoteReceptionResult { #[derive(Debug, PartialEq, Eq)] enum OrderVoteStatus { EnoughVotes(LedgerInfoWithSignatures), - NotEnoughVotes(LedgerInfoWithVerifiedSignatures), + NotEnoughVotes(LedgerInfoWithUnverifiedSignatures), } /// A PendingVotes structure keep track of order votes for the last few rounds pub struct PendingOrderVotes { - /// Maps LedgerInfo digest to associated signatures (contained in a partial LedgerInfoWithSignatures). + /// Maps LedgerInfo digest to associated signatures. /// Order vote status stores caches the information on whether the votes are enough to form a QC. /// We also store the QC that the order votes certify. 
li_digest_to_votes: @@ -75,9 +75,8 @@ impl PendingOrderVotes { verified_quorum_cert.expect( "Quorum Cert is expected when creating a new entry in pending order votes", ), - OrderVoteStatus::NotEnoughVotes(LedgerInfoWithVerifiedSignatures::new( + OrderVoteStatus::NotEnoughVotes(LedgerInfoWithUnverifiedSignatures::new( order_vote.ledger_info().clone(), - PartialSignatures::empty(), )), ) }); @@ -110,16 +109,20 @@ impl PendingOrderVotes { order_vote.author() ); } - li_with_sig.add_signature(order_vote.author(), order_vote.signature().clone()); - // check if we have enough signatures to create a QC - match validator_verifier.check_voting_power(li_with_sig.signatures().keys(), true) { - // a quorum of signature was reached, a new QC is formed + li_with_sig.add_signature(order_vote.author(), order_vote.signature_with_status()); + match li_with_sig.check_voting_power(validator_verifier, true) { Ok(aggregated_voting_power) => { assert!( aggregated_voting_power >= validator_verifier.quorum_voting_power(), "QC aggregation should not be triggered if we don't have enough votes to form a QC" ); - match li_with_sig.aggregate_signatures(validator_verifier) { + let verification_result = { + let _timer = counters::VERIFY_MSG + .with_label_values(&["order_vote_aggregate_and_verify"]) + .start_timer(); + li_with_sig.aggregate_and_verify(validator_verifier) + }; + match verification_result { Ok(ledger_info_with_sig) => { *status = OrderVoteStatus::EnoughVotes(ledger_info_with_sig.clone()); @@ -128,16 +131,15 @@ impl PendingOrderVotes { ledger_info_with_sig, )) }, + Err(VerifyError::TooLittleVotingPower { voting_power, .. }) => { + OrderVoteReceptionResult::VoteAdded(voting_power) + }, Err(e) => OrderVoteReceptionResult::ErrorAggregatingSignature(e), } }, - - // not enough votes Err(VerifyError::TooLittleVotingPower { voting_power, .. 
}) => { OrderVoteReceptionResult::VoteAdded(voting_power) }, - - // error Err(error) => { error!( "MUST_FIX: order vote received could not be added: {}, order vote: {}", @@ -175,11 +177,11 @@ impl PendingOrderVotes { #[cfg(test)] mod tests { - use super::{OrderVoteReceptionResult, PendingOrderVotes}; + use super::{OrderVoteReceptionResult, OrderVoteStatus, PendingOrderVotes}; use aptos_consensus_types::{order_vote::OrderVote, quorum_cert::QuorumCert}; - use aptos_crypto::HashValue; + use aptos_crypto::{bls12381, hash::CryptoHash, HashValue}; use aptos_types::{ - block_info::BlockInfo, ledger_info::LedgerInfo, + aggregate_signature::PartialSignatures, block_info::BlockInfo, ledger_info::LedgerInfo, validator_verifier::random_validator_verifier, }; @@ -195,7 +197,7 @@ mod tests { fn order_vote_aggregation() { ::aptos_logger::Logger::init_for_testing(); // set up 4 validators - let (signers, validator) = random_validator_verifier(4, Some(2), false); + let (signers, verifier) = random_validator_verifier(4, Some(2), false); let mut pending_order_votes = PendingOrderVotes::new(); @@ -209,22 +211,19 @@ mod tests { ); // first time a new order vote is added -> OrderVoteAdded + order_vote_1_author_0.set_verified(); assert_eq!( pending_order_votes.insert_order_vote( &order_vote_1_author_0, - &validator, + &verifier, Some(qc.clone()) ), - OrderVoteReceptionResult::VoteAdded(1), + OrderVoteReceptionResult::VoteAdded(1) ); // same author voting for the same thing -> OrderVoteAdded assert_eq!( - pending_order_votes.insert_order_vote( - &order_vote_1_author_0, - &validator, - Some(qc.clone()) - ), + pending_order_votes.insert_order_vote(&order_vote_1_author_0, &verifier, None), OrderVoteReceptionResult::VoteAdded(1) ); @@ -235,13 +234,14 @@ mod tests { li2.clone(), signers[1].sign(&li2).expect("Unable to sign ledger info"), ); + order_vote_2_author_1.set_verified(); assert_eq!( pending_order_votes.insert_order_vote( &order_vote_2_author_1, - &validator, + &verifier, 
Some(qc.clone()) ), - OrderVoteReceptionResult::VoteAdded(1), + OrderVoteReceptionResult::VoteAdded(1) ); assert!(!pending_order_votes.has_enough_order_votes(&li1)); @@ -252,13 +252,10 @@ mod tests { li2.clone(), signers[2].sign(&li2).expect("Unable to sign ledger info"), ); - match pending_order_votes.insert_order_vote( - &order_vote_2_author_2, - &validator, - Some(qc.clone()), - ) { - OrderVoteReceptionResult::NewLedgerInfoWithSignatures((_, li_with_sig)) => { - assert!(li_with_sig.check_voting_power(&validator).is_ok()); + order_vote_2_author_2.set_verified(); + match pending_order_votes.insert_order_vote(&order_vote_2_author_2, &verifier, None) { + OrderVoteReceptionResult::NewLedgerInfoWithSignatures((_qc, li_with_sig)) => { + assert!(li_with_sig.check_voting_power(&verifier).is_ok()); }, _ => { panic!("No QC formed."); @@ -271,4 +268,111 @@ mod tests { assert!(!pending_order_votes.has_enough_order_votes(&li1)); assert!(!pending_order_votes.has_enough_order_votes(&li2)); } + + #[test] + fn order_vote_aggregation_with_unverified_votes() { + ::aptos_logger::Logger::init_for_testing(); + + let (signers, verifier) = random_validator_verifier(5, Some(3), false); + let mut pending_order_votes = PendingOrderVotes::new(); + let mut partial_signatures = PartialSignatures::empty(); + let qc = QuorumCert::dummy(); + + // create random vote from validator[0] + let li = random_ledger_info(); + let li_hash = li.hash(); + let vote_0 = OrderVote::new_with_signature( + signers[0].author(), + li.clone(), + signers[0].sign(&li).expect("Unable to sign ledger info"), + ); + partial_signatures.add_signature(signers[0].author(), vote_0.signature().clone()); + + let vote_1 = OrderVote::new_with_signature( + signers[1].author(), + li.clone(), + signers[1].sign(&li).expect("Unable to sign ledger info"), + ); + partial_signatures.add_signature(signers[1].author(), vote_1.signature().clone()); + + let vote_2 = OrderVote::new_with_signature( + signers[2].author(), + li.clone(), + 
bls12381::Signature::dummy_signature(), + ); + + let vote_3 = OrderVote::new_with_signature( + signers[3].author(), + li.clone(), + signers[3].sign(&li).expect("Unable to sign ledger info"), + ); + partial_signatures.add_signature(signers[3].author(), vote_3.signature().clone()); + + let vote_4 = OrderVote::new_with_signature( + signers[4].author(), + li.clone(), + signers[4].sign(&li).expect("Unable to sign ledger info"), + ); + + assert_eq!( + pending_order_votes.insert_order_vote(&vote_0, &verifier, Some(qc.clone())), + OrderVoteReceptionResult::VoteAdded(1) + ); + + vote_0.set_verified(); + assert_eq!( + pending_order_votes.insert_order_vote(&vote_0, &verifier, None), + OrderVoteReceptionResult::VoteAdded(1) + ); + + assert_eq!( + pending_order_votes.insert_order_vote(&vote_1, &verifier, None), + OrderVoteReceptionResult::VoteAdded(2) + ); + + assert_eq!(verifier.pessimistic_verify_set().len(), 0); + assert_eq!( + pending_order_votes.insert_order_vote(&vote_2, &verifier, None), + OrderVoteReceptionResult::VoteAdded(2) + ); + assert_eq!(verifier.pessimistic_verify_set().len(), 1); + let (_, order_vote_status) = pending_order_votes + .li_digest_to_votes + .get(&li_hash) + .unwrap(); + match order_vote_status { + OrderVoteStatus::NotEnoughVotes(li_with_sig) => { + assert_eq!(li_with_sig.verified_voters().count(), 2); + assert_eq!(li_with_sig.unverified_voters().count(), 0); + }, + _ => { + panic!("QC should not be formed yet."); + }, + } + + let aggregate_sig = verifier + .aggregate_signatures(partial_signatures.signatures_iter()) + .unwrap(); + match pending_order_votes.insert_order_vote(&vote_3, &verifier, None) { + OrderVoteReceptionResult::NewLedgerInfoWithSignatures((_qc, li_with_sig)) => { + assert!(li_with_sig.check_voting_power(&verifier).is_ok()); + + assert_eq!(li_with_sig.signatures().clone(), aggregate_sig.clone()); + }, + _ => { + panic!("No QC formed."); + }, + }; + + match pending_order_votes.insert_order_vote(&vote_4, &verifier, None) { + 
OrderVoteReceptionResult::NewLedgerInfoWithSignatures((_qc, li_with_sig)) => { + assert!(li_with_sig.check_voting_power(&verifier).is_ok()); + + assert_eq!(li_with_sig.signatures().clone(), aggregate_sig.clone()); + }, + _ => { + panic!("No QC formed."); + }, + }; + } } diff --git a/consensus/src/pending_votes.rs b/consensus/src/pending_votes.rs index b2177d2c5889a..e6651681766b3 100644 --- a/consensus/src/pending_votes.rs +++ b/consensus/src/pending_votes.rs @@ -9,25 +9,23 @@ //! Votes are automatically dropped when the structure goes out of scope. use crate::counters; +use aptos_bitvec::BitVec; use aptos_consensus_types::{ common::Author, quorum_cert::QuorumCert, - timeout_2chain::{TwoChainTimeoutCertificate, TwoChainTimeoutWithPartialSignatures}, + round_timeout::{RoundTimeout, RoundTimeoutReason}, + timeout_2chain::{ + TwoChainTimeout, TwoChainTimeoutCertificate, TwoChainTimeoutWithPartialSignatures, + }, vote::Vote, - vote_data::VoteData, }; -use aptos_crypto::{hash::CryptoHash, HashValue}; +use aptos_crypto::{bls12381, hash::CryptoHash, HashValue}; use aptos_logger::prelude::*; use aptos_types::{ - aggregate_signature::PartialSignatures, - ledger_info::LedgerInfoWithVerifiedSignatures, + ledger_info::{LedgerInfoWithSignatures, LedgerInfoWithUnverifiedSignatures}, validator_verifier::{ValidatorVerifier, VerifyError}, }; -use std::{ - collections::{BTreeMap, HashMap}, - fmt, - sync::Arc, -}; +use std::{collections::HashMap, fmt, sync::Arc}; /// Result of the vote processing. The failure case (Verification error) is returned /// as the Error part of the result. @@ -54,17 +52,124 @@ pub enum VoteReceptionResult { UnexpectedRound(u64, u64), /// Receive f+1 timeout to trigger a local timeout, return the amount of voting power TC currently has. 
EchoTimeout(u128), + /// The author of the vote is unknown + UnknownAuthor(Author), +} + +#[derive(Debug, PartialEq, Eq)] +pub enum VoteStatus { + EnoughVotes(LedgerInfoWithSignatures), + NotEnoughVotes(LedgerInfoWithUnverifiedSignatures), +} + +#[derive(Debug)] +pub(super) struct TwoChainTimeoutVotes { + timeout_reason: HashMap, + partial_2chain_tc: TwoChainTimeoutWithPartialSignatures, +} + +impl TwoChainTimeoutVotes { + pub(super) fn new(timeout: TwoChainTimeout) -> Self { + Self { + partial_2chain_tc: TwoChainTimeoutWithPartialSignatures::new(timeout.clone()), + timeout_reason: HashMap::new(), + } + } + + pub(super) fn add( + &mut self, + author: Author, + timeout: TwoChainTimeout, + signature: bls12381::Signature, + reason: RoundTimeoutReason, + ) { + self.partial_2chain_tc.add(author, timeout, signature); + self.timeout_reason.entry(author).or_insert(reason); + } + + pub(super) fn partial_2chain_tc_mut(&mut self) -> &mut TwoChainTimeoutWithPartialSignatures { + &mut self.partial_2chain_tc + } + + fn aggregated_timeout_reason(&self, verifier: &ValidatorVerifier) -> RoundTimeoutReason { + let mut reason_voting_power: HashMap = HashMap::new(); + let mut missing_batch_authors: HashMap = HashMap::new(); + // let ordered_authors = verifier.get_ordered_account_addresses(); + for (author, reason) in &self.timeout_reason { + // To aggregate the reason, we only care about the variant type itself and + // exclude any data within the variants. 
+ let reason_key = match reason { + reason @ RoundTimeoutReason::Unknown + | reason @ RoundTimeoutReason::ProposalNotReceived + | reason @ RoundTimeoutReason::NoQC => reason.clone(), + RoundTimeoutReason::PayloadUnavailable { missing_authors } => { + for missing_idx in missing_authors.iter_ones() { + *missing_batch_authors.entry(missing_idx).or_default() += + verifier.get_voting_power(author).unwrap_or_default() as u128; + } + RoundTimeoutReason::PayloadUnavailable { + // Since we care only about the variant type, we replace the bitvec + // with a placeholder. + missing_authors: BitVec::with_num_bits(verifier.len() as u16), + } + }, + }; + *reason_voting_power.entry(reason_key).or_default() += + verifier.get_voting_power(author).unwrap_or_default() as u128; + } + // The aggregated timeout reason is the reason with the most voting power received from + // at least f+1 peers by voting power. If such voting power does not exist, then the + // reason is unknown. + + reason_voting_power + .into_iter() + .max_by_key(|(_, voting_power)| *voting_power) + .filter(|(_, voting_power)| { + verifier + .check_aggregated_voting_power(*voting_power, false) + .is_ok() + }) + .map(|(reason, _)| { + // If the aggregated reason is due to unavailable payload, we will compute the + // aggregated missing authors bitvec counting batch authors that have been reported + // missing by minority peers. + if matches!(reason, RoundTimeoutReason::PayloadUnavailable { .. 
}) { + let mut aggregated_bitvec = BitVec::with_num_bits(verifier.len() as u16); + for (author_idx, voting_power) in missing_batch_authors { + if verifier + .check_aggregated_voting_power(voting_power, false) + .is_ok() + { + aggregated_bitvec.set(author_idx as u16); + } + } + RoundTimeoutReason::PayloadUnavailable { + missing_authors: aggregated_bitvec, + } + } else { + reason + } + }) + .unwrap_or(RoundTimeoutReason::Unknown) + } + + pub(crate) fn unpack_aggregate( + self, + verifier: &ValidatorVerifier, + ) -> (TwoChainTimeoutWithPartialSignatures, RoundTimeoutReason) { + let aggregated_reason = self.aggregated_timeout_reason(verifier); + (self.partial_2chain_tc, aggregated_reason) + } } /// A PendingVotes structure keep track of votes pub struct PendingVotes { - /// Maps LedgerInfo digest to associated signatures (contained in a partial LedgerInfoWithSignatures). + /// Maps LedgerInfo digest to associated signatures. /// This might keep multiple LedgerInfos for the current round: either due to different proposals (byzantine behavior) /// or due to different NIL proposals (clients can have a different view of what block to extend). - li_digest_to_votes: - HashMap, + li_digest_to_votes: HashMap, /// Tracks all the signatures of the 2-chain timeout for the given round. - maybe_partial_2chain_tc: Option, + maybe_2chain_timeout_votes: Option, /// Map of Author to (vote, li_digest). This is useful to discard multiple votes. author_to_vote: HashMap, /// Whether we have echoed timeout for this round. 
@@ -76,12 +181,96 @@ impl PendingVotes { pub fn new() -> Self { PendingVotes { li_digest_to_votes: HashMap::new(), - maybe_partial_2chain_tc: None, + maybe_2chain_timeout_votes: None, author_to_vote: HashMap::new(), echo_timeout: false, } } + /// Insert a RoundTimeout and return a TimeoutCertificate if it can be formed + pub fn insert_round_timeout( + &mut self, + round_timeout: &RoundTimeout, + validator_verifier: &ValidatorVerifier, + ) -> VoteReceptionResult { + // + // Let's check if we can create a TC + // + + let timeout = round_timeout.two_chain_timeout(); + let signature = round_timeout.signature(); + + let validator_voting_power = validator_verifier + .get_voting_power(&round_timeout.author()) + .unwrap_or(0); + if validator_voting_power == 0 { + warn!( + "Received vote with no voting power, from {}", + round_timeout.author() + ); + } + let cur_epoch = round_timeout.epoch(); + let cur_round = round_timeout.round(); + + counters::CONSENSUS_CURRENT_ROUND_TIMEOUT_VOTED_POWER + .with_label_values(&[&round_timeout.author().to_string()]) + .set(validator_voting_power as f64); + counters::CONSENSUS_LAST_TIMEOUT_VOTE_EPOCH + .with_label_values(&[&round_timeout.author().to_string()]) + .set(cur_epoch as i64); + counters::CONSENSUS_LAST_TIMEOUT_VOTE_ROUND + .with_label_values(&[&round_timeout.author().to_string()]) + .set(cur_round as i64); + + let two_chain_votes = self + .maybe_2chain_timeout_votes + .get_or_insert_with(|| TwoChainTimeoutVotes::new(timeout.clone())); + two_chain_votes.add( + round_timeout.author(), + timeout.clone(), + signature.clone(), + round_timeout.reason().clone(), + ); + + let partial_tc = two_chain_votes.partial_2chain_tc_mut(); + let tc_voting_power = + match validator_verifier.check_voting_power(partial_tc.signers(), true) { + Ok(_) => { + return match partial_tc.aggregate_signatures(validator_verifier) { + Ok(tc_with_sig) => { + VoteReceptionResult::New2ChainTimeoutCertificate(Arc::new(tc_with_sig)) + }, + Err(e) => 
VoteReceptionResult::ErrorAggregatingTimeoutCertificate(e), + }; + }, + Err(VerifyError::TooLittleVotingPower { voting_power, .. }) => voting_power, + Err(error) => { + error!( + "MUST_FIX: 2-chain timeout vote received could not be added: {}, vote: {}", + error, timeout + ); + return VoteReceptionResult::ErrorAddingVote(error); + }, + }; + + // Echo timeout if receive f+1 timeout message. + if !self.echo_timeout { + let f_plus_one = validator_verifier.total_voting_power() + - validator_verifier.quorum_voting_power() + + 1; + if tc_voting_power >= f_plus_one { + self.echo_timeout = true; + return VoteReceptionResult::EchoTimeout(tc_voting_power); + } + } + + // + // No TC could be formed, return the TC's voting power + // + + VoteReceptionResult::VoteAdded(tc_voting_power) + } + /// Insert a vote and if the vote is valid, return a QuorumCertificate preferentially over a /// TimeoutCertificate if either can can be formed pub fn insert_vote( @@ -133,21 +322,23 @@ impl PendingVotes { let len = self.li_digest_to_votes.len() + 1; // obtain the ledger info with signatures associated to the vote's ledger info - let (hash_index, li_with_sig) = - self.li_digest_to_votes.entry(li_digest).or_insert_with(|| { - // if the ledger info with signatures doesn't exist yet, create it - ( - len, - LedgerInfoWithVerifiedSignatures::new( - vote.ledger_info().clone(), - PartialSignatures::empty(), - ), - ) - }); - - let validator_voting_power = validator_verifier - .get_voting_power(&vote.author()) - .unwrap_or(0); + let (hash_index, status) = self.li_digest_to_votes.entry(li_digest).or_insert_with(|| { + ( + len, + VoteStatus::NotEnoughVotes(LedgerInfoWithUnverifiedSignatures::new( + vote.ledger_info().clone(), + )), + ) + }); + + let validator_voting_power = validator_verifier.get_voting_power(&vote.author()); + + if validator_voting_power.is_none() { + warn!("Received vote from an unknown author: {}", vote.author()); + return VoteReceptionResult::UnknownAuthor(vote.author()); + } + 
let validator_voting_power = + validator_voting_power.expect("Author must exist in the validator set."); if validator_voting_power == 0 { warn!("Received vote with no voting power, from {}", vote.author()); } @@ -168,39 +359,58 @@ impl PendingVotes { .set(cur_round); } - // add this vote to the ledger info with signatures - li_with_sig.add_signature(vote.author(), vote.signature().clone()); + let voting_power = match status { + VoteStatus::EnoughVotes(li_with_sig) => { + return VoteReceptionResult::NewQuorumCertificate(Arc::new(QuorumCert::new( + vote.vote_data().clone(), + li_with_sig.clone(), + ))); + }, + VoteStatus::NotEnoughVotes(li_with_sig) => { + // add this vote to the ledger info with signatures + li_with_sig.add_signature(vote.author(), vote.signature_with_status()); - // check if we have enough signatures to create a QC - let voting_power = match validator_verifier - .check_voting_power(li_with_sig.signatures().keys(), true) - { - // a quorum of signature was reached, a new QC is formed - Ok(aggregated_voting_power) => { - assert!( - aggregated_voting_power >= validator_verifier.quorum_voting_power(), - "QC aggregation should not be triggered if we don't have enough votes to form a QC" - ); - match li_with_sig.aggregate_signatures(validator_verifier) { - Ok(ledger_info_with_sig) => { - return VoteReceptionResult::NewQuorumCertificate(Arc::new( - QuorumCert::new(vote.vote_data().clone(), ledger_info_with_sig), - )) + // check if we have enough signatures to create a QC + match li_with_sig.check_voting_power(validator_verifier, true) { + // a quorum of signature was reached, a new QC is formed + Ok(aggregated_voting_power) => { + assert!( + aggregated_voting_power >= validator_verifier.quorum_voting_power(), + "QC aggregation should not be triggered if we don't have enough votes to form a QC" + ); + let verification_result = { + let _timer = counters::VERIFY_MSG + .with_label_values(&["vote_aggregate_and_verify"]) + .start_timer(); + + 
li_with_sig.aggregate_and_verify(validator_verifier) + }; + match verification_result { + Ok(ledger_info_with_sig) => { + *status = VoteStatus::EnoughVotes(ledger_info_with_sig.clone()); + return VoteReceptionResult::NewQuorumCertificate(Arc::new( + QuorumCert::new(vote.vote_data().clone(), ledger_info_with_sig), + )); + }, + Err(VerifyError::TooLittleVotingPower { voting_power, .. }) => { + voting_power + }, + Err(e) => return VoteReceptionResult::ErrorAggregatingSignature(e), + } }, - Err(e) => return VoteReceptionResult::ErrorAggregatingSignature(e), - } - }, - // not enough votes - Err(VerifyError::TooLittleVotingPower { voting_power, .. }) => voting_power, + // not enough votes + Err(VerifyError::TooLittleVotingPower { voting_power, .. }) => voting_power, - // error - Err(error) => { - error!( - "MUST_FIX: vote received could not be added: {}, vote: {}", - error, vote - ); - return VoteReceptionResult::ErrorAddingVote(error); + // error + Err(error) => { + error!( + "MUST_FIX: vote received could not be added: {}, vote: {}", + error, vote + ); + return VoteReceptionResult::ErrorAddingVote(error); + }, + } }, }; @@ -219,10 +429,17 @@ impl PendingVotes { .with_label_values(&[&vote.author().to_string()]) .set(cur_round); - let partial_tc = self - .maybe_partial_2chain_tc - .get_or_insert_with(|| TwoChainTimeoutWithPartialSignatures::new(timeout.clone())); - partial_tc.add(vote.author(), timeout.clone(), signature.clone()); + let two_chain_votes = self + .maybe_2chain_timeout_votes + .get_or_insert_with(|| TwoChainTimeoutVotes::new(timeout.clone())); + two_chain_votes.add( + vote.author(), + timeout.clone(), + signature.clone(), + RoundTimeoutReason::Unknown, + ); + + let partial_tc = two_chain_votes.partial_2chain_tc_mut(); let tc_voting_power = match validator_verifier.check_voting_power(partial_tc.signers(), true) { Ok(_) => { @@ -262,64 +479,7 @@ impl PendingVotes { VoteReceptionResult::VoteAdded(voting_power) } - pub fn aggregate_qc_now( - validator_verifier: 
&ValidatorVerifier, - li_with_sig: &LedgerInfoWithVerifiedSignatures, - vote_data: &VoteData, - ) -> VoteReceptionResult { - match li_with_sig.aggregate_signatures(validator_verifier) { - Ok(ledger_info_with_sig) => VoteReceptionResult::NewQuorumCertificate(Arc::new( - QuorumCert::new(vote_data.clone(), ledger_info_with_sig), - )), - Err(e) => VoteReceptionResult::ErrorAggregatingSignature(e), - } - } - - pub fn process_delayed_qc( - &mut self, - validator_verifier: &ValidatorVerifier, - vote: Vote, - ) -> VoteReceptionResult { - let li_digest = vote.ledger_info().hash(); - match self.li_digest_to_votes.get_mut(&li_digest) { - Some((_, li_with_sig)) => { - match validator_verifier.check_voting_power(li_with_sig.signatures().keys(), true) { - // a quorum of signature was reached, a new QC is formed - Ok(_) => { - Self::aggregate_qc_now(validator_verifier, li_with_sig, vote.vote_data()) - }, - - // not enough votes - Err(VerifyError::TooLittleVotingPower { .. }) => { - panic!("Delayed QC aggregation should not be triggered if we don't have enough votes to form a QC"); - }, - - // error - Err(error) => { - error!( - "MUST_FIX: vote received could not be added: {}, vote: {}", - error, vote - ); - VoteReceptionResult::ErrorAddingVote(error) - }, - } - }, - None => { - error!( - "No LedgerInfoWithSignatures found for the given digest: {}", - li_digest - ); - VoteReceptionResult::ErrorAddingVote(VerifyError::EmptySignature) - }, - } - } - - pub fn drain_votes( - &mut self, - ) -> ( - Vec<(HashValue, LedgerInfoWithVerifiedSignatures)>, - Option, - ) { + pub fn drain_votes(&mut self) -> (Vec<(HashValue, VoteStatus)>, Option) { for (hash_index, _) in self.li_digest_to_votes.values() { let hash_index_str = hash_index_to_str(*hash_index); for author in self.author_to_vote.keys() { @@ -328,8 +488,8 @@ impl PendingVotes { .set(0_f64); } } - if let Some(partial_tc) = &self.maybe_partial_2chain_tc { - for author in partial_tc.signers() { + if let Some(votes) = 
&self.maybe_2chain_timeout_votes { + for author in votes.partial_2chain_tc.signers() { counters::CONSENSUS_CURRENT_ROUND_TIMEOUT_VOTED_POWER .with_label_values(&[&author.to_string()]) .set(0_f64); @@ -339,9 +499,9 @@ impl PendingVotes { ( self.li_digest_to_votes .drain() - .map(|(key, (_, li))| (key, li)) + .map(|(key, (_, vote_status))| (key, vote_status)) .collect(), - self.maybe_partial_2chain_tc.take(), + self.maybe_2chain_timeout_votes.take(), ) } } @@ -360,25 +520,30 @@ fn hash_index_to_str(hash_index: usize) -> String { impl fmt::Display for PendingVotes { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // collect votes per ledger info - let votes = self - .li_digest_to_votes - .iter() - .map(|(li_digest, (_, li))| (li_digest, li.signatures().keys().collect::>())) - .collect::>(); + write!(f, "PendingVotes: [")?; + + for (li_digest, (_, status)) in self.li_digest_to_votes.iter() { + match status { + VoteStatus::EnoughVotes(_li) => { + write!(f, "LI {} has aggregated QC", li_digest)?; + }, + VoteStatus::NotEnoughVotes(li) => { + write!( + f, + "LI {} has {} verified votes, {} unverified votes", + li_digest, + li.verified_voters().count(), + li.unverified_voters().count(), + )?; + }, + } + } // collect timeout votes let timeout_votes = self - .maybe_partial_2chain_tc + .maybe_2chain_timeout_votes .as_ref() - .map(|partial_tc| partial_tc.signers().collect::>()); - - // write - write!(f, "PendingVotes: [")?; - - for (hash, authors) in votes { - write!(f, "LI {} has {} votes {:?} ", hash, authors.len(), authors)?; - } + .map(|votes| votes.partial_2chain_tc.signers().collect::>()); if let Some(authors) = timeout_votes { write!(f, "{} timeout {:?}", authors.len(), authors)?; @@ -394,13 +559,13 @@ impl fmt::Display for PendingVotes { #[cfg(test)] mod tests { - use super::{PendingVotes, VoteReceptionResult}; + use super::{PendingVotes, VoteReceptionResult, VoteStatus}; use aptos_consensus_types::{ block::block_test_utils::certificate_for_genesis, vote::Vote, 
vote_data::VoteData, }; - use aptos_crypto::HashValue; + use aptos_crypto::{bls12381, hash::CryptoHash, HashValue}; use aptos_types::{ - block_info::BlockInfo, ledger_info::LedgerInfo, + aggregate_signature::PartialSignatures, block_info::BlockInfo, ledger_info::LedgerInfo, validator_verifier::random_validator_verifier, }; use itertools::Itertools; @@ -425,7 +590,7 @@ mod tests { ::aptos_logger::Logger::init_for_testing(); // set up 4 validators - let (signers, validator) = random_validator_verifier(4, Some(2), false); + let (signers, validator_verifier) = random_validator_verifier(4, Some(2), false); let mut pending_votes = PendingVotes::new(); // create random vote from validator[0] @@ -434,15 +599,16 @@ mod tests { let vote_data_1_author_0 = Vote::new(vote_data_1, signers[0].author(), li1, &signers[0]).unwrap(); + vote_data_1_author_0.set_verified(); // first time a new vote is added -> VoteAdded assert_eq!( - pending_votes.insert_vote(&vote_data_1_author_0, &validator), + pending_votes.insert_vote(&vote_data_1_author_0, &validator_verifier), VoteReceptionResult::VoteAdded(1) ); // same author voting for the same thing -> DuplicateVote assert_eq!( - pending_votes.insert_vote(&vote_data_1_author_0, &validator), + pending_votes.insert_vote(&vote_data_1_author_0, &validator_verifier), VoteReceptionResult::DuplicateVote ); @@ -456,8 +622,9 @@ mod tests { &signers[0], ) .unwrap(); + vote_data_2_author_0.set_verified(); assert_eq!( - pending_votes.insert_vote(&vote_data_2_author_0, &validator), + pending_votes.insert_vote(&vote_data_2_author_0, &validator_verifier), VoteReceptionResult::EquivocateVote ); @@ -469,22 +636,160 @@ mod tests { &signers[1], ) .unwrap(); + vote_data_2_author_1.set_verified(); assert_eq!( - pending_votes.insert_vote(&vote_data_2_author_1, &validator), + pending_votes.insert_vote(&vote_data_2_author_1, &validator_verifier), VoteReceptionResult::VoteAdded(1) ); // two votes for the ledger info -> NewQuorumCertificate let vote_data_2_author_2 = 
Vote::new(vote_data_2, signers[2].author(), li2, &signers[2]).unwrap(); - match pending_votes.insert_vote(&vote_data_2_author_2, &validator) { + vote_data_2_author_2.set_verified(); + match pending_votes.insert_vote(&vote_data_2_author_2, &validator_verifier) { + VoteReceptionResult::NewQuorumCertificate(qc) => { + assert!(qc + .ledger_info() + .check_voting_power(&validator_verifier) + .is_ok()); + }, + _ => { + panic!("No QC formed."); + }, + }; + } + + #[test] + fn test_qc_aggregation_with_unverified_votes() { + ::aptos_logger::Logger::init_for_testing(); + + // set up 4 validators + let (signers, validator_verifier) = random_validator_verifier(7, Some(3), false); + let mut pending_votes = PendingVotes::new(); + + // create random vote from validator[0] + let mut li = random_ledger_info(); + let vote_data = random_vote_data(); + li.set_consensus_data_hash(vote_data.hash()); + let li_hash = li.hash(); + + let mut partial_sigs = PartialSignatures::empty(); + + let vote_0 = Vote::new( + vote_data.clone(), + signers[0].author(), + li.clone(), + &signers[0], + ) + .unwrap(); + + let vote_1 = Vote::new( + vote_data.clone(), + signers[1].author(), + li.clone(), + &signers[1], + ) + .unwrap(); + + let vote_2 = Vote::new_with_signature( + vote_data.clone(), + signers[2].author(), + li.clone(), + bls12381::Signature::dummy_signature(), + ); + + let vote_3 = Vote::new( + vote_data.clone(), + signers[3].author(), + li.clone(), + &signers[3], + ) + .unwrap(); + + let vote_4 = Vote::new( + vote_data.clone(), + signers[4].author(), + li.clone(), + &signers[4], + ) + .unwrap(); + + // first time a new vote is added -> VoteAdded + assert_eq!( + pending_votes.insert_vote(&vote_0, &validator_verifier), + VoteReceptionResult::VoteAdded(1) + ); + partial_sigs.add_signature(signers[0].author(), vote_0.signature().clone()); + + // same author voting for the same thing -> DuplicateVote + vote_0.set_verified(); + assert_eq!( + pending_votes.insert_vote(&vote_0, &validator_verifier), + 
VoteReceptionResult::DuplicateVote + ); + + assert_eq!( + pending_votes.insert_vote(&vote_1, &validator_verifier), + VoteReceptionResult::VoteAdded(2) + ); + partial_sigs.add_signature(signers[1].author(), vote_1.signature().clone()); + + assert_eq!(validator_verifier.pessimistic_verify_set().len(), 0); + + assert_eq!( + pending_votes.insert_vote(&vote_2, &validator_verifier), + VoteReceptionResult::VoteAdded(2) + ); + + assert_eq!(validator_verifier.pessimistic_verify_set().len(), 1); + let (_, vote_status) = pending_votes.li_digest_to_votes.get(&li_hash).unwrap(); + match vote_status { + VoteStatus::NotEnoughVotes(li_with_sig) => { + assert_eq!(li_with_sig.verified_voters().count(), 2); + assert_eq!(li_with_sig.unverified_voters().count(), 0); + }, + _ => { + panic!("QC should not be formed yet."); + }, + } + + partial_sigs.add_signature(signers[3].author(), vote_3.signature().clone()); + let aggregated_sig = validator_verifier + .aggregate_signatures(partial_sigs.signatures_iter()) + .unwrap(); + match pending_votes.insert_vote(&vote_3, &validator_verifier) { + VoteReceptionResult::NewQuorumCertificate(qc) => { + assert!(qc + .ledger_info() + .check_voting_power(&validator_verifier) + .is_ok()); + assert_eq!( + qc.ledger_info().signatures().clone(), + aggregated_sig.clone() + ); + }, + _ => { + panic!("No QC formed."); + }, + }; + + match pending_votes.insert_vote(&vote_4, &validator_verifier) { VoteReceptionResult::NewQuorumCertificate(qc) => { - assert!(qc.ledger_info().check_voting_power(&validator).is_ok()); + assert!(qc + .ledger_info() + .check_voting_power(&validator_verifier) + .is_ok()); + assert_eq!( + qc.ledger_info().signatures().clone(), + aggregated_sig.clone() + ); }, _ => { panic!("No QC formed."); }, }; + + assert_eq!(validator_verifier.pessimistic_verify_set().len(), 1); } #[test] @@ -492,7 +797,7 @@ mod tests { ::aptos_logger::Logger::init_for_testing(); // set up 4 validators - let (signers, validator) = random_validator_verifier(4, None, 
false); + let (signers, validator_verifier) = random_validator_verifier(4, None, false); let mut pending_votes = PendingVotes::new(); // submit a new vote from validator[0] -> VoteAdded @@ -500,8 +805,9 @@ mod tests { let vote0 = random_vote_data(); let mut vote0_author_0 = Vote::new(vote0, signers[0].author(), li0, &signers[0]).unwrap(); + vote0_author_0.set_verified(); assert_eq!( - pending_votes.insert_vote(&vote0_author_0, &validator), + pending_votes.insert_vote(&vote0_author_0, &validator_verifier), VoteReceptionResult::VoteAdded(1) ); @@ -511,7 +817,7 @@ mod tests { vote0_author_0.add_2chain_timeout(timeout, signature); assert_eq!( - pending_votes.insert_vote(&vote0_author_0, &validator), + pending_votes.insert_vote(&vote0_author_0, &validator_verifier), VoteReceptionResult::VoteAdded(1) ); @@ -519,8 +825,9 @@ mod tests { let li1 = random_ledger_info(); let vote1 = random_vote_data(); let mut vote1_author_1 = Vote::new(vote1, signers[1].author(), li1, &signers[1]).unwrap(); + vote1_author_1.set_verified(); assert_eq!( - pending_votes.insert_vote(&vote1_author_1, &validator), + pending_votes.insert_vote(&vote1_author_1, &validator_verifier), VoteReceptionResult::VoteAdded(1) ); @@ -528,7 +835,7 @@ mod tests { let timeout = vote1_author_1.generate_2chain_timeout(certificate_for_genesis()); let signature = timeout.sign(&signers[1]).unwrap(); vote1_author_1.add_2chain_timeout(timeout, signature); - match pending_votes.insert_vote(&vote1_author_1, &validator) { + match pending_votes.insert_vote(&vote1_author_1, &validator_verifier) { VoteReceptionResult::EchoTimeout(voting_power) => { assert_eq!(voting_power, 2); }, @@ -545,14 +852,16 @@ mod tests { let timeout = vote2_author_2.generate_2chain_timeout(certificate_for_genesis()); let signature = timeout.sign(&signers[2]).unwrap(); vote2_author_2.add_2chain_timeout(timeout, signature); - - match pending_votes.insert_vote(&vote2_author_2, &validator) { + vote2_author_2.set_verified(); + match 
pending_votes.insert_vote(&vote2_author_2, &validator_verifier) { VoteReceptionResult::New2ChainTimeoutCertificate(tc) => { - assert!(validator + assert!(validator_verifier .check_voting_power( tc.signatures_with_rounds() .get_voters( - &validator.get_ordered_account_addresses_iter().collect_vec() + &validator_verifier + .get_ordered_account_addresses_iter() + .collect_vec() ) .iter(), true diff --git a/consensus/src/pending_votes_test.rs b/consensus/src/pending_votes_test.rs new file mode 100644 index 0000000000000..e2fc1de8d3ff6 --- /dev/null +++ b/consensus/src/pending_votes_test.rs @@ -0,0 +1,161 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::pending_votes::TwoChainTimeoutVotes; +use aptos_bitvec::BitVec; +use aptos_consensus_types::{ + quorum_cert::QuorumCert, round_timeout::RoundTimeoutReason, timeout_2chain::TwoChainTimeout, +}; +use aptos_types::validator_verifier::{ + random_validator_verifier, random_validator_verifier_with_voting_power, +}; +use itertools::Itertools; + +#[test] +fn test_two_chain_timeout_votes_aggregation() { + let epoch = 1; + let round = 10; + let (signers, verifier) = random_validator_verifier(4, None, false); + let all_reasons = [ + RoundTimeoutReason::NoQC, + RoundTimeoutReason::ProposalNotReceived, + RoundTimeoutReason::Unknown, + RoundTimeoutReason::PayloadUnavailable { + missing_authors: BitVec::with_num_bits(signers.len() as u16), + }, + ]; + + // Majority nodes timeout with same reason + for reason in &all_reasons { + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let mut two_chain_timeout_votes = TwoChainTimeoutVotes::new(timeout); + for signer in signers.iter().take(3) { + let author = signer.author(); + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let signature = signer.sign(&timeout.signing_format()).unwrap(); + two_chain_timeout_votes.add(author, timeout, signature, reason.clone()); + } + let (_, aggregate_timeout_reason) = 
two_chain_timeout_votes.unpack_aggregate(&verifier); + assert_eq!(aggregate_timeout_reason, reason.clone()); + } + + // Minority nodes timeout with same reason and one with different reason + for permut in all_reasons.iter().permutations(2) { + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let mut two_chain_timeout_votes = TwoChainTimeoutVotes::new(timeout); + for signer in signers.iter().take(2) { + let author = signer.author(); + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let signature = signer.sign(&timeout.signing_format()).unwrap(); + two_chain_timeout_votes.add(author, timeout, signature, permut[0].clone()); + } + + let author = signers[2].author(); + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let signature = signers[2].sign(&timeout.signing_format()).unwrap(); + two_chain_timeout_votes.add(author, timeout, signature, permut[1].clone()); + + let (_, aggregate_timeout_reason) = two_chain_timeout_votes.unpack_aggregate(&verifier); + assert_eq!(aggregate_timeout_reason, permut[0].clone()); + } +} + +#[test] +fn test_two_chain_timeout_aggregate_missing_authors() { + let epoch = 1; + let round = 10; + let (signers, verifier) = + random_validator_verifier_with_voting_power(4, None, false, &[3, 3, 2, 1]); + + let permutations = [true, true, false, false] + .iter() + .copied() + .permutations(4) + .unique(); + + // Minority nodes report the same set of missing authors + for permut in permutations { + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let mut two_chain_timeout_votes = TwoChainTimeoutVotes::new(timeout); + for signer in signers.iter().take(2) { + let author = signer.author(); + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let signature = signer.sign(&timeout.signing_format()).unwrap(); + let reason = RoundTimeoutReason::PayloadUnavailable { + missing_authors: permut.clone().into(), + }; + 
two_chain_timeout_votes.add(author, timeout, signature, reason); + } + + let author = signers[2].author(); + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let signature = signers[2].sign(&timeout.signing_format()).unwrap(); + two_chain_timeout_votes.add(author, timeout, signature, RoundTimeoutReason::Unknown); + + let (_, aggregate_timeout_reason) = two_chain_timeout_votes.unpack_aggregate(&verifier); + + assert_eq!( + aggregate_timeout_reason, + RoundTimeoutReason::PayloadUnavailable { + missing_authors: permut.clone().into() + } + ); + } + + // Not enough votes to form a valid timeout reason + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let mut two_chain_timeout_votes = TwoChainTimeoutVotes::new(timeout); + + let author = signers[2].author(); + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let signature = signers[2].sign(&timeout.signing_format()).unwrap(); + two_chain_timeout_votes.add( + author, + timeout, + signature, + RoundTimeoutReason::PayloadUnavailable { + missing_authors: vec![true, false, false, false].into(), + }, + ); + + let (_, aggregate_timeout_reason) = two_chain_timeout_votes.unpack_aggregate(&verifier); + + assert_eq!(aggregate_timeout_reason, RoundTimeoutReason::Unknown); + + // Not enough nodes vote for the same node. 
+ let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let mut two_chain_timeout_votes = TwoChainTimeoutVotes::new(timeout); + + let author = signers[2].author(); + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let signature = signers[2].sign(&timeout.signing_format()).unwrap(); + two_chain_timeout_votes.add( + author, + timeout, + signature, + RoundTimeoutReason::PayloadUnavailable { + missing_authors: vec![false, true, false, false].into(), + }, + ); + + let author = signers[3].author(); + let timeout = TwoChainTimeout::new(epoch, round, QuorumCert::dummy()); + let signature = signers[3].sign(&timeout.signing_format()).unwrap(); + two_chain_timeout_votes.add( + author, + timeout, + signature, + RoundTimeoutReason::PayloadUnavailable { + missing_authors: vec![false, false, false, true].into(), + }, + ); + + let (_, aggregate_timeout_reason) = two_chain_timeout_votes.unpack_aggregate(&verifier); + + assert_eq!( + aggregate_timeout_reason, + RoundTimeoutReason::PayloadUnavailable { + missing_authors: BitVec::with_num_bits(4) + } + ); +} diff --git a/consensus/src/persistent_liveness_storage.rs b/consensus/src/persistent_liveness_storage.rs index 7e69b3304f653..0b09759e2fa06 100644 --- a/consensus/src/persistent_liveness_storage.rs +++ b/consensus/src/persistent_liveness_storage.rs @@ -436,8 +436,8 @@ impl PersistentLivenessStorage for StorageWriteProxy { } info!( "Starting up the consensus state machine with recovery data - [last_vote {}], [highest timeout certificate: {}]", - initial_data.last_vote.as_ref().map_or("None".to_string(), |v| v.to_string()), - initial_data.highest_2chain_timeout_certificate().as_ref().map_or("None".to_string(), |v| v.to_string()), + initial_data.last_vote.as_ref().map_or_else(|| "None".to_string(), |v| v.to_string()), + initial_data.highest_2chain_timeout_certificate().as_ref().map_or_else(|| "None".to_string(), |v| v.to_string()), ); LivenessStorageData::FullRecoveryData(initial_data) diff 
--git a/consensus/src/pipeline/buffer_item.rs b/consensus/src/pipeline/buffer_item.rs index 4854574575abd..c16d8430c42b8 100644 --- a/consensus/src/pipeline/buffer_item.rs +++ b/consensus/src/pipeline/buffer_item.rs @@ -2,7 +2,9 @@ // Parts of the project are originally copyright © Meta Platforms, Inc. // SPDX-License-Identifier: Apache-2.0 -use crate::{pipeline::hashable::Hashable, state_replication::StateComputerCommitCallBackType}; +use crate::{ + counters, pipeline::hashable::Hashable, state_replication::StateComputerCommitCallBackType, +}; use anyhow::anyhow; use aptos_consensus_types::{ common::{Author, Round}, @@ -14,13 +16,13 @@ use aptos_executor_types::ExecutorResult; use aptos_logger::prelude::*; use aptos_reliable_broadcast::DropGuard; use aptos_types::{ - aggregate_signature::PartialSignatures, block_info::BlockInfo, - ledger_info::{LedgerInfo, LedgerInfoWithSignatures, LedgerInfoWithVerifiedSignatures}, + ledger_info::{LedgerInfo, LedgerInfoWithSignatures, LedgerInfoWithUnverifiedSignatures}, validator_verifier::ValidatorVerifier, }; use futures::future::BoxFuture; use itertools::zip_eq; +use std::collections::HashMap; use tokio::time::Instant; fn generate_commit_ledger_info( @@ -38,64 +40,24 @@ fn generate_commit_ledger_info( ) } -fn verify_signatures( - unverified_signatures: PartialSignatures, - validator: &ValidatorVerifier, +fn ledger_info_with_unverified_signatures( + unverified_votes: HashMap, commit_ledger_info: &LedgerInfo, -) -> PartialSignatures { - // Returns a valid partial signature from a set of unverified signatures. - // TODO: Validating individual signatures in expensive. Replace this with optimistic signature - // verification for BLS. 
Here, we can implement a tree-based batch verification technique that - // filters out invalid signature shares much faster when there are only a few of them - // (e.g., [LM07]: Finding Invalid Signatures in Pairing-Based Batches, - // by Law, Laurie and Matt, Brian J., in Cryptography and Coding, 2007). - PartialSignatures::new( - unverified_signatures - .signatures() - .iter() - .filter(|(author, sig)| validator.verify(**author, commit_ledger_info, sig).is_ok()) - .map(|(author, sig)| (*author, sig.clone())) - .collect(), - ) -} - -fn generate_executed_item_from_ordered( - commit_info: BlockInfo, - executed_blocks: Vec, - verified_signatures: PartialSignatures, - callback: StateComputerCommitCallBackType, - ordered_proof: LedgerInfoWithSignatures, - order_vote_enabled: bool, -) -> BufferItem { - debug!("{} advance to executed from ordered", commit_info); - let partial_commit_proof = LedgerInfoWithVerifiedSignatures::new( - generate_commit_ledger_info(&commit_info, &ordered_proof, order_vote_enabled), - verified_signatures, - ); - BufferItem::Executed(Box::new(ExecutedItem { - executed_blocks, - partial_commit_proof, - callback, - commit_info, - ordered_proof, - })) -} - -fn aggregate_commit_proof( - commit_ledger_info: &LedgerInfo, - verified_signatures: &PartialSignatures, - validator: &ValidatorVerifier, -) -> LedgerInfoWithSignatures { - let aggregated_sig = validator - .aggregate_signatures(verified_signatures.signatures_iter()) - .expect("Failed to generate aggregated signature"); - LedgerInfoWithSignatures::new(commit_ledger_info.clone(), aggregated_sig) +) -> LedgerInfoWithUnverifiedSignatures { + let mut li_with_sig = LedgerInfoWithUnverifiedSignatures::new(commit_ledger_info.clone()); + for vote in unverified_votes.values() { + let sig = vote.signature_with_status(); + if vote.ledger_info() == commit_ledger_info { + li_with_sig.add_signature(vote.author(), sig); + } + } + li_with_sig } // we differentiate buffer items at different stages // for better 
code readability pub struct OrderedItem { - pub unverified_signatures: PartialSignatures, + pub unverified_votes: HashMap, // This can happen in the fast forward sync path, where we can receive the commit proof // from peers. pub commit_proof: Option, @@ -106,7 +68,7 @@ pub struct OrderedItem { pub struct ExecutedItem { pub executed_blocks: Vec, - pub partial_commit_proof: LedgerInfoWithVerifiedSignatures, + pub partial_commit_proof: LedgerInfoWithUnverifiedSignatures, pub callback: StateComputerCommitCallBackType, pub commit_info: BlockInfo, pub ordered_proof: LedgerInfoWithSignatures, @@ -114,7 +76,7 @@ pub struct ExecutedItem { pub struct SignedItem { pub executed_blocks: Vec, - pub partial_commit_proof: LedgerInfoWithVerifiedSignatures, + pub partial_commit_proof: LedgerInfoWithUnverifiedSignatures, pub callback: StateComputerCommitCallBackType, pub commit_vote: CommitVote, pub rb_handle: Option<(Instant, DropGuard)>, @@ -146,10 +108,10 @@ impl BufferItem { ordered_blocks: Vec, ordered_proof: LedgerInfoWithSignatures, callback: StateComputerCommitCallBackType, - unverified_signatures: PartialSignatures, + unverified_votes: HashMap, ) -> Self { Self::Ordered(Box::new(OrderedItem { - unverified_signatures, + unverified_votes, commit_proof: None, callback, ordered_blocks, @@ -170,7 +132,7 @@ impl BufferItem { let OrderedItem { ordered_blocks, commit_proof, - unverified_signatures, + unverified_votes, callback, ordered_proof, } = *ordered_item; @@ -211,16 +173,11 @@ impl BufferItem { order_vote_enabled, ); - let verified_signatures = - verify_signatures(unverified_signatures, validator, &commit_ledger_info); - if (validator.check_voting_power(verified_signatures.signatures().keys(), true)) - .is_ok() - { - let commit_proof = aggregate_commit_proof( - &commit_ledger_info, - &verified_signatures, - validator, - ); + let mut partial_commit_proof = ledger_info_with_unverified_signatures( + unverified_votes, + &commit_ledger_info, + ); + if let Ok(commit_proof) = 
partial_commit_proof.aggregate_and_verify(validator) { debug!( "{} advance to aggregated from ordered", commit_proof.commit_info() @@ -231,14 +188,13 @@ impl BufferItem { callback, })) } else { - generate_executed_item_from_ordered( - commit_info, + Self::Executed(Box::new(ExecutedItem { executed_blocks, - verified_signatures, + partial_commit_proof, callback, + commit_info, ordered_proof, - order_vote_enabled, - ) + })) } } }, @@ -294,7 +250,7 @@ impl BufferItem { partial_commit_proof: local_commit_proof, .. } = *signed_item; - assert_eq!(local_commit_proof.commit_info(), commit_proof.commit_info(),); + assert_eq!(local_commit_proof.commit_info(), commit_proof.commit_info()); debug!( "{} advance to aggregated with commit decision", commit_proof.commit_info() @@ -348,43 +304,50 @@ impl BufferItem { pub fn try_advance_to_aggregated(self, validator: &ValidatorVerifier) -> Self { match self { Self::Signed(signed_item) => { - if validator - .check_voting_power(signed_item.partial_commit_proof.signatures().keys(), true) + if signed_item + .partial_commit_proof + .check_voting_power(validator, true) .is_ok() { - Self::Aggregated(Box::new(AggregatedItem { - executed_blocks: signed_item.executed_blocks, - commit_proof: aggregate_commit_proof( - signed_item.partial_commit_proof.ledger_info(), - signed_item.partial_commit_proof.partial_sigs(), - validator, - ), - callback: signed_item.callback, - })) - } else { - Self::Signed(signed_item) + let _time = counters::VERIFY_MSG + .with_label_values(&["commit_vote_aggregate_and_verify"]) + .start_timer(); + if let Ok(commit_proof) = signed_item + .partial_commit_proof + .clone() + .aggregate_and_verify(validator) + { + return Self::Aggregated(Box::new(AggregatedItem { + executed_blocks: signed_item.executed_blocks, + commit_proof, + callback: signed_item.callback, + })); + } } + Self::Signed(signed_item) }, - Self::Executed(executed_item) => { - if validator - .check_voting_power( - 
executed_item.partial_commit_proof.signatures().keys(), - true, - ) + Self::Executed(mut executed_item) => { + if executed_item + .partial_commit_proof + .check_voting_power(validator, true) .is_ok() { - Self::Aggregated(Box::new(AggregatedItem { - executed_blocks: executed_item.executed_blocks, - commit_proof: aggregate_commit_proof( - executed_item.partial_commit_proof.ledger_info(), - executed_item.partial_commit_proof.partial_sigs(), - validator, - ), - callback: executed_item.callback, - })) - } else { - Self::Executed(executed_item) + let _time = counters::VERIFY_MSG + .with_label_values(&["commit_vote_aggregate_and_verify"]) + .start_timer(); + + if let Ok(commit_proof) = executed_item + .partial_commit_proof + .aggregate_and_verify(validator) + { + return Self::Aggregated(Box::new(AggregatedItem { + executed_blocks: executed_item.executed_blocks, + commit_proof, + callback: executed_item.callback, + })); + } } + Self::Executed(executed_item) }, _ => self, } @@ -417,7 +380,7 @@ impl BufferItem { pub fn add_signature_if_matched(&mut self, vote: CommitVote) -> anyhow::Result<()> { let target_commit_info = vote.commit_info(); let author = vote.author(); - let signature = vote.signature().clone(); + let signature = vote.signature_with_status(); match self { Self::Ordered(ordered) => { if ordered @@ -429,9 +392,7 @@ impl BufferItem { // when advancing to executed item, we will check if the sigs are valid. // each author at most stores a single sig for each item, // so an adversary will not be able to flood our memory. 
- ordered - .unverified_signatures - .add_signature(author, signature); + ordered.unverified_votes.insert(author, vote); return Ok(()); } }, @@ -497,3 +458,310 @@ impl BufferItem { } } } + +#[cfg(test)] +mod test { + use super::*; + use aptos_consensus_types::{block::Block, block_data::BlockData}; + use aptos_crypto::HashValue; + use aptos_executor_types::StateComputeResult; + use aptos_types::{ + aggregate_signature::AggregateSignature, + ledger_info::LedgerInfo, + validator_signer::ValidatorSigner, + validator_verifier::{ValidatorConsensusInfo, ValidatorVerifier}, + }; + use std::collections::{BTreeMap, HashMap}; + + fn create_validators() -> (Vec, ValidatorVerifier) { + const NUM_SIGNERS: u8 = 7; + let validator_signers: Vec = (0..NUM_SIGNERS) + .map(|i| ValidatorSigner::random([i; 32])) + .collect(); + let mut validator_infos = vec![]; + + for validator in validator_signers.iter() { + validator_infos.push(ValidatorConsensusInfo::new( + validator.author(), + validator.public_key(), + 1, + )); + } + + let mut validator_verifier = + ValidatorVerifier::new_with_quorum_voting_power(validator_infos, 5) + .expect("Incorrect quorum size."); + validator_verifier.set_optimistic_sig_verification_flag(true); + (validator_signers, validator_verifier) + } + + fn create_pipelined_block() -> PipelinedBlock { + PipelinedBlock::new( + Block::new_for_testing( + HashValue::random(), + BlockData::dummy_with_validator_txns(vec![]), + None, + ), + vec![], + StateComputeResult::new_dummy(), + ) + } + + fn create_valid_commit_votes( + validator_signers: Vec, + ledger_info: LedgerInfo, + ) -> Vec { + let mut commit_votes = vec![]; + for validator in validator_signers.iter() { + let commit_vote = + CommitVote::new(validator.author(), ledger_info.clone(), validator).unwrap(); + commit_votes.push(commit_vote); + } + commit_votes + } + + #[test] + fn test_buffer_item_happy_path_1() { + let (validator_signers, validator_verifier) = create_validators(); + let pipelined_block = 
create_pipelined_block(); + let block_info = pipelined_block.block_info(); + let ledger_info = LedgerInfo::new(block_info.clone(), HashValue::zero()); + let ordered_proof = + LedgerInfoWithSignatures::new(ledger_info.clone(), AggregateSignature::empty()); + let commit_votes = + create_valid_commit_votes(validator_signers.clone(), ledger_info.clone()); + let mut partial_signatures = BTreeMap::new(); + partial_signatures.insert( + validator_signers[0].author(), + commit_votes[0].signature().clone(), + ); + partial_signatures.insert( + validator_signers[1].author(), + commit_votes[1].signature().clone(), + ); + partial_signatures.insert( + validator_signers[2].author(), + commit_votes[2].signature().clone(), + ); + partial_signatures.insert( + validator_signers[3].author(), + commit_votes[3].signature().clone(), + ); + partial_signatures.insert( + validator_signers[4].author(), + commit_votes[4].signature().clone(), + ); + let li_with_sig = validator_verifier + .aggregate_signatures(partial_signatures.iter()) + .unwrap(); + let commit_proof = LedgerInfoWithSignatures::new(ledger_info.clone(), li_with_sig); + + let mut cached_commit_votes = HashMap::new(); + cached_commit_votes.insert(commit_votes[0].author(), commit_votes[0].clone()); + cached_commit_votes.insert(commit_votes[1].author(), commit_votes[1].clone()); + let mut ordered_item = BufferItem::new_ordered( + vec![pipelined_block.clone()], + ordered_proof.clone(), + Box::new(move |_, _| {}), + cached_commit_votes, + ); + + ordered_item + .add_signature_if_matched(commit_votes[2].clone()) + .unwrap(); + ordered_item + .add_signature_if_matched(commit_votes[3].clone()) + .unwrap(); + + let mut executed_item = ordered_item.advance_to_executed_or_aggregated( + vec![pipelined_block.clone()], + &validator_verifier, + None, + true, + ); + + match executed_item { + BufferItem::Executed(ref executed_item_inner) => { + assert_eq!(executed_item_inner.executed_blocks, vec![ + pipelined_block.clone() + ]); + 
assert_eq!(executed_item_inner.commit_info, block_info); + assert_eq!( + executed_item_inner + .partial_commit_proof + .all_voters() + .count(), + 4 + ); + assert_eq!(executed_item_inner.ordered_proof, ordered_proof); + }, + _ => panic!("Expected executed item."), + } + + executed_item + .add_signature_if_matched(commit_votes[4].clone()) + .unwrap(); + let aggregated_item = executed_item.try_advance_to_aggregated(&validator_verifier); + match aggregated_item { + BufferItem::Aggregated(aggregated_item_inner) => { + assert_eq!(aggregated_item_inner.executed_blocks, vec![pipelined_block]); + assert_eq!(aggregated_item_inner.commit_proof, commit_proof); + }, + _ => panic!("Expected aggregated item."), + } + } + + // This tests the case where some of the commit votes are not correct + #[test] + fn test_buffer_item_bad_path_1() { + let (validator_signers, validator_verifier) = create_validators(); + let pipelined_block = create_pipelined_block(); + let block_info = pipelined_block.block_info(); + let ledger_info = LedgerInfo::new(block_info.clone(), HashValue::zero()); + let ordered_proof = + LedgerInfoWithSignatures::new(ledger_info.clone(), AggregateSignature::empty()); + let mut commit_votes = + create_valid_commit_votes(validator_signers.clone(), ledger_info.clone()); + + // Corrupting commit_votes[3], commit_votes[5] + commit_votes[3] = CommitVote::new_with_signature( + validator_signers[3].author(), + ledger_info.clone(), + bls12381::Signature::dummy_signature(), + ); + commit_votes[5] = CommitVote::new_with_signature( + validator_signers[5].author(), + ledger_info.clone(), + bls12381::Signature::dummy_signature(), + ); + + let mut partial_signatures = BTreeMap::new(); + partial_signatures.insert( + validator_signers[0].author(), + commit_votes[0].signature().clone(), + ); + partial_signatures.insert( + validator_signers[1].author(), + commit_votes[1].signature().clone(), + ); + partial_signatures.insert( + validator_signers[2].author(), + 
commit_votes[2].signature().clone(), + ); + partial_signatures.insert( + validator_signers[4].author(), + commit_votes[4].signature().clone(), + ); + partial_signatures.insert( + validator_signers[6].author(), + commit_votes[6].signature().clone(), + ); + let li_with_sig = validator_verifier + .aggregate_signatures(partial_signatures.iter()) + .unwrap(); + let commit_proof = LedgerInfoWithSignatures::new(ledger_info.clone(), li_with_sig); + + let mut cached_commit_votes = HashMap::new(); + cached_commit_votes.insert(commit_votes[0].author(), commit_votes[0].clone()); + cached_commit_votes.insert(commit_votes[1].author(), commit_votes[1].clone()); + let mut ordered_item = BufferItem::new_ordered( + vec![pipelined_block.clone()], + ordered_proof.clone(), + Box::new(move |_, _| {}), + cached_commit_votes, + ); + + ordered_item + .add_signature_if_matched(commit_votes[2].clone()) + .unwrap(); + ordered_item + .add_signature_if_matched(commit_votes[3].clone()) + .unwrap(); + + assert_eq!(validator_verifier.pessimistic_verify_set().len(), 0); + let mut executed_item = ordered_item.advance_to_executed_or_aggregated( + vec![pipelined_block.clone()], + &validator_verifier, + None, + true, + ); + + match executed_item { + BufferItem::Executed(ref executed_item_inner) => { + assert_eq!(executed_item_inner.executed_blocks, vec![ + pipelined_block.clone() + ]); + assert_eq!(executed_item_inner.commit_info, block_info); + assert_eq!( + executed_item_inner + .partial_commit_proof + .all_voters() + .count(), + 4 + ); + assert_eq!(executed_item_inner.ordered_proof, ordered_proof); + }, + _ => panic!("Expected executed item."), + } + + executed_item + .add_signature_if_matched(commit_votes[4].clone()) + .unwrap(); + + let mut executed_item = executed_item.try_advance_to_aggregated(&validator_verifier); + match executed_item { + BufferItem::Executed(ref executed_item_inner) => { + assert_eq!(executed_item_inner.executed_blocks, vec![ + pipelined_block.clone() + ]); + 
assert_eq!(executed_item_inner.commit_info, block_info); + assert_eq!( + executed_item_inner + .partial_commit_proof + .all_voters() + .count(), + 4, // Commit_votes[3] is not correct and will be removed from the partial_commit_proof + ); + assert_eq!(executed_item_inner.ordered_proof, ordered_proof); + }, + _ => panic!("Expected executed item."), + } + assert_eq!(validator_verifier.pessimistic_verify_set().len(), 1); + + executed_item + .add_signature_if_matched(commit_votes[5].clone()) + .unwrap(); + + let mut executed_item = executed_item.try_advance_to_aggregated(&validator_verifier); + match executed_item { + BufferItem::Executed(ref executed_item_inner) => { + assert_eq!(executed_item_inner.executed_blocks, vec![ + pipelined_block.clone() + ]); + assert_eq!(executed_item_inner.commit_info, block_info); + assert_eq!( + executed_item_inner + .partial_commit_proof + .all_voters() + .count(), + 4, // Commit_votes[5] is not correct and will be removed from the partial_commit_proof + ); + assert_eq!(executed_item_inner.ordered_proof, ordered_proof); + }, + _ => panic!("Expected executed item."), + } + assert_eq!(validator_verifier.pessimistic_verify_set().len(), 2); + + executed_item + .add_signature_if_matched(commit_votes[6].clone()) + .unwrap(); + let aggregated_item = executed_item.try_advance_to_aggregated(&validator_verifier); + match aggregated_item { + BufferItem::Aggregated(aggregated_item_inner) => { + assert_eq!(aggregated_item_inner.executed_blocks, vec![pipelined_block]); + assert_eq!(aggregated_item_inner.commit_proof, commit_proof); + }, + _ => panic!("Expected aggregated item."), + } + } +} diff --git a/consensus/src/pipeline/buffer_manager.rs b/consensus/src/pipeline/buffer_manager.rs index 38d5aa8578893..4b7d2e9712ddf 100644 --- a/consensus/src/pipeline/buffer_manager.rs +++ b/consensus/src/pipeline/buffer_manager.rs @@ -31,17 +31,18 @@ use aptos_consensus_types::{ pipeline::commit_vote::CommitVote, pipelined_block::PipelinedBlock, }; -use 
aptos_crypto::HashValue; +use aptos_crypto::{bls12381, HashValue}; use aptos_executor_types::ExecutorResult; use aptos_logger::prelude::*; use aptos_network::protocols::{rpc::error::RpcError, wire::handshake::v1::ProtocolId}; use aptos_reliable_broadcast::{DropGuard, ReliableBroadcast}; use aptos_time_service::TimeService; use aptos_types::{ - account_address::AccountAddress, aggregate_signature::PartialSignatures, - epoch_change::EpochChangeProof, epoch_state::EpochState, ledger_info::LedgerInfoWithSignatures, + account_address::AccountAddress, epoch_change::EpochChangeProof, epoch_state::EpochState, + ledger_info::LedgerInfoWithSignatures, }; use bytes::Bytes; +use fail::fail_point; use futures::{ channel::{ mpsc::{unbounded, UnboundedReceiver, UnboundedSender}, @@ -209,7 +210,6 @@ impl BufferManager { .max_delay(Duration::from_secs(5)); let (tx, rx) = unbounded(); - Self { author, @@ -259,7 +259,6 @@ impl BufferManager { back_pressure_enabled, highest_committed_round, latest_round: highest_committed_round, - consensus_observer_config, consensus_publisher, @@ -415,23 +414,18 @@ impl BufferManager { .await .expect("Failed to send execution schedule request"); - let mut unverified_signatures = PartialSignatures::empty(); + let mut unverified_votes = HashMap::new(); if let Some(block) = ordered_blocks.last() { if let Some(votes) = self.pending_commit_votes.remove(&block.round()) { - votes - .values() - .filter(|vote| vote.commit_info().id() == block.id()) - .for_each(|vote| { - unverified_signatures.add_signature(vote.author(), vote.signature().clone()) - }); + for (_, vote) in votes { + if vote.commit_info().id() == block.id() { + unverified_votes.insert(vote.author(), vote); + } + } } } - let item = BufferItem::new_ordered( - ordered_blocks, - ordered_proof, - callback, - unverified_signatures, - ); + let item = + BufferItem::new_ordered(ordered_blocks, ordered_proof, callback, unverified_votes); self.buffer.push_back(item); } @@ -704,6 +698,17 @@ impl 
BufferManager { } } + fn generate_commit_message(commit_vote: CommitVote) -> CommitMessage { + fail_point!("consensus::create_invalid_commit_vote", |_| { + CommitMessage::Vote(CommitVote::new_with_signature( + commit_vote.author(), + commit_vote.ledger_info().clone(), + bls12381::Signature::dummy_signature(), + )) + }); + CommitMessage::Vote(commit_vote) + } + /// If the signing response is successful, advance the item to Signed and broadcast commit votes. async fn process_signing_response(&mut self, response: SigningResponse) { let SigningResponse { @@ -733,7 +738,7 @@ impl BufferManager { let mut signed_item = item.advance_to_signed(self.author, signature); let signed_item_mut = signed_item.unwrap_signed_mut(); let commit_vote = signed_item_mut.commit_vote.clone(); - let commit_vote = CommitMessage::Vote(commit_vote); + let commit_vote = Self::generate_commit_message(commit_vote); signed_item_mut.rb_handle = self .do_reliable_broadcast(commit_vote) .map(|handle| (Instant::now(), handle)); diff --git a/consensus/src/pipeline/tests/buffer_manager_tests.rs b/consensus/src/pipeline/tests/buffer_manager_tests.rs index 9ef9ed94600cd..da3fc1cd733ae 100644 --- a/consensus/src/pipeline/tests/buffer_manager_tests.rs +++ b/consensus/src/pipeline/tests/buffer_manager_tests.rs @@ -72,7 +72,7 @@ pub fn prepare_buffer_manager( HashValue, Vec, Receiver, - ValidatorVerifier, + Arc, ) { let num_nodes = 1; let channel_size = 30; @@ -114,6 +114,7 @@ pub fn prepare_buffer_manager( let consensus_network_client = ConsensusNetworkClient::new(network_client); let (self_loop_tx, self_loop_rx) = aptos_channels::new_unbounded_test(); + let validators = Arc::new(validators); let network = NetworkSender::new( author, consensus_network_client, @@ -190,7 +191,7 @@ pub fn launch_buffer_manager() -> ( Runtime, Vec, Receiver, - ValidatorVerifier, + Arc, ) { let runtime = consensus_runtime(); diff --git a/consensus/src/quorum_store/batch_generator.rs b/consensus/src/quorum_store/batch_generator.rs 
index 65805bb356ffd..fd6001ce18d96 100644 --- a/consensus/src/quorum_store/batch_generator.rs +++ b/consensus/src/quorum_store/batch_generator.rs @@ -495,10 +495,10 @@ impl BatchGenerator { "QS: got clean request from execution, block timestamp {}", block_timestamp ); - assert!( - self.latest_block_timestamp <= block_timestamp, - "Decreasing block timestamp" - ); + // Block timestamp is updated asynchronously, so it may race when it enters state sync. + if self.latest_block_timestamp > block_timestamp { + continue; + } self.latest_block_timestamp = block_timestamp; for (author, batch_id) in batches.iter().map(|b| (b.author(), b.batch_id())) { diff --git a/consensus/src/quorum_store/batch_proof_queue.rs b/consensus/src/quorum_store/batch_proof_queue.rs index cfff3bb9c7061..c542d97d7d9e3 100644 --- a/consensus/src/quorum_store/batch_proof_queue.rs +++ b/consensus/src/quorum_store/batch_proof_queue.rs @@ -7,7 +7,7 @@ use super::{ }; use crate::quorum_store::counters; use aptos_consensus_types::{ - common::TxnSummaryWithExpiration, + common::{Author, TxnSummaryWithExpiration}, payload::TDataInfo, proof_of_store::{BatchInfo, ProofOfStore}, utils::PayloadTxnsSize, @@ -69,10 +69,16 @@ pub struct BatchProofQueue { remaining_proofs: u64, remaining_local_txns: u64, remaining_local_proofs: u64, + + batch_expiry_gap_when_init_usecs: u64, } impl BatchProofQueue { - pub(crate) fn new(my_peer_id: PeerId, batch_store: Arc) -> Self { + pub(crate) fn new( + my_peer_id: PeerId, + batch_store: Arc, + batch_expiry_gap_when_init_usecs: u64, + ) -> Self { Self { my_peer_id, author_to_batches: HashMap::new(), @@ -85,6 +91,7 @@ impl BatchProofQueue { remaining_proofs: 0, remaining_local_txns: 0, remaining_local_proofs: 0, + batch_expiry_gap_when_init_usecs, } } @@ -389,11 +396,13 @@ impl BatchProofQueue { let (result, all_txns, unique_txns, is_full) = self.pull_internal( false, excluded_batches, + &HashSet::new(), max_txns, max_txns_after_filtering, soft_max_txns_after_filtering, 
return_non_full, block_timestamp, + None, ); let proof_of_stores: Vec<_> = result .into_iter() @@ -429,20 +438,24 @@ impl BatchProofQueue { pub fn pull_batches( &mut self, excluded_batches: &HashSet, + exclude_authors: &HashSet, max_txns: PayloadTxnsSize, max_txns_after_filtering: u64, soft_max_txns_after_filtering: u64, return_non_full: bool, block_timestamp: Duration, + minimum_batch_age_usecs: Option, ) -> (Vec, PayloadTxnsSize, u64) { let (result, all_txns, unique_txns, _) = self.pull_internal( true, excluded_batches, + exclude_authors, max_txns, max_txns_after_filtering, soft_max_txns_after_filtering, return_non_full, block_timestamp, + minimum_batch_age_usecs, ); let batches = result.into_iter().map(|item| item.info.clone()).collect(); (batches, all_txns, unique_txns) @@ -463,11 +476,13 @@ impl BatchProofQueue { ) { let (batches, all_txns, unique_txns) = self.pull_batches( excluded_batches, + &HashSet::new(), max_txns, max_txns_after_filtering, soft_max_txns_after_filtering, return_non_full, block_timestamp, + None, ); let mut result = Vec::new(); for batch in batches.into_iter() { @@ -489,11 +504,13 @@ impl BatchProofQueue { &mut self, batches_without_proofs: bool, excluded_batches: &HashSet, + exclude_authors: &HashSet, max_txns: PayloadTxnsSize, max_txns_after_filtering: u64, soft_max_txns_after_filtering: u64, return_non_full: bool, block_timestamp: Duration, + min_batch_age_usecs: Option, ) -> (Vec<&QueueItem>, PayloadTxnsSize, u64, bool) { let mut result = Vec::new(); let mut cur_unique_txns = 0; @@ -515,10 +532,27 @@ impl BatchProofQueue { } } + let max_batch_creation_ts_usecs = min_batch_age_usecs + .map(|min_age| aptos_infallible::duration_since_epoch().as_micros() as u64 - min_age); let mut iters = vec![]; - for (_, batches) in self.author_to_batches.iter() { + for (_, batches) in self + .author_to_batches + .iter() + .filter(|(author, _)| !exclude_authors.contains(author)) + { let batch_iter = batches.iter().rev().filter_map(|(sort_key, info)| { if 
let Some(item) = self.items.get(&sort_key.batch_key) { + let batch_create_ts_usecs = + item.info.expiration() - self.batch_expiry_gap_when_init_usecs; + + // Ensure that the batch was created at least `min_batch_age_usecs` ago to + // reduce the chance of inline fetches. + if max_batch_creation_ts_usecs + .is_some_and(|max_create_ts| batch_create_ts_usecs > max_create_ts) + { + return None; + } + if item.is_committed() { return None; } diff --git a/consensus/src/quorum_store/batch_requester.rs b/consensus/src/quorum_store/batch_requester.rs index 01b5e47c7f003..c7dfe6aeff0c1 100644 --- a/consensus/src/quorum_store/batch_requester.rs +++ b/consensus/src/quorum_store/batch_requester.rs @@ -114,7 +114,7 @@ impl BatchRequester { retry_interval_ms: usize, rpc_timeout_ms: usize, network_sender: T, - validator_verifier: ValidatorVerifier, + validator_verifier: Arc, ) -> Self { Self { epoch, @@ -124,7 +124,7 @@ impl BatchRequester { retry_interval_ms, rpc_timeout_ms, network_sender, - validator_verifier: Arc::new(validator_verifier), + validator_verifier, } } diff --git a/consensus/src/quorum_store/batch_store.rs b/consensus/src/quorum_store/batch_store.rs index 7ae887ea08f37..450a199a15ed6 100644 --- a/consensus/src/quorum_store/batch_store.rs +++ b/consensus/src/quorum_store/batch_store.rs @@ -332,17 +332,8 @@ impl BatchStore { pub fn update_certified_timestamp(&self, certified_time: u64) { trace!("QS: batch reader updating time {:?}", certified_time); - let prev_time = self - .last_certified_time + self.last_certified_time .fetch_max(certified_time, Ordering::SeqCst); - // Note: prev_time may be equal to certified_time due to state-sync - // at the epoch boundary. 
- assert!( - prev_time <= certified_time, - "Decreasing executed block timestamp reported to BatchReader {} {}", - prev_time, - certified_time, - ); let expired_keys = self.clear_expired_payload(certified_time); if let Err(e) = self.db.delete_batches(expired_keys) { diff --git a/consensus/src/quorum_store/proof_coordinator.rs b/consensus/src/quorum_store/proof_coordinator.rs index 73a1ebabe9c4d..16df06d1cb500 100644 --- a/consensus/src/quorum_store/proof_coordinator.rs +++ b/consensus/src/quorum_store/proof_coordinator.rs @@ -297,7 +297,7 @@ impl ProofCoordinator { mut self, mut rx: Receiver, mut network_sender: impl QuorumStoreSender, - validator_verifier: ValidatorVerifier, + validator_verifier: Arc, ) { let mut interval = time::interval(Duration::from_millis(100)); loop { diff --git a/consensus/src/quorum_store/proof_manager.rs b/consensus/src/quorum_store/proof_manager.rs index a33e0c1165292..7df9ab38b2783 100644 --- a/consensus/src/quorum_store/proof_manager.rs +++ b/consensus/src/quorum_store/proof_manager.rs @@ -34,7 +34,6 @@ pub struct ProofManager { back_pressure_total_proof_limit: u64, remaining_total_proof_num: u64, allow_batches_without_pos_in_proposal: bool, - enable_opt_quorum_store: bool, } impl ProofManager { @@ -44,16 +43,19 @@ impl ProofManager { back_pressure_total_proof_limit: u64, batch_store: Arc, allow_batches_without_pos_in_proposal: bool, - enable_opt_quorum_store: bool, + batch_expiry_gap_when_init_usecs: u64, ) -> Self { Self { - batch_proof_queue: BatchProofQueue::new(my_peer_id, batch_store), + batch_proof_queue: BatchProofQueue::new( + my_peer_id, + batch_store, + batch_expiry_gap_when_init_usecs, + ), back_pressure_total_txn_limit, remaining_total_txn_num: 0, back_pressure_total_proof_limit, remaining_total_proof_num: 0, allow_batches_without_pos_in_proposal, - enable_opt_quorum_store, } } @@ -106,10 +108,6 @@ impl ProofManager { PayloadFilter::InQuorumStore(proofs) => proofs, }; - let max_txns_with_proof = request - .max_txns - 
.compute_pct(100 - request.opt_batch_txns_pct); - let ( proof_block, txns_with_proof_size, @@ -117,7 +115,7 @@ impl ProofManager { proof_queue_fully_utilized, ) = self.batch_proof_queue.pull_proofs( &excluded_batches, - max_txns_with_proof, + request.max_txns, request.max_txns_after_filtering, request.soft_max_txns_after_filtering, request.return_non_full, @@ -129,26 +127,30 @@ impl ProofManager { counters::PROOF_QUEUE_FULLY_UTILIZED .observe(if proof_queue_fully_utilized { 1.0 } else { 0.0 }); - let (opt_batches, opt_batch_txns_size) = if self.enable_opt_quorum_store { + let (opt_batches, opt_batch_txns_size) = // TODO(ibalajiarun): Support unique txn calculation - let max_opt_batch_txns_size = request.max_txns - txns_with_proof_size; - let (opt_batches, opt_payload_size, _) = self.batch_proof_queue.pull_batches( - &excluded_batches - .iter() - .cloned() - .chain(proof_block.iter().map(|proof| proof.info().clone())) - .collect(), - max_opt_batch_txns_size, - request.max_txns_after_filtering, - request.soft_max_txns_after_filtering, - request.return_non_full, - request.block_timestamp, - ); + if let Some(ref params) = request.maybe_optqs_payload_pull_params { + let max_opt_batch_txns_size = request.max_txns - txns_with_proof_size; + let (opt_batches, opt_payload_size, _) = + self.batch_proof_queue.pull_batches( + &excluded_batches + .iter() + .cloned() + .chain(proof_block.iter().map(|proof| proof.info().clone())) + .collect(), + ¶ms.exclude_authors, + max_opt_batch_txns_size, + request.max_txns_after_filtering, + request.soft_max_txns_after_filtering, + request.return_non_full, + request.block_timestamp, + Some(params.minimum_batch_age_usecs), + ); - (opt_batches, opt_payload_size) - } else { - (Vec::new(), PayloadTxnsSize::zero()) - }; + (opt_batches, opt_payload_size) + } else { + (Vec::new(), PayloadTxnsSize::zero()) + }; let cur_txns = txns_with_proof_size + opt_batch_txns_size; let (inline_block, inline_block_size) = @@ -183,7 +185,7 @@ impl ProofManager { 
counters::NUM_INLINE_BATCHES.observe(inline_block.len() as f64); counters::NUM_INLINE_TXNS.observe(inline_block_size.count() as f64); - let response = if self.enable_opt_quorum_store { + let response = if request.maybe_optqs_payload_pull_params.is_some() { let inline_batches = inline_block.into(); Payload::OptQuorumStore(OptQuorumStorePayload::new( inline_batches, diff --git a/consensus/src/quorum_store/quorum_store_builder.rs b/consensus/src/quorum_store/quorum_store_builder.rs index 3a5edb1f9a323..34eeb93233e98 100644 --- a/consensus/src/quorum_store/quorum_store_builder.rs +++ b/consensus/src/quorum_store/quorum_store_builder.rs @@ -127,7 +127,7 @@ pub struct InnerBuilder { mempool_txn_pull_timeout_ms: u64, aptos_db: Arc, network_sender: NetworkSender, - verifier: ValidatorVerifier, + verifier: Arc, proof_cache: ProofCache, backend: SecureBackend, coordinator_tx: Sender, @@ -161,7 +161,7 @@ impl InnerBuilder { mempool_txn_pull_timeout_ms: u64, aptos_db: Arc, network_sender: NetworkSender, - verifier: ValidatorVerifier, + verifier: Arc, proof_cache: ProofCache, backend: SecureBackend, quorum_store_storage: Arc, @@ -365,7 +365,7 @@ impl InnerBuilder { * self.num_validators, self.batch_store.clone().unwrap(), self.config.allow_batches_without_pos_in_proposal, - self.config.enable_opt_quorum_store, + self.config.batch_expiry_gap_when_init_usecs, ); spawn_named!( "proof_manager", @@ -446,6 +446,7 @@ impl InnerBuilder { self.coordinator_tx.clone(), consensus_publisher, self.verifier.get_ordered_account_addresses(), + self.verifier.address_to_validator_index().clone(), )), Some(self.quorum_store_msg_tx.clone()), ) diff --git a/consensus/src/quorum_store/tests/batch_proof_queue_test.rs b/consensus/src/quorum_store/tests/batch_proof_queue_test.rs index 2741ea3a6a912..96ab5414ab120 100644 --- a/consensus/src/quorum_store/tests/batch_proof_queue_test.rs +++ b/consensus/src/quorum_store/tests/batch_proof_queue_test.rs @@ -62,7 +62,7 @@ fn proof_of_store_with_size( fn 
test_proof_queue_sorting() { let my_peer_id = PeerId::random(); let batch_store = batch_store_for_test(5 * 1024 * 1024); - let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store); + let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store, 1); let author_0 = PeerId::random(); let author_1 = PeerId::random(); @@ -149,7 +149,7 @@ fn test_proof_queue_sorting() { fn test_proof_calculate_remaining_txns_and_proofs() { let my_peer_id = PeerId::random(); let batch_store = batch_store_for_test(5 * 1024 * 1024); - let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store); + let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store, 1); let now_in_secs = aptos_infallible::duration_since_epoch().as_secs() as u64; let now_in_usecs = aptos_infallible::duration_since_epoch().as_micros() as u64; let author_0 = PeerId::random(); @@ -409,7 +409,7 @@ fn test_proof_calculate_remaining_txns_and_proofs() { fn test_proof_pull_proofs_with_duplicates() { let my_peer_id = PeerId::random(); let batch_store = batch_store_for_test(5 * 1024 * 1024); - let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store); + let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store, 1); let now_in_secs = aptos_infallible::duration_since_epoch().as_secs() as u64; let now_in_usecs = now_in_secs * 1_000_000; let txns = vec![ @@ -660,7 +660,7 @@ fn test_proof_pull_proofs_with_duplicates() { fn test_proof_queue_soft_limit() { let my_peer_id = PeerId::random(); let batch_store = batch_store_for_test(5 * 1024 * 1024); - let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store); + let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store, 1); let author = PeerId::random(); @@ -702,7 +702,7 @@ fn test_proof_queue_soft_limit() { fn test_proof_queue_insert_after_commit() { let my_peer_id = PeerId::random(); let batch_store = batch_store_for_test(5 * 1024); - let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store); + let mut 
proof_queue = BatchProofQueue::new(my_peer_id, batch_store, 1); let author = PeerId::random(); let author_batches = vec![ @@ -734,7 +734,7 @@ fn test_proof_queue_insert_after_commit() { fn test_proof_queue_pull_full_utilization() { let my_peer_id = PeerId::random(); let batch_store = batch_store_for_test(5 * 1024); - let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store); + let mut proof_queue = BatchProofQueue::new(my_peer_id, batch_store, 1); let author = PeerId::random(); let author_batches = vec![ diff --git a/consensus/src/quorum_store/tests/batch_requester_test.rs b/consensus/src/quorum_store/tests/batch_requester_test.rs index 018fce0c486b2..33b63849e1940 100644 --- a/consensus/src/quorum_store/tests/batch_requester_test.rs +++ b/consensus/src/quorum_store/tests/batch_requester_test.rs @@ -89,7 +89,8 @@ async fn test_batch_request_exists() { 1_000, 1_000, MockBatchRequester::new(batch_response), - ValidatorVerifier::new_single(validator_signer.author(), validator_signer.public_key()), + ValidatorVerifier::new_single(validator_signer.author(), validator_signer.public_key()) + .into(), ); let (_, subscriber_rx) = oneshot::channel(); @@ -184,7 +185,7 @@ async fn test_batch_request_not_exists_not_expired() { retry_interval_ms, 1_000, MockBatchRequester::new(batch_response), - validator_verifier, + validator_verifier.into(), ); let request_start = Instant::now(); @@ -232,7 +233,7 @@ async fn test_batch_request_not_exists_expired() { retry_interval_ms, 1_000, MockBatchRequester::new(batch_response), - validator_verifier, + validator_verifier.into(), ); let request_start = Instant::now(); diff --git a/consensus/src/quorum_store/tests/direct_mempool_quorum_store_test.rs b/consensus/src/quorum_store/tests/direct_mempool_quorum_store_test.rs index 7f04c4abf71ca..aa90aa5f03546 100644 --- a/consensus/src/quorum_store/tests/direct_mempool_quorum_store_test.rs +++ b/consensus/src/quorum_store/tests/direct_mempool_quorum_store_test.rs @@ -35,11 +35,11 @@ async 
fn test_block_request_no_txns() { max_txns_after_filtering: 100, soft_max_txns_after_filtering: 100, max_inline_txns: PayloadTxnsSize::new(50, 500), - opt_batch_txns_pct: 0, return_non_full: true, filter: PayloadFilter::DirectMempool(vec![]), callback: consensus_callback, block_timestamp: aptos_infallible::duration_since_epoch(), + maybe_optqs_payload_pull_params: None, })) .unwrap(); diff --git a/consensus/src/quorum_store/tests/proof_coordinator_test.rs b/consensus/src/quorum_store/tests/proof_coordinator_test.rs index 2a2a2378ee9b2..e38fb3fda274f 100644 --- a/consensus/src/quorum_store/tests/proof_coordinator_test.rs +++ b/consensus/src/quorum_store/tests/proof_coordinator_test.rs @@ -62,6 +62,7 @@ async fn test_proof_coordinator_basic() { let (proof_coordinator_tx, proof_coordinator_rx) = channel(100); let (tx, mut rx) = channel(100); let network_sender = MockQuorumStoreSender::new(tx); + let verifier = Arc::new(verifier); tokio::spawn(proof_coordinator.start(proof_coordinator_rx, network_sender, verifier.clone())); let batch_author = signers[0].author(); diff --git a/consensus/src/quorum_store/tests/proof_manager_test.rs b/consensus/src/quorum_store/tests/proof_manager_test.rs index cf87abfecba84..3eebe4c667937 100644 --- a/consensus/src/quorum_store/tests/proof_manager_test.rs +++ b/consensus/src/quorum_store/tests/proof_manager_test.rs @@ -17,7 +17,7 @@ use std::{cmp::max, collections::HashSet}; fn create_proof_manager() -> ProofManager { let batch_store = batch_store_for_test(5 * 1024 * 1024); - ProofManager::new(PeerId::random(), 10, 10, batch_store, true, false) + ProofManager::new(PeerId::random(), 10, 10, batch_store, true, 1) } fn create_proof(author: PeerId, expiration: u64, batch_sequence: u64) -> ProofOfStore { @@ -62,8 +62,8 @@ async fn get_proposal( filter: PayloadFilter::InQuorumStore(filter_set), callback: callback_tx, block_timestamp: aptos_infallible::duration_since_epoch(), - opt_batch_txns_pct: 0, return_non_full: true, + 
maybe_optqs_payload_pull_params: None, }); proof_manager.handle_proposal_request(req); let GetPayloadResponse::GetPayloadResponse(payload) = callback_rx.await.unwrap().unwrap(); diff --git a/consensus/src/rand/rand_gen/rand_store.rs b/consensus/src/rand/rand_gen/rand_store.rs index 99c63b7a40b0f..aa43427a14a89 100644 --- a/consensus/src/rand/rand_gen/rand_store.rs +++ b/consensus/src/rand/rand_gen/rand_store.rs @@ -440,7 +440,7 @@ mod tests { let rand_config = RandConfig::new( authors[my_index], target_epoch, - verifier, + verifier.into(), vuf_pub_params, rand_keys, weighted_config, diff --git a/consensus/src/rand/rand_gen/types.rs b/consensus/src/rand/rand_gen/types.rs index 9c996ca140c95..61a31c33ec42e 100644 --- a/consensus/src/rand/rand_gen/types.rs +++ b/consensus/src/rand/rand_gen/types.rs @@ -581,7 +581,7 @@ impl CertifiedAugDataAck { pub struct RandConfig { author: Author, epoch: u64, - validator: ValidatorVerifier, + validator: Arc, // public parameters of the weighted VUF vuf_pp: WvufPP, // key shares for weighted VUF @@ -604,7 +604,7 @@ impl RandConfig { pub fn new( author: Author, epoch: u64, - validator: ValidatorVerifier, + validator: Arc, vuf_pp: WvufPP, keys: RandKeys, wconfig: WeightedConfig, diff --git a/consensus/src/round_manager.rs b/consensus/src/round_manager.rs index f423d93d1e0ff..3c8cdb6993159 100644 --- a/consensus/src/round_manager.rs +++ b/consensus/src/round_manager.rs @@ -16,6 +16,7 @@ use crate::{ error::{error_kind, VerifyError}, liveness::{ proposal_generator::ProposalGenerator, + proposal_status_tracker::TPastProposalStatusTracker, proposer_election::ProposerElection, round_state::{NewRoundEvent, NewRoundReason, RoundState, RoundStateLogSchema}, unequivocal_proposer_election::UnequivocalProposerElection, @@ -26,7 +27,7 @@ use crate::{ network::NetworkSender, network_interface::ConsensusMsg, pending_order_votes::{OrderVoteReceptionResult, PendingOrderVotes}, - pending_votes::VoteReceptionResult, + 
pending_votes::{VoteReceptionResult, VoteStatus}, persistent_liveness_storage::PersistentLivenessStorage, quorum_store::types::BatchMsg, rand::rand_gen::types::{FastShare, RandConfig, Share, TShare}, @@ -39,12 +40,15 @@ use aptos_consensus_types::{ block::Block, block_data::BlockType, common::{Author, Round}, + order_vote::OrderVote, order_vote_msg::OrderVoteMsg, + pipelined_block::PipelinedBlock, proof_of_store::{ProofCache, ProofOfStoreMsg, SignedBatchInfoMsg}, proposal_msg::ProposalMsg, quorum_cert::QuorumCert, + round_timeout::{RoundTimeout, RoundTimeoutMsg, RoundTimeoutReason}, sync_info::SyncInfo, - timeout_2chain::TwoChainTimeoutCertificate, + timeout_2chain::{TwoChainTimeout, TwoChainTimeoutCertificate}, vote::Vote, vote_data::VoteData, vote_msg::VoteMsg, @@ -77,10 +81,11 @@ use tokio::{ time::{sleep, Instant}, }; -#[derive(Serialize, Clone)] +#[derive(Debug, Serialize, Clone)] pub enum UnverifiedEvent { ProposalMsg(Box), VoteMsg(Box), + RoundTimeoutMsg(Box), OrderVoteMsg(Box), SyncInfo(Box), BatchMsg(Box), @@ -122,6 +127,15 @@ impl UnverifiedEvent { } VerifiedEvent::VoteMsg(v) }, + UnverifiedEvent::RoundTimeoutMsg(v) => { + if !self_message { + v.verify(validator)?; + counters::VERIFY_MSG + .with_label_values(&["timeout"]) + .observe(start_time.elapsed().as_secs_f64()); + } + VerifiedEvent::RoundTimeoutMsg(v) + }, UnverifiedEvent::OrderVoteMsg(v) => { if !self_message { v.verify_order_vote(validator)?; @@ -177,6 +191,7 @@ impl UnverifiedEvent { UnverifiedEvent::BatchMsg(b) => b.epoch(), UnverifiedEvent::SignedBatchInfo(sd) => sd.epoch(), UnverifiedEvent::ProofOfStoreMsg(p) => p.epoch(), + UnverifiedEvent::RoundTimeoutMsg(t) => Ok(t.epoch()), } } } @@ -191,6 +206,7 @@ impl From for UnverifiedEvent { ConsensusMsg::BatchMsg(m) => UnverifiedEvent::BatchMsg(m), ConsensusMsg::SignedBatchInfo(m) => UnverifiedEvent::SignedBatchInfo(m), ConsensusMsg::ProofOfStoreMsg(m) => UnverifiedEvent::ProofOfStoreMsg(m), + ConsensusMsg::RoundTimeoutMsg(m) => 
UnverifiedEvent::RoundTimeoutMsg(m), _ => unreachable!("Unexpected conversion"), } } @@ -202,6 +218,7 @@ pub enum VerifiedEvent { ProposalMsg(Box), VerifiedProposalMsg(Box), VoteMsg(Box), + RoundTimeoutMsg(Box), OrderVoteMsg(Box), UnverifiedSyncInfo(Box), BatchMsg(Box), @@ -251,6 +268,7 @@ pub struct RoundManager { futures: FuturesUnordered< Pin, Block, Instant)> + Send>>, >, + proposal_status_tracker: Arc, } impl RoundManager { @@ -270,6 +288,7 @@ impl RoundManager { randomness_config: OnChainRandomnessConfig, jwk_consensus_config: OnChainJWKConsensusConfig, fast_rand_config: Option, + proposal_status_tracker: Arc, ) -> Self { // when decoupled execution is false, // the counter is still static. @@ -300,6 +319,7 @@ impl RoundManager { pending_order_votes: PendingOrderVotes::new(), blocks_with_broadcasted_fast_shares: LruCache::new(5), futures: FuturesUnordered::new(), + proposal_status_tracker, } } @@ -340,7 +360,7 @@ impl RoundManager { NewRoundReason::QCReady => { counters::QC_ROUNDS_COUNT.inc(); }, - NewRoundReason::Timeout => { + NewRoundReason::Timeout(_) => { counters::TIMEOUT_ROUNDS_COUNT.inc(); }, }; @@ -351,6 +371,9 @@ impl RoundManager { self.pending_order_votes .garbage_collect(self.block_store.sync_info().highest_ordered_round()); + self.proposal_status_tracker + .push(new_round_event.reason.clone()); + if self .proposer_election .is_valid_proposer(self.proposal_generator.author(), new_round_event.round) @@ -389,10 +412,9 @@ impl RoundManager { safety_rules: Arc>, proposer_election: Arc, ) -> anyhow::Result<()> { - let epoch = epoch_state.epoch; Self::log_collected_vote_stats(epoch_state.clone(), &new_round_event); let proposal_msg = Self::generate_proposal( - epoch, + epoch_state.clone(), new_round_event, sync_info, network.clone(), @@ -421,10 +443,17 @@ impl RoundManager { let prev_round_votes_for_li = new_round_event .prev_round_votes .iter() - .map(|(_, li_with_sig)| { - let (voting_power, votes): (Vec<_>, Vec<_>) = li_with_sig - .signatures() - 
.keys() + .map(|(_, vote_status)| { + let all_voters = match vote_status { + VoteStatus::EnoughVotes(li_with_sig) => epoch_state + .verifier + .aggregate_signature_authors(li_with_sig.signatures()), + VoteStatus::NotEnoughVotes(li_with_sig) => { + li_with_sig.all_voters().collect::>() + }, + }; + let (voting_power, votes): (Vec<_>, Vec<_>) = all_voters + .into_iter() .map(|author| { epoch_state .verifier @@ -492,7 +521,7 @@ impl RoundManager { new_round_event: NewRoundEvent, ) -> anyhow::Result { Self::generate_proposal( - self.epoch_state().epoch, + self.epoch_state.clone(), new_round_event, self.block_store.sync_info(), self.network.clone(), @@ -504,7 +533,7 @@ impl RoundManager { } async fn generate_proposal( - epoch: u64, + epoch_state: Arc, new_round_event: NewRoundEvent, sync_info: SyncInfo, network: Arc, @@ -527,7 +556,11 @@ impl RoundManager { Block::new_proposal_from_block_data_and_signature(proposal, signature); observe_block(signed_proposal.timestamp_usecs(), BlockStage::SIGNED); info!( - Self::new_log_with_round_epoch(LogEvent::Propose, new_round_event.round, epoch), + Self::new_log_with_round_epoch( + LogEvent::Propose, + new_round_event.round, + epoch_state.epoch + ), "{}", signed_proposal ); Ok(ProposalMsg::new(signed_proposal, sync_info)) @@ -600,17 +633,15 @@ impl RoundManager { ); // Some information in SyncInfo is ahead of what we have locally. // First verify the SyncInfo (didn't verify it in the yet). 
- sync_info - .verify(&self.epoch_state().verifier) - .map_err(|e| { - error!( - SecurityEvent::InvalidSyncInfoMsg, - sync_info = sync_info, - remote_peer = author, - error = ?e, - ); - VerifyError::from(e) - })?; + sync_info.verify(&self.epoch_state.verifier).map_err(|e| { + error!( + SecurityEvent::InvalidSyncInfoMsg, + sync_info = sync_info, + remote_peer = author, + error = ?e, + ); + VerifyError::from(e) + })?; SYNC_INFO_RECEIVED_WITH_NEWER_CERT.inc(); let result = self .block_store @@ -682,6 +713,23 @@ impl RoundManager { sync_or_not } + fn compute_timeout_reason(&self, round: Round) -> RoundTimeoutReason { + if self.round_state().vote_sent().is_some() { + return RoundTimeoutReason::NoQC; + } + + match self.block_store.get_block_for_round(round) { + None => RoundTimeoutReason::ProposalNotReceived, + Some(block) => { + if let Err(missing_authors) = self.block_store.check_payload(block.block()) { + RoundTimeoutReason::PayloadUnavailable { missing_authors } + } else { + RoundTimeoutReason::Unknown + } + }, + } + } + /// The replica broadcasts a "timeout vote message", which includes the round signature, which /// can be aggregated to a TimeoutCertificate. 
/// The timeout vote message can be one of the following three options: @@ -702,55 +750,101 @@ impl RoundManager { bail!("[RoundManager] sync_only flag is set, broadcasting SyncInfo"); } - let (is_nil_vote, mut timeout_vote) = match self.round_state.vote_sent() { - Some(vote) if vote.vote_data().proposed().round() == round => { - (vote.vote_data().is_for_nil(), vote) - }, - _ => { - // Didn't vote in this round yet, generate a backup vote - let nil_block = self - .proposal_generator - .generate_nil_block(round, self.proposer_election.clone())?; - info!( - self.new_log(LogEvent::VoteNIL), - "Planning to vote for a NIL block {}", nil_block + if self.local_config.enable_round_timeout_msg { + let timeout = if let Some(timeout) = self.round_state.timeout_sent() { + timeout + } else { + let timeout = TwoChainTimeout::new( + self.epoch_state.epoch, + round, + self.block_store.highest_quorum_cert().as_ref().clone(), ); - counters::VOTE_NIL_COUNT.inc(); - let nil_vote = self.vote_block(nil_block).await?; - (true, nil_vote) - }, - }; + let signature = self + .safety_rules + .lock() + .sign_timeout_with_qc( + &timeout, + self.block_store.highest_2chain_timeout_cert().as_deref(), + ) + .context("[RoundManager] SafetyRules signs 2-chain timeout")?; - if !timeout_vote.is_timeout() { - let timeout = timeout_vote - .generate_2chain_timeout(self.block_store.highest_quorum_cert().as_ref().clone()); - let signature = self - .safety_rules - .lock() - .sign_timeout_with_qc( - &timeout, - self.block_store.highest_2chain_timeout_cert().as_deref(), + let timeout_reason = self.compute_timeout_reason(round); + + RoundTimeout::new( + timeout, + self.proposal_generator.author(), + timeout_reason, + signature, ) - .context("[RoundManager] SafetyRules signs 2-chain timeout")?; - timeout_vote.add_2chain_timeout(timeout, signature); - } + }; - self.round_state.record_vote(timeout_vote.clone()); - let timeout_vote_msg = VoteMsg::new(timeout_vote, self.block_store.sync_info()); - 
self.network.broadcast_timeout_vote(timeout_vote_msg).await; - warn!( - round = round, - remote_peer = self.proposer_election.get_valid_proposer(round), - voted_nil = is_nil_vote, - event = LogEvent::Timeout, - ); - bail!("Round {} timeout, broadcast to all peers", round); + self.round_state.record_round_timeout(timeout.clone()); + let round_timeout_msg = RoundTimeoutMsg::new(timeout, self.block_store.sync_info()); + self.network + .broadcast_round_timeout(round_timeout_msg) + .await; + warn!( + round = round, + remote_peer = self.proposer_election.get_valid_proposer(round), + event = LogEvent::Timeout, + ); + bail!("Round {} timeout, broadcast to all peers", round); + } else { + let (is_nil_vote, mut timeout_vote) = match self.round_state.vote_sent() { + Some(vote) if vote.vote_data().proposed().round() == round => { + (vote.vote_data().is_for_nil(), vote) + }, + _ => { + // Didn't vote in this round yet, generate a backup vote + let nil_block = self + .proposal_generator + .generate_nil_block(round, self.proposer_election.clone())?; + info!( + self.new_log(LogEvent::VoteNIL), + "Planning to vote for a NIL block {}", nil_block + ); + counters::VOTE_NIL_COUNT.inc(); + let nil_vote = self.vote_block(nil_block).await?; + (true, nil_vote) + }, + }; + + if !timeout_vote.is_timeout() { + let timeout = timeout_vote.generate_2chain_timeout( + self.block_store.highest_quorum_cert().as_ref().clone(), + ); + let signature = self + .safety_rules + .lock() + .sign_timeout_with_qc( + &timeout, + self.block_store.highest_2chain_timeout_cert().as_deref(), + ) + .context("[RoundManager] SafetyRules signs 2-chain timeout")?; + timeout_vote.add_2chain_timeout(timeout, signature); + } + + self.round_state.record_vote(timeout_vote.clone()); + let timeout_vote_msg = VoteMsg::new(timeout_vote, self.block_store.sync_info()); + self.network.broadcast_timeout_vote(timeout_vote_msg).await; + warn!( + round = round, + remote_peer = self.proposer_election.get_valid_proposer(round), + 
voted_nil = is_nil_vote, + event = LogEvent::Timeout, + ); + bail!("Round {} timeout, broadcast to all peers", round); + } } /// This function is called only after all the dependencies of the given QC have been retrieved. async fn process_certificates(&mut self) -> anyhow::Result<()> { let sync_info = self.block_store.sync_info(); - if let Some(new_round_event) = self.round_state.process_certificates(sync_info) { + let epoch_state = self.epoch_state.clone(); + if let Some(new_round_event) = self + .round_state + .process_certificates(sync_info, &epoch_state.verifier) + { self.process_new_round_event(new_round_event).await?; } Ok(()) @@ -877,16 +971,30 @@ impl RoundManager { observe_block(proposal.timestamp_usecs(), BlockStage::SYNCED); + // Since processing proposal is delayed due to backpressure or payload availability, we add + // the block to the block store so that we don't need to fetch it from remote once we + // are out of the backpressure. Please note that delayed processing of proposal is not + // guaranteed to add the block to the block store if we don't get out of the backpressure + // before the timeout, so this is needed to ensure that the proposed block is added to + // the block store irrespective. Also, it is possible that delayed processing of proposal + // tries to add the same block again, which is okay as `execute_and_insert_block` call + // is idempotent. 
+ self.block_store + .insert_block(proposal.clone()) + .await + .context("[RoundManager] Failed to insert the block into BlockStore")?; + let block_store = self.block_store.clone(); - if !block_store.check_payload(&proposal) { + if block_store.check_payload(&proposal).is_err() { debug!("Payload not available locally for block: {}", proposal.id()); counters::CONSENSUS_PROPOSAL_PAYLOAD_AVAILABILITY .with_label_values(&["missing"]) .inc(); let start_time = Instant::now(); + let deadline = self.round_state.current_round_deadline(); let future = async move { ( - block_store.wait_for_payload(&proposal).await, + block_store.wait_for_payload(&proposal, deadline).await, proposal, start_time, ) @@ -914,18 +1022,7 @@ impl RoundManager { if self.block_store.vote_back_pressure() { counters::CONSENSUS_WITHOLD_VOTE_BACKPRESSURE_TRIGGERED.observe(1.0); // In case of back pressure, we delay processing proposal. This is done by resending the - // same proposal to self after some time. Even if processing proposal is delayed, we add - // the block to the block store so that we don't need to fetch it from remote once we - // are out of the backpressure. Please note that delayed processing of proposal is not - // guaranteed to add the block to the block store if we don't get out of the backpressure - // before the timeout, so this is needed to ensure that the proposed block is added to - // the block store irrespective. Also, it is possible that delayed processing of proposal - // tries to add the same block again, which is okay as `execute_and_insert_block` call - // is idempotent. - self.block_store - .insert_block(proposal.clone()) - .await - .context("[RoundManager] Failed to execute_and_insert the block")?; + // same proposal to self after some time. 
Self::resend_verified_proposal_to_self( self.block_store.clone(), self.buffered_proposal_tx.clone(), @@ -989,12 +1086,28 @@ impl RoundManager { } } - pub async fn process_verified_proposal(&mut self, proposal: Block) -> anyhow::Result<()> { - let proposal_round = proposal.round(); + async fn create_vote(&mut self, proposal: Block) -> anyhow::Result { let vote = self .vote_block(proposal) .await .context("[RoundManager] Process proposal")?; + + fail_point!("consensus::create_invalid_vote", |_| { + use aptos_crypto::bls12381; + let faulty_vote = Vote::new_with_signature( + vote.vote_data().clone(), + vote.author(), + vote.ledger_info().clone(), + bls12381::Signature::dummy_signature(), + ); + Ok(faulty_vote) + }); + Ok(vote) + } + + pub async fn process_verified_proposal(&mut self, proposal: Block) -> anyhow::Result<()> { + let proposal_round = proposal.round(); + let vote = self.create_vote(proposal).await?; self.round_state.record_vote(vote.clone()); let vote_msg = VoteMsg::new(vote.clone(), self.block_store.sync_info()); @@ -1097,7 +1210,7 @@ impl RoundManager { let start = Instant::now(); order_vote_msg .quorum_cert() - .verify(&self.epoch_state().verifier) + .verify(&self.epoch_state.verifier) .context("[OrderVoteMsg QuorumCert verification failed")?; counters::VERIFY_MSG .with_label_values(&["order_vote_qc"]) @@ -1139,6 +1252,33 @@ impl RoundManager { Ok(()) } + async fn create_order_vote( + &mut self, + block: Arc, + qc: Arc, + ) -> anyhow::Result { + let order_vote_proposal = block.order_vote_proposal(qc); + let order_vote_result = self + .safety_rules + .lock() + .construct_and_sign_order_vote(&order_vote_proposal); + let order_vote = order_vote_result.context(format!( + "[RoundManager] SafetyRules Rejected {} for order vote", + block.block() + ))?; + + fail_point!("consensus::create_invalid_order_vote", |_| { + use aptos_crypto::bls12381; + let faulty_order_vote = OrderVote::new_with_signature( + order_vote.author(), + order_vote.ledger_info().clone(), + 
bls12381::Signature::dummy_signature(), + ); + Ok(faulty_order_vote) + }); + Ok(order_vote) + } + async fn broadcast_order_vote( &mut self, vote: &Vote, @@ -1146,22 +1286,16 @@ impl RoundManager { ) -> anyhow::Result<()> { if let Some(proposed_block) = self.block_store.get_block(vote.vote_data().proposed().id()) { // Generate an order vote with ledger_info = proposed_block - let order_vote_proposal = proposed_block.order_vote_proposal(qc.clone()); - let order_vote_result = self - .safety_rules - .lock() - .construct_and_sign_order_vote(&order_vote_proposal); - let order_vote = order_vote_result.context(format!( - "[RoundManager] SafetyRules Rejected {} for order vote", - proposed_block.block() - ))?; + let order_vote = self + .create_order_vote(proposed_block.clone(), qc.clone()) + .await?; if !proposed_block.block().is_nil_block() { observe_block( proposed_block.block().timestamp_usecs(), BlockStage::ORDER_VOTED, ); } - let order_vote_msg = OrderVoteMsg::new(order_vote.clone(), qc.as_ref().clone()); + let order_vote_msg = OrderVoteMsg::new(order_vote, qc.as_ref().clone()); info!( self.new_log(LogEvent::BroadcastOrderVote), "{}", order_vote_msg @@ -1302,7 +1436,7 @@ impl RoundManager { VoteReceptionResult::New2ChainTimeoutCertificate(tc) => { self.new_2chain_tc_aggregated(tc).await }, - VoteReceptionResult::EchoTimeout(_) if !self.round_state.is_vote_timeout() => { + VoteReceptionResult::EchoTimeout(_) if !self.round_state.is_timeout_sent() => { self.process_local_timeout(round).await }, VoteReceptionResult::VoteAdded(_) => { @@ -1314,6 +1448,70 @@ impl RoundManager { } } + async fn process_timeout_reception_result( + &mut self, + timeout: &RoundTimeout, + result: VoteReceptionResult, + ) -> anyhow::Result<()> { + let round = timeout.round(); + match result { + VoteReceptionResult::New2ChainTimeoutCertificate(tc) => { + self.new_2chain_tc_aggregated(tc).await + }, + VoteReceptionResult::EchoTimeout(_) if !self.round_state.is_timeout_sent() => { + 
self.process_local_timeout(round).await + }, + VoteReceptionResult::VoteAdded(_) | VoteReceptionResult::EchoTimeout(_) => Ok(()), + result @ VoteReceptionResult::NewQuorumCertificate(_) + | result @ VoteReceptionResult::DuplicateVote => { + bail!("Unexpected result from timeout processing: {:?}", result); + }, + e => Err(anyhow::anyhow!("{:?}", e)), + } + } + + pub async fn process_round_timeout_msg( + &mut self, + round_timeout_msg: RoundTimeoutMsg, + ) -> anyhow::Result<()> { + fail_point!("consensus::process_round_timeout_msg", |_| { + Err(anyhow::anyhow!( + "Injected error in process_round_timeout_msg" + )) + }); + // Check whether this validator is a valid recipient of the vote. + if self + .ensure_round_and_sync_up( + round_timeout_msg.round(), + round_timeout_msg.sync_info(), + round_timeout_msg.author(), + ) + .await + .context("[RoundManager] Stop processing vote")? + { + self.process_round_timeout(round_timeout_msg.timeout()) + .await + .context("[RoundManager] Add a new timeout")?; + } + Ok(()) + } + + async fn process_round_timeout(&mut self, timeout: RoundTimeout) -> anyhow::Result<()> { + info!( + self.new_log(LogEvent::ReceiveRoundTimeout) + .remote_peer(timeout.author()), + vote = %timeout, + epoch = timeout.epoch(), + round = timeout.round(), + ); + + let vote_reception_result = self + .round_state + .insert_round_timeout(&timeout, &self.epoch_state.verifier); + self.process_timeout_reception_result(&timeout, vote_reception_result) + .await + } + async fn process_order_vote_reception_result( &mut self, result: OrderVoteReceptionResult, @@ -1436,9 +1634,10 @@ impl RoundManager { /// To jump start new round with the current certificates we have. 
pub async fn init(&mut self, last_vote_sent: Option) { + let epoch_state = self.epoch_state.clone(); let new_round_event = self .round_state - .process_certificates(self.block_store.sync_info()) + .process_certificates(self.block_store.sync_info(), &epoch_state.verifier) .expect("Can not jump start a round_state from existing certificates."); if let Some(vote) = last_vote_sent { self.round_state.record_vote(vote); @@ -1459,10 +1658,6 @@ impl RoundManager { self.safety_rules = safety_rules } - pub fn epoch_state(&self) -> &EpochState { - &self.epoch_state - } - pub fn round_state(&self) -> &RoundState { &self.round_state } @@ -1471,7 +1666,7 @@ impl RoundManager { Self::new_log_with_round_epoch( event, self.round_state().current_round(), - self.epoch_state().epoch, + self.epoch_state.epoch, ) } @@ -1490,7 +1685,7 @@ impl RoundManager { mut buffered_proposal_rx: aptos_channel::Receiver, close_rx: oneshot::Receiver>, ) { - info!(epoch = self.epoch_state().epoch, "RoundManager started"); + info!(epoch = self.epoch_state.epoch, "RoundManager started"); let mut close_rx = close_rx.into_stream(); loop { tokio::select! 
{ @@ -1500,7 +1695,7 @@ impl RoundManager { ack_sender.send(()).expect("[RoundManager] Fail to ack shutdown"); } break; - }, + } proposal = buffered_proposal_rx.select_next_some() => { let mut proposals = vec![proposal]; while let Some(Some(proposal)) = buffered_proposal_rx.next().now_or_never() { @@ -1566,6 +1761,9 @@ impl RoundManager { VerifiedEvent::VoteMsg(vote_msg) => { monitor!("process_vote", self.process_vote_msg(*vote_msg).await) } + VerifiedEvent::RoundTimeoutMsg(timeout_msg) => { + monitor!("process_round_timeout", self.process_round_timeout_msg(*timeout_msg).await) + } VerifiedEvent::OrderVoteMsg(order_vote_msg) => { monitor!("process_order_vote", self.process_order_vote_msg(*order_vote_msg).await) } @@ -1594,7 +1792,7 @@ impl RoundManager { }, } } - info!(epoch = self.epoch_state().epoch, "RoundManager stopped"); + info!(epoch = self.epoch_state.epoch, "RoundManager stopped"); } #[cfg(feature = "failpoints")] diff --git a/consensus/src/round_manager_fuzzing.rs b/consensus/src/round_manager_fuzzing.rs index 2eefa70b0752d..16e98b94a3f19 100644 --- a/consensus/src/round_manager_fuzzing.rs +++ b/consensus/src/round_manager_fuzzing.rs @@ -20,7 +20,9 @@ use crate::{ persistent_liveness_storage::{PersistentLivenessStorage, RecoveryData}, pipeline::execution_client::DummyExecutionClient, round_manager::RoundManager, - test_utils::{MockPayloadManager, MockStorage}, + test_utils::{ + MockOptQSPayloadProvider, MockPastProposalStatusTracker, MockPayloadManager, MockStorage, + }, util::{mock_time_service::SimulatedTimeService, time_service::TimeService}, }; use aptos_channels::{self, aptos_channel, message_queues::QueueStyle}; @@ -148,10 +150,7 @@ fn create_node_for_fuzzing() -> RoundManager { let (self_sender, _self_receiver) = aptos_channels::new_unbounded_test(); - let epoch_state = Arc::new(EpochState { - epoch: 1, - verifier: storage.get_validator_set().into(), - }); + let epoch_state = Arc::new(EpochState::new(1, storage.get_validator_set().into())); let 
network = Arc::new(NetworkSender::new( signer.author(), consensus_network_client, @@ -183,6 +182,7 @@ fn create_node_for_fuzzing() -> RoundManager { false, ValidatorTxnConfig::default_disabled(), true, + Arc::new(MockOptQSPayloadProvider {}), ); // @@ -212,6 +212,7 @@ fn create_node_for_fuzzing() -> RoundManager { OnChainRandomnessConfig::default_enabled(), OnChainJWKConsensusConfig::default_enabled(), None, + Arc::new(MockPastProposalStatusTracker {}), ) } diff --git a/consensus/src/round_manager_test.rs b/consensus/src/round_manager_test.rs index c12e476a7f56c..29716947991bd 100644 --- a/consensus/src/round_manager_test.rs +++ b/consensus/src/round_manager_test.rs @@ -23,8 +23,8 @@ use crate::{ round_manager::RoundManager, test_utils::{ consensus_runtime, create_vec_signed_transactions, - mock_execution_client::MockExecutionClient, timed_block_on, MockPayloadManager, - MockStorage, TreeInserter, + mock_execution_client::MockExecutionClient, timed_block_on, MockOptQSPayloadProvider, + MockPastProposalStatusTracker, MockPayloadManager, MockStorage, TreeInserter, }, util::time_service::{ClockTimeService, TimeService}, }; @@ -42,6 +42,7 @@ use aptos_consensus_types::{ common::{Author, Payload, Round}, pipeline::commit_decision::CommitDecision, proposal_msg::ProposalMsg, + round_timeout::RoundTimeoutMsg, sync_info::SyncInfo, timeout_2chain::{TwoChainTimeout, TwoChainTimeoutWithPartialSignatures}, utils::PayloadTxnsSize, @@ -231,10 +232,7 @@ impl NodeSetup { onchain_jwk_consensus_config: OnChainJWKConsensusConfig, ) -> Self { let _entered_runtime = executor.enter(); - let epoch_state = Arc::new(EpochState { - epoch: 1, - verifier: storage.get_validator_set().into(), - }); + let epoch_state = Arc::new(EpochState::new(1, storage.get_validator_set().into())); let validators = epoch_state.verifier.clone(); let (network_reqs_tx, network_reqs_rx) = aptos_channel::new(QueueStyle::FIFO, 8, None); let (connection_reqs_tx, _) = aptos_channel::new(QueueStyle::FIFO, 8, None); @@ 
-307,6 +305,7 @@ impl NodeSetup { false, onchain_consensus_config.effective_validator_txn_config(), true, + Arc::new(MockOptQSPayloadProvider {}), ); let round_state = Self::create_round_state(time_service); @@ -334,6 +333,7 @@ impl NodeSetup { onchain_randomness_config.clone(), onchain_jwk_consensus_config.clone(), None, + Arc::new(MockPastProposalStatusTracker {}), ); block_on(round_manager.init(last_vote_sent)); Self { @@ -447,6 +447,17 @@ impl NodeSetup { } } + pub async fn next_timeout(&mut self) -> RoundTimeoutMsg { + match self.next_network_message().await { + ConsensusMsg::RoundTimeoutMsg(v) => *v, + msg => panic!( + "Unexpected Consensus Message: {:?} on node {}", + msg, + self.identity_desc() + ), + } + } + pub async fn next_commit_decision(&mut self) -> CommitDecision { match self.next_network_message().await { ConsensusMsg::CommitDecisionMsg(v) => *v, @@ -637,6 +648,7 @@ fn process_and_vote_on_proposal( info!("Processing votes on node {}", proposer_node.identity_desc()); if process_votes { for vote_msg in votes { + vote_msg.vote().set_verified(); timed_block_on( runtime, proposer_node.round_manager.process_vote_msg(vote_msg), @@ -688,6 +700,7 @@ fn new_round_on_quorum_cert() { .await .unwrap(); let vote_msg = node.next_vote().await; + vote_msg.vote().set_verified(); // Adding vote to form a QC node.round_manager.process_vote_msg(vote_msg).await.unwrap(); @@ -984,13 +997,14 @@ fn sync_info_carried_on_timeout_vote() { .insert_single_quorum_cert(block_0_quorum_cert.clone()) .unwrap(); - node.round_manager - .round_state - .process_certificates(SyncInfo::new( + node.round_manager.round_state.process_certificates( + SyncInfo::new( block_0_quorum_cert.clone(), block_0_quorum_cert.into_wrapped_ledger_info(), None, - )); + ), + &generate_validator_verifier(&[node.signer.clone()]), + ); node.round_manager .process_local_timeout(2) .await @@ -1488,6 +1502,46 @@ fn nil_vote_on_timeout() { }); } +#[test] +/// Generate a Timeout upon timeout if no votes have been 
sent in the round. +fn timeout_round_on_timeout() { + let runtime = consensus_runtime(); + let mut playground = NetworkPlayground::new(runtime.handle().clone()); + let local_config = ConsensusConfig { + enable_round_timeout_msg: true, + ..Default::default() + }; + let mut nodes = NodeSetup::create_nodes( + &mut playground, + runtime.handle().clone(), + 1, + None, + None, + Some(local_config), + None, + None, + ); + let node = &mut nodes[0]; + let genesis = node.block_store.ordered_root(); + timed_block_on(&runtime, async { + node.next_proposal().await; + // Process the outgoing vote message and verify that it contains a round signature + // and that the vote extends genesis. + node.round_manager + .process_local_timeout(1) + .await + .unwrap_err(); + let timeout_msg = node.next_timeout().await; + + let timeout = timeout_msg.timeout(); + + assert_eq!(timeout.round(), 1); + assert_eq!(timeout.author(), node.signer.author()); + assert_eq!(timeout.epoch(), 1); + assert_eq!(timeout.two_chain_timeout().hqc_round(), genesis.round()); + }); +} + #[test] /// If the node votes in a round, upon timeout the same vote is re-sent with a timeout signature. fn vote_resent_on_timeout() { @@ -1529,6 +1583,50 @@ fn vote_resent_on_timeout() { }); } +#[test] +/// If the node votes in a round, upon timeout the same vote is re-sent with a timeout signature. 
+fn timeout_sent_on_timeout_after_vote() { + let runtime = consensus_runtime(); + let mut playground = NetworkPlayground::new(runtime.handle().clone()); + let local_config = ConsensusConfig { + enable_round_timeout_msg: true, + ..Default::default() + }; + let mut nodes = NodeSetup::create_nodes( + &mut playground, + runtime.handle().clone(), + 1, + None, + None, + Some(local_config), + None, + None, + ); + let node = &mut nodes[0]; + timed_block_on(&runtime, async { + let proposal_msg = node.next_proposal().await; + let id = proposal_msg.proposal().id(); + node.round_manager + .process_proposal_msg(proposal_msg) + .await + .unwrap(); + let vote_msg = node.next_vote().await; + let vote = vote_msg.vote(); + assert!(!vote.is_timeout()); + assert_eq!(vote.vote_data().proposed().id(), id); + // Process the outgoing vote message and verify that it contains a round signature + // and that the vote is the same as above. + node.round_manager + .process_local_timeout(1) + .await + .unwrap_err(); + let timeout_msg = node.next_timeout().await; + + assert_eq!(timeout_msg.round(), vote.vote_data().proposed().round()); + assert_eq!(timeout_msg.sync_info(), vote_msg.sync_info()); + }); +} + #[test] #[ignore] // TODO: this test needs to be fixed! 
fn sync_on_partial_newer_sync_info() { @@ -1548,7 +1646,7 @@ fn sync_on_partial_newer_sync_info() { runtime.spawn(playground.start()); timed_block_on(&runtime, async { // commit block 1 after 4 rounds - for _ in 1..=4 { + for i in 1..=4 { let proposal_msg = node.next_proposal().await; node.round_manager @@ -1556,6 +1654,9 @@ fn sync_on_partial_newer_sync_info() { .await .unwrap(); let vote_msg = node.next_vote().await; + if i < 2 { + vote_msg.vote().set_verified(); + } // Adding vote to form a QC node.round_manager.process_vote_msg(vote_msg).await.unwrap(); } @@ -1652,6 +1753,7 @@ fn safety_rules_crash() { // sign proposal reset_safety_rules(&mut node); + vote_msg.vote().set_verified(); node.round_manager.process_vote_msg(vote_msg).await.unwrap(); } @@ -1691,6 +1793,9 @@ fn echo_timeout() { // node 0 doesn't timeout and should echo the timeout after 2 timeout message for i in 0..3 { let timeout_vote = node_0.next_vote().await; + if i < 2 { + timeout_vote.vote().set_verified(); + } let result = node_0.round_manager.process_vote_msg(timeout_vote).await; // first and third message should not timeout if i == 0 || i == 2 { @@ -1704,8 +1809,13 @@ fn echo_timeout() { let node_1 = &mut nodes[1]; // it receives 4 timeout messages (1 from each) and doesn't echo since it already timeout - for _ in 0..4 { + for i in 0..4 { let timeout_vote = node_1.next_vote().await; + // Verifying only some vote messages to check that round manager can accept both + // verified and unverified votes + if i < 2 { + timeout_vote.vote().set_verified(); + } node_1 .round_manager .process_vote_msg(timeout_vote) @@ -2027,6 +2137,7 @@ pub fn forking_retrieval_test() { } let vote_msg_on_timeout = node.next_vote().await; + vote_msg_on_timeout.vote().set_verified(); assert!(vote_msg_on_timeout.vote().is_timeout()); if node.id != behind_node { let result = node diff --git a/consensus/src/state_computer.rs b/consensus/src/state_computer.rs index b06d254aea429..28e29379846d6 100644 --- 
a/consensus/src/state_computer.rs +++ b/consensus/src/state_computer.rs @@ -7,7 +7,7 @@ use crate::{ block_storage::tracing::{observe_block, BlockStage}, counters, error::StateSyncError, - execution_pipeline::ExecutionPipeline, + execution_pipeline::{ExecutionPipeline, PreCommitHook}, monitor, payload_manager::TPayloadManager, pipeline::pipeline_phase::CountedRequest, @@ -24,13 +24,14 @@ use aptos_consensus_types::{ pipelined_block::PipelinedBlock, }; use aptos_crypto::HashValue; -use aptos_executor_types::{BlockExecutorTrait, ExecutorResult}; +use aptos_executor_types::{BlockExecutorTrait, ExecutorResult, StateComputeResult}; use aptos_infallible::RwLock; use aptos_logger::prelude::*; +use aptos_metrics_core::IntGauge; use aptos_types::{ account_address::AccountAddress, block_executor::config::BlockExecutorConfigFromOnchain, - contract_event::ContractEvent, epoch_state::EpochState, ledger_info::LedgerInfoWithSignatures, - randomness::Randomness, transaction::Transaction, + block_metadata_ext::BlockMetadataExt, epoch_state::EpochState, + ledger_info::LedgerInfoWithSignatures, randomness::Randomness, transaction::SignedTransaction, }; use fail::fail_point; use futures::{future::BoxFuture, SinkExt, StreamExt}; @@ -39,11 +40,7 @@ use tokio::sync::Mutex as AsyncMutex; pub type StateComputeResultFut = BoxFuture<'static, ExecutorResult>; -type NotificationType = ( - Box, - Vec, - Vec, // Subscribable events, e.g. 
NewEpochEvent, DKGStartEvent -); +type NotificationType = BoxFuture<'static, ()>; #[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)] struct LogicalTime { @@ -73,7 +70,8 @@ pub struct ExecutionProxy { executor: Arc, txn_notifier: Arc, state_sync_notifier: Arc, - async_state_sync_notifier: aptos_channels::Sender, + pre_commit_notifier: aptos_channels::Sender, + commit_notifier: aptos_channels::Sender, write_mutex: AsyncMutex, transaction_filter: Arc, execution_pipeline: ExecutionPipeline, @@ -89,28 +87,22 @@ impl ExecutionProxy { txn_filter: TransactionFilter, enable_pre_commit: bool, ) -> Self { - let (tx, mut rx) = - aptos_channels::new::(10, &counters::PENDING_STATE_SYNC_NOTIFICATION); - let notifier = state_sync_notifier.clone(); - handle.spawn(async move { - while let Some((callback, txns, subscribable_events)) = rx.next().await { - if let Err(e) = monitor!( - "notify_state_sync", - notifier.notify_new_commit(txns, subscribable_events).await - ) { - error!(error = ?e, "Failed to notify state synchronizer"); - } + let pre_commit_notifier = Self::spawn_future_runner( + handle, + "pre-commit", + &counters::PENDING_STATE_SYNC_NOTIFICATION, + ); + let commit_notifier = + Self::spawn_future_runner(handle, "commit", &counters::PENDING_COMMIT_NOTIFICATION); - callback(); - } - }); let execution_pipeline = ExecutionPipeline::spawn(executor.clone(), handle, enable_pre_commit); Self { executor, txn_notifier, state_sync_notifier, - async_state_sync_notifier: tx, + pre_commit_notifier, + commit_notifier, write_mutex: AsyncMutex::new(LogicalTime::new(0, 0)), transaction_filter: Arc::new(txn_filter), execution_pipeline, @@ -118,33 +110,62 @@ impl ExecutionProxy { } } - fn transactions_to_commit( - &self, - executed_block: &PipelinedBlock, - validators: &[AccountAddress], - randomness_enabled: bool, - ) -> Vec { - // reconfiguration suffix don't execute - if executed_block.is_reconfiguration_suffix() { - return vec![]; - } - - let user_txns = 
executed_block.input_transactions().clone(); - let validator_txns = executed_block.validator_txns().cloned().unwrap_or_default(); - let metadata = if randomness_enabled { - executed_block - .block() - .new_metadata_with_randomness(validators, executed_block.randomness().cloned()) - } else { - executed_block.block().new_block_metadata(validators).into() - }; - - let input_txns = Block::combine_to_input_transactions(validator_txns, user_txns, metadata); + fn spawn_future_runner( + handle: &tokio::runtime::Handle, + name: &'static str, + pending_notifications_gauge: &IntGauge, + ) -> aptos_channels::Sender { + let (tx, mut rx) = aptos_channels::new::(10, pending_notifications_gauge); + let _join_handle = handle.spawn(async move { + while let Some(fut) = rx.next().await { + fut.await + } + info!(name = name, "Future runner stopped.") + }); + tx + } - // Adds StateCheckpoint/BlockEpilogue transaction if needed. - executed_block - .compute_result() - .transactions_to_commit(input_txns, executed_block.id()) + fn pre_commit_hook( + &self, + block: &Block, + metadata: BlockMetadataExt, + payload_manager: Arc, + ) -> PreCommitHook { + let mut pre_commit_notifier = self.pre_commit_notifier.clone(); + let state_sync_notifier = self.state_sync_notifier.clone(); + let payload = block.payload().cloned(); + let timestamp = block.timestamp_usecs(); + let validator_txns = block.validator_txns().cloned().unwrap_or_default(); + let block_id = block.id(); + Box::new( + move |user_txns: &[SignedTransaction], state_compute_result: &StateComputeResult| { + let input_txns = Block::combine_to_input_transactions( + validator_txns, + user_txns.to_vec(), + metadata, + ); + let txns = state_compute_result.transactions_to_commit(input_txns, block_id); + let subscribable_events = state_compute_result.subscribable_events().to_vec(); + Box::pin(async move { + pre_commit_notifier + .send(Box::pin(async move { + if let Err(e) = monitor!( + "notify_state_sync", + state_sync_notifier + 
.notify_new_commit(txns, subscribable_events) + .await + ) { + error!(error = ?e, "Failed to notify state synchronizer"); + } + + let payload_vec = payload.into_iter().collect(); + payload_manager.notify_commit(timestamp, payload_vec); + })) + .await + .expect("Failed to send pre-commit notification"); + }) + }, + ) } } @@ -201,10 +222,11 @@ impl StateComputer for ExecutionProxy { .execution_pipeline .queue( block.clone(), - metadata, + metadata.clone(), parent_block_id, transaction_generator, block_executor_onchain_config, + self.pre_commit_hook(block, metadata, payload_manager), lifetime_guard, ) .await; @@ -264,40 +286,14 @@ impl StateComputer for ExecutionProxy { callback: StateComputerCommitCallBackType, ) -> ExecutorResult<()> { let mut latest_logical_time = self.write_mutex.lock().await; - let mut txns = Vec::new(); - let mut subscribable_txn_events = Vec::new(); - let mut payloads = Vec::new(); let logical_time = LogicalTime::new( finality_proof.ledger_info().epoch(), finality_proof.ledger_info().round(), ); - let block_timestamp = finality_proof.commit_info().timestamp_usecs(); - - let MutableState { - payload_manager, - validators, - is_randomness_enabled, - .. - } = self - .state - .read() - .as_ref() - .cloned() - .expect("must be set within an epoch"); - let mut pre_commit_futs = Vec::with_capacity(blocks.len()); - for block in blocks { - if let Some(payload) = block.block().payload() { - payloads.push(payload.clone()); - } - - txns.extend(self.transactions_to_commit(block, &validators, is_randomness_enabled)); - subscribable_txn_events.extend(block.subscribable_events()); - pre_commit_futs.push(block.take_pre_commit_fut()); - } // wait until all blocks are committed - for pre_commit_fut in pre_commit_futs { - pre_commit_fut.await? + for block in blocks { + block.take_pre_commit_fut().await? 
} let executor = self.executor.clone(); @@ -314,15 +310,15 @@ impl StateComputer for ExecutionProxy { .expect("spawn_blocking failed"); let blocks = blocks.to_vec(); - let wrapped_callback = move || { - payload_manager.notify_commit(block_timestamp, payloads); + let callback_fut = Box::pin(async move { callback(&blocks, finality_proof); - }; - self.async_state_sync_notifier + }); + + self.commit_notifier .clone() - .send((Box::new(wrapped_callback), txns, subscribable_txn_events)) + .send(callback_fut) .await - .expect("Failed to send async state sync notification"); + .expect("Failed to send commit notification"); *latest_logical_time = logical_time; Ok(()) @@ -428,9 +424,10 @@ async fn test_commit_sync_race() { aggregate_signature::AggregateSignature, block_executor::partitioner::ExecutableBlock, block_info::BlockInfo, + contract_event::ContractEvent, ledger_info::LedgerInfo, on_chain_config::{TransactionDeduperType, TransactionShufflerType}, - transaction::{SignedTransaction, TransactionStatus}, + transaction::{SignedTransaction, Transaction, TransactionStatus}, }; struct RecordedCommit { diff --git a/consensus/src/state_computer_tests.rs b/consensus/src/state_computer_tests.rs index b93d33a792ec4..6c79e8d3ee710 100644 --- a/consensus/src/state_computer_tests.rs +++ b/consensus/src/state_computer_tests.rs @@ -2,15 +2,14 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{ - error::MempoolError, payload_manager::DirectMempoolPayloadManager, - pipeline::pipeline_phase::CountedRequest, state_computer::ExecutionProxy, + error::MempoolError, pipeline::pipeline_phase::CountedRequest, state_computer::ExecutionProxy, state_replication::StateComputer, transaction_deduper::NoOpDeduper, transaction_filter::TransactionFilter, transaction_shuffler::NoOpShuffler, txn_notifier::TxnNotifier, }; use aptos_config::config::transaction_filter_type::Filter; use aptos_consensus_notifications::{ConsensusNotificationSender, Error}; -use aptos_consensus_types::{block::Block, 
block_data::BlockData, pipelined_block::PipelinedBlock}; +use aptos_consensus_types::{block::Block, block_data::BlockData}; use aptos_crypto::HashValue; use aptos_executor_types::{ state_checkpoint_output::StateCheckpointOutput, BlockExecutorTrait, ExecutorResult, @@ -18,28 +17,35 @@ use aptos_executor_types::{ }; use aptos_infallible::Mutex; use aptos_types::{ - aggregate_signature::AggregateSignature, block_executor::{config::BlockExecutorConfigFromOnchain, partitioner::ExecutableBlock}, contract_event::ContractEvent, epoch_state::EpochState, - ledger_info::{LedgerInfo, LedgerInfoWithSignatures}, + ledger_info::LedgerInfoWithSignatures, transaction::{ExecutionStatus, SignedTransaction, Transaction, TransactionStatus}, validator_txn::ValidatorTransaction, }; -use futures_channel::oneshot; use std::sync::{atomic::AtomicU64, Arc}; -use tokio::runtime::Handle; +use tokio::{runtime::Handle, sync::Mutex as AsyncMutex}; struct DummyStateSyncNotifier { invocations: Mutex, Vec)>>, + tx: tokio::sync::mpsc::Sender<()>, + rx: AsyncMutex>, } impl DummyStateSyncNotifier { fn new() -> Self { + let (tx, rx) = tokio::sync::mpsc::channel(10); Self { invocations: Mutex::new(vec![]), + tx, + rx: AsyncMutex::new(rx), } } + + async fn wait_for_notification(&self) { + self.rx.lock().await.recv().await; + } } #[async_trait::async_trait] @@ -52,6 +58,7 @@ impl ConsensusNotificationSender for DummyStateSyncNotifier { self.invocations .lock() .push((transactions, subscribable_events)); + self.tx.send(()).await.unwrap(); Ok(()) } @@ -94,15 +101,6 @@ impl BlockExecutorTrait for DummyBlockExecutor { Ok(()) } - fn execute_block( - &self, - _block: ExecutableBlock, - _parent_block_id: HashValue, - _onchain_config: BlockExecutorConfigFromOnchain, - ) -> ExecutorResult { - Ok(StateComputeResult::new_dummy()) - } - fn execute_and_state_checkpoint( &self, block: ExecutableBlock, @@ -119,7 +117,18 @@ impl BlockExecutorTrait for DummyBlockExecutor { _parent_block_id: HashValue, 
_state_checkpoint_output: StateCheckpointOutput, ) -> ExecutorResult { - Ok(StateComputeResult::new_dummy()) + let num_txns = self + .blocks_received + .lock() + .last() + .unwrap() + .transactions + .num_transactions(); + + Ok(StateComputeResult::new_dummy_with_compute_status(vec![ + TransactionStatus::Keep(ExecutionStatus::Success); + num_txns + ])) } fn pre_commit_block( @@ -142,15 +151,16 @@ impl BlockExecutorTrait for DummyBlockExecutor { #[tokio::test] #[cfg(test)] -async fn schedule_compute_should_discover_validator_txns() { +async fn should_see_and_notify_validator_txns() { use crate::payload_manager::DirectMempoolPayloadManager; let executor = Arc::new(DummyBlockExecutor::new()); + let state_sync_notifier = Arc::new(DummyStateSyncNotifier::new()); let execution_policy = ExecutionProxy::new( executor.clone(), Arc::new(DummyTxnNotifier {}), - Arc::new(DummyStateSyncNotifier::new()), + state_sync_notifier.clone(), &Handle::current(), TransactionFilter::new(Filter::empty()), true, @@ -183,7 +193,8 @@ async fn schedule_compute_should_discover_validator_txns() { let _ = execution_policy .schedule_compute(&block, HashValue::zero(), None, dummy_guard()) .await - .await; + .await + .unwrap(); // Get the txns from the view of the dummy executor. 
let txns = executor.blocks_received.lock()[0] @@ -195,78 +206,9 @@ async fn schedule_compute_should_discover_validator_txns() { let supposed_validator_txn_1 = txns[2].expect_valid().try_as_validator_txn().unwrap(); assert_eq!(&validator_txn_0, supposed_validator_txn_0); assert_eq!(&validator_txn_1, supposed_validator_txn_1); -} - -#[tokio::test] -async fn commit_should_discover_validator_txns() { - let state_sync_notifier = Arc::new(DummyStateSyncNotifier::new()); - - let execution_policy = ExecutionProxy::new( - Arc::new(DummyBlockExecutor::new()), - Arc::new(DummyTxnNotifier {}), - state_sync_notifier.clone(), - &Handle::current(), - TransactionFilter::new(Filter::empty()), - true, - ); - - let validator_txn_0 = ValidatorTransaction::dummy(vec![0xFF; 99]); - let validator_txn_1 = ValidatorTransaction::dummy(vec![0xFF; 999]); - - let block = Block::new_for_testing( - HashValue::zero(), - BlockData::dummy_with_validator_txns(vec![ - validator_txn_0.clone(), - validator_txn_1.clone(), - ]), - None, - ); - - // Eventually 3 txns: block metadata, validator txn 0, validator txn 1. 
- let state_compute_result = StateComputeResult::new_dummy_with_compute_status(vec![ - TransactionStatus::Keep( - ExecutionStatus::Success - ); - 3 - ]); - - let blocks = vec![Arc::new(PipelinedBlock::new( - block, - vec![], - state_compute_result, - ))]; - blocks[0].mark_successful_pre_commit_for_test(); - let epoch_state = EpochState::empty(); - - execution_policy.new_epoch( - &epoch_state, - Arc::new(DirectMempoolPayloadManager::new()), - Arc::new(NoOpShuffler {}), - BlockExecutorConfigFromOnchain::new_no_block_limit(), - Arc::new(NoOpDeduper {}), - false, - ); - - let (tx, rx) = oneshot::channel::<()>(); - - let callback = Box::new( - move |_a: &[Arc], _b: LedgerInfoWithSignatures| { - tx.send(()).unwrap(); - }, - ); - - let _ = execution_policy - .commit( - blocks.as_slice(), - LedgerInfoWithSignatures::new(LedgerInfo::dummy(), AggregateSignature::empty()), - callback, - ) - .await; - - // Wait until state sync is notified. - let _ = rx.await; - // Get all txns that state sync was notified with. 
+ // Get all txns that state sync was notified with (when pre-commit finishes) + state_sync_notifier.wait_for_notification().await; let (txns, _) = state_sync_notifier.invocations.lock()[0].clone(); let supposed_validator_txn_0 = txns[1].try_as_validator_txn().unwrap(); diff --git a/consensus/src/test_utils/mock_payload_manager.rs b/consensus/src/test_utils/mock_payload_manager.rs index e62ec85b1ea9a..cfac0aaa458a3 100644 --- a/consensus/src/test_utils/mock_payload_manager.rs +++ b/consensus/src/test_utils/mock_payload_manager.rs @@ -3,13 +3,12 @@ use crate::{ error::QuorumStoreError, - payload_client::{ - user::quorum_store_client::QuorumStoreClient, PayloadClient, PayloadPullParameters, - }, + payload_client::{user::quorum_store_client::QuorumStoreClient, PayloadClient}, }; use anyhow::Result; use aptos_consensus_types::{ - block::block_test_utils::random_payload, common::Payload, request_response::GetPayloadCommand, + block::block_test_utils::random_payload, common::Payload, + payload_pull_params::PayloadPullParameters, request_response::GetPayloadCommand, }; use aptos_types::{ transaction::{ExecutionStatus, TransactionStatus}, diff --git a/consensus/src/test_utils/mod.rs b/consensus/src/test_utils/mod.rs index b556d9bfc7ed8..5744a1ab0b090 100644 --- a/consensus/src/test_utils/mod.rs +++ b/consensus/src/test_utils/mod.rs @@ -5,11 +5,16 @@ #![allow(clippy::unwrap_used)] use crate::{ block_storage::{BlockReader, BlockStore}, + liveness::{ + proposal_status_tracker::{TOptQSPullParamsProvider, TPastProposalStatusTracker}, + round_state::NewRoundReason, + }, payload_manager::DirectMempoolPayloadManager, }; use aptos_consensus_types::{ block::{block_test_utils::certificate_for_genesis, Block}, common::{Author, Round}, + payload_pull_params::OptQSPayloadPullParams, pipelined_block::PipelinedBlock, quorum_cert::QuorumCert, sync_info::SyncInfo, @@ -270,3 +275,17 @@ pub(crate) fn create_vec_signed_transactions_with_gas( .map(|_| create_signed_transaction(gas_unit_price)) 
.collect() } + +pub struct MockOptQSPayloadProvider {} + +impl TOptQSPullParamsProvider for MockOptQSPayloadProvider { + fn get_params(&self) -> Option { + None + } +} + +pub struct MockPastProposalStatusTracker {} + +impl TPastProposalStatusTracker for MockPastProposalStatusTracker { + fn push(&self, _status: NewRoundReason) {} +} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/entry_fun.rs b/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/entry_fun.rs deleted file mode 100644 index 7a3363dd9e959..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/entry_fun.rs +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::deprecated_fairness::conflict_key::ConflictKey; -use aptos_types::transaction::{SignedTransaction, TransactionPayload}; -use move_core_types::{identifier::Identifier, language_storage::ModuleId}; - -#[derive(Eq, Hash, PartialEq)] -pub enum EntryFunKey { - EntryFun { - module: ModuleId, - function: Identifier, - }, - Exempt, -} - -impl ConflictKey for EntryFunKey { - fn extract_from(txn: &SignedTransaction) -> Self { - match txn.payload() { - TransactionPayload::EntryFunction(entry_fun) => { - let module_id = entry_fun.module(); - if module_id.address().is_special() { - // Exempt framework modules - Self::Exempt - } else { - // n.b. Generics ignored. - Self::EntryFun { - module: module_id.clone(), - function: entry_fun.function().to_owned(), - } - } - }, - TransactionPayload::Multisig(_) - | TransactionPayload::Script(_) - | TransactionPayload::ModuleBundle(_) => Self::Exempt, - } - } - - fn conflict_exempt(&self) -> bool { - match self { - Self::Exempt => true, - Self::EntryFun { .. 
} => false, - } - } -} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/entry_fun_module.rs b/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/entry_fun_module.rs deleted file mode 100644 index 948d2e8baa330..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/entry_fun_module.rs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::deprecated_fairness::conflict_key::ConflictKey; -use aptos_types::transaction::{SignedTransaction, TransactionPayload}; -use move_core_types::language_storage::ModuleId; - -#[derive(Eq, Hash, PartialEq)] -pub enum EntryFunModuleKey { - Module(ModuleId), - AnyScriptOrMultiSig, - Exempt, -} - -impl ConflictKey for EntryFunModuleKey { - fn extract_from(txn: &SignedTransaction) -> Self { - match txn.payload() { - TransactionPayload::EntryFunction(entry_fun) => { - let module_id = entry_fun.module(); - - if module_id.address().is_special() { - Self::Exempt - } else { - Self::Module(module_id.clone()) - } - }, - TransactionPayload::Multisig(..) - | TransactionPayload::Script(_) - | TransactionPayload::ModuleBundle(_) => Self::AnyScriptOrMultiSig, - } - } - - fn conflict_exempt(&self) -> bool { - match self { - Self::Exempt => true, - Self::Module(..) 
| Self::AnyScriptOrMultiSig => false, - } - } -} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/mod.rs b/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/mod.rs deleted file mode 100644 index 1e233dbdc29fc..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/mod.rs +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::deprecated_fairness::TxnIdx; -use std::{collections::HashMap, hash::Hash}; - -pub(crate) mod entry_fun; -pub(crate) mod entry_fun_module; -pub(crate) mod txn_sender; - -#[cfg(test)] -pub(crate) mod test_utils; - -/// `ConflictKey::extract_from(txn)` extracts a key from a transaction. For example, -/// `TxnSenderKey::extract_from(txn)` returns the transaction sender's address. The key is used by -/// the shuffler to determine whether two close by transactions conflict with each other. -/// -/// `ConflictKey::conflict_exempt(&key)` returns if this specific key is exempt from conflict. -/// For example, we can exempt transaction sender 0x1, so that consecutive transactions sent by -/// 0x1 are not seen as a conflict by the shuffler. -pub(crate) trait ConflictKey: Eq + Hash + PartialEq { - fn extract_from(txn: &Txn) -> Self; - - fn conflict_exempt(&self) -> bool; -} - -#[derive(Clone, Copy, Debug)] -pub(crate) struct ConflictKeyId(usize); - -impl ConflictKeyId { - pub fn as_idx(&self) -> usize { - self.0 - } -} - -/// `ConflictKeyRegistry::build::()` goes through a block of transactions and -/// extract the conflict keys from each transaction. In that process, each unique conflict key is -/// assigned a unique `ConflictKeyId`, essentially a sequence number, and the registry remembers which -/// key was extracted from each transaction. After that, we can query the registry to get the key -/// represented by the id, which is 1. 
cheaper than calling `ConflictKey::extract_from(txn)` again; -/// 2. enables vector based `MapByKeyId` which is cheaper than a `HashMap`; and 3. eliminates the typing -/// information and easier to use in the shuffler. -#[derive(Debug)] -pub(crate) struct ConflictKeyRegistry { - id_by_txn: Vec, - is_exempt_by_id: Vec, -} - -// Provided `ConflictKeyId`s managed by `ConflictKeyRegistry`s are consecutive integers starting -// from 0, a map can be implemented based on a vector, which is cheapter than a hash map. -#[derive(Debug, Eq, PartialEq)] -pub(crate) struct MapByKeyId { - inner: Vec, -} - -impl MapByKeyId { - pub fn new(size: usize) -> Self { - let mut inner = Vec::with_capacity(size); - inner.resize_with(size, Default::default); - - Self { inner } - } - - pub fn get(&self, key_id: ConflictKeyId) -> &T { - &self.inner[key_id.as_idx()] - } - - pub fn get_mut(&mut self, key_id: ConflictKeyId) -> &mut T { - &mut self.inner[key_id.as_idx()] - } -} - -impl ConflictKeyRegistry { - pub fn build(txns: &[Txn]) -> Self - where - K: ConflictKey, - { - let mut registry = HashMap::::new(); - let mut is_exempt_by_id = Vec::new(); - - let id_by_txn = txns - .iter() - .map(|txn| { - let key = K::extract_from(txn); - *registry.entry(key).or_insert_with_key(|key| { - is_exempt_by_id.push(key.conflict_exempt()); - ConflictKeyId(is_exempt_by_id.len() - 1) - }) - }) - .collect(); - - Self { - id_by_txn, - is_exempt_by_id, - } - } - - fn num_ids(&self) -> usize { - self.is_exempt_by_id.len() - } - - pub fn num_txns(&self) -> usize { - self.id_by_txn.len() - } - - pub fn new_map_by_id(&self) -> MapByKeyId { - MapByKeyId::new(self.num_ids()) - } - - pub fn key_id_for_txn(&self, txn_idx: TxnIdx) -> ConflictKeyId { - self.id_by_txn[txn_idx] - } - - pub fn is_conflict_exempt(&self, key_id: ConflictKeyId) -> bool { - self.is_exempt_by_id[key_id.0] - } -} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/test_utils.rs 
b/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/test_utils.rs deleted file mode 100644 index fec79e2c33617..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/test_utils.rs +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::deprecated_fairness::conflict_key::{ - ConflictKey, ConflictKeyId, ConflictKeyRegistry, -}; -use proptest::prelude::*; -use std::hash::Hash; - -impl ConflictKeyId { - pub fn new_for_test(idx: usize) -> Self { - Self(idx) - } -} - -impl ConflictKeyRegistry { - pub fn all_exempt(num_txns: usize) -> Self { - ConflictKeyRegistry { - id_by_txn: vec![ConflictKeyId::new_for_test(0); num_txns], - is_exempt_by_id: vec![true], - } - } - - pub fn non_conflict(num_txns: usize) -> Self { - ConflictKeyRegistry { - id_by_txn: (0..num_txns).map(ConflictKeyId::new_for_test).collect(), - is_exempt_by_id: vec![false; num_txns], - } - } - - pub fn full_conflict(num_txns: usize) -> Self { - ConflictKeyRegistry { - id_by_txn: vec![ConflictKeyId::new_for_test(0); num_txns], - is_exempt_by_id: vec![false], - } - } - - pub fn nums_per_key(nums_per_key: [usize; NUM_KEYS]) -> Self { - Self::nums_per_round_per_key([nums_per_key]) - } - - pub fn nums_per_round_per_key( - nums_per_round_per_key: [[usize; NUM_KEYS]; NUM_ROUNDS], - ) -> Self { - let mut seq = (0..NUM_ROUNDS).flat_map(|_| 0..NUM_KEYS); - let nums_per_key = nums_per_round_per_key.into_iter().flatten(); - - ConflictKeyRegistry { - id_by_txn: nums_per_key - .flat_map(|num| { - let s = seq.next().unwrap(); - vec![ConflictKeyId::new_for_test(s); num] - }) - .collect(), - is_exempt_by_id: vec![false; NUM_KEYS], - } - } -} - -#[derive(Debug)] -struct FakeAccount { - id: usize, -} - -impl Arbitrary for FakeAccount { - type Parameters = (); - type Strategy = BoxedStrategy; - - fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { - (0..10usize).prop_map(|id| 
FakeAccount { id }).boxed() - } -} - -#[derive(Debug)] -struct FakeModule { - id: usize, -} - -impl FakeModule { - pub fn exempt(&self) -> bool { - self.id % 3 == 0 - } -} - -impl Arbitrary for FakeModule { - type Parameters = (); - type Strategy = BoxedStrategy; - - fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { - (0..10usize).prop_map(|id| FakeModule { id }).boxed() - } -} - -#[derive(Debug)] -struct FakeEntryFun { - module: FakeModule, - id: usize, -} - -impl Arbitrary for FakeEntryFun { - type Parameters = (); - type Strategy = BoxedStrategy; - - fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { - (any::(), 0..3usize) - .prop_map(|(module, id)| FakeEntryFun { module, id }) - .boxed() - } -} - -#[derive(Debug)] -pub struct FakeTxn { - sender: FakeAccount, - entry_fun: FakeEntryFun, -} - -impl Arbitrary for FakeTxn { - type Parameters = (); - type Strategy = BoxedStrategy; - - fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { - (any::(), any::()) - .prop_map(|(sender, entry_fun)| FakeTxn { sender, entry_fun }) - .boxed() - } -} - -#[derive(Eq, Hash, PartialEq)] -pub(crate) struct FakeSenderKey { - id: usize, -} - -impl ConflictKey for FakeSenderKey { - fn extract_from(txn: &FakeTxn) -> Self { - Self { id: txn.sender.id } - } - - fn conflict_exempt(&self) -> bool { - false - } -} - -#[derive(Eq, Hash, PartialEq)] -pub(crate) enum FakeEntryFunModuleKey { - Module(usize), - Exempt, -} - -impl ConflictKey for FakeEntryFunModuleKey { - fn extract_from(txn: &FakeTxn) -> Self { - if txn.entry_fun.module.exempt() { - Self::Exempt - } else { - Self::Module(txn.entry_fun.module.id) - } - } - - fn conflict_exempt(&self) -> bool { - match self { - Self::Exempt => true, - Self::Module(..) 
=> false, - } - } -} - -#[derive(Eq, Hash, PartialEq)] -pub(crate) enum FakeEntryFunKey { - EntryFun { module: usize, function: usize }, - Exempt, -} - -impl ConflictKey for FakeEntryFunKey { - fn extract_from(txn: &FakeTxn) -> Self { - if txn.entry_fun.module.exempt() { - Self::Exempt - } else { - Self::EntryFun { - module: txn.entry_fun.module.id, - function: txn.entry_fun.id, - } - } - } - - fn conflict_exempt(&self) -> bool { - match self { - Self::Exempt => true, - Self::EntryFun { .. } => false, - } - } -} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/txn_sender.rs b/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/txn_sender.rs deleted file mode 100644 index a742e7c240573..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_key/txn_sender.rs +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::deprecated_fairness::conflict_key::ConflictKey; -use aptos_types::transaction::SignedTransaction; -use move_core_types::account_address::AccountAddress; - -#[derive(Eq, Hash, PartialEq)] -pub struct TxnSenderKey(AccountAddress); - -impl ConflictKey for TxnSenderKey { - fn extract_from(txn: &SignedTransaction) -> Self { - TxnSenderKey(txn.sender()) - } - - fn conflict_exempt(&self) -> bool { - false - } -} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_zone.rs b/consensus/src/transaction_shuffler/deprecated_fairness/conflict_zone.rs deleted file mode 100644 index a685cf701ac07..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/conflict_zone.rs +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::deprecated_fairness::{ - conflict_key::{ConflictKeyId, ConflictKeyRegistry, MapByKeyId}, - TxnIdx, -}; -use std::collections::VecDeque; - -/// A sliding window of 
transactions (TxnIds), represented by `ConflictKeyId`s extracted from a -/// specific `ConflictKey`, managed by a specific `ConflictKeyRegistry`. -#[derive(Debug)] -pub(crate) struct ConflictZone<'a> { - sliding_window_size: usize, - sliding_window: VecDeque, - /// Number of transactions in the sliding window for each key_id. `ConflictZone::is_conflict(key)` - /// returns true is the count for `key` is greater than 0, unless the key is exempt from conflict. - counts_by_id: MapByKeyId, - key_registry: &'a ConflictKeyRegistry, -} - -impl<'a> ConflictZone<'a> { - pub fn build_zones( - key_registries: &'a [ConflictKeyRegistry; NUM_CONFLICT_ZONES], - window_sizes: [usize; NUM_CONFLICT_ZONES], - ) -> [Self; NUM_CONFLICT_ZONES] { - itertools::zip_eq(key_registries.iter(), window_sizes) - .map(|(registry, window_size)| Self::new(registry, window_size)) - .collect::>() - .try_into() - .expect("key_registries and window_sizes must have the same length.") - } - - fn new(key_registry: &'a ConflictKeyRegistry, sliding_window_size: usize) -> Self { - Self { - sliding_window_size, - sliding_window: VecDeque::with_capacity(sliding_window_size + 1), - counts_by_id: key_registry.new_map_by_id(), - key_registry, - } - } - - pub fn is_conflict(&self, txn_idx: TxnIdx) -> bool { - let key_id = self.key_registry.key_id_for_txn(txn_idx); - if self.key_registry.is_conflict_exempt(key_id) { - false - } else { - *self.counts_by_id.get(key_id) > 0 - } - } - - /// Append a new transaction to the sliding window and - /// return the key_id that's no longer in conflict as a result if there is one. 
- pub fn add(&mut self, txn_idx: TxnIdx) -> Option { - let key_id = self.key_registry.key_id_for_txn(txn_idx); - - *self.counts_by_id.get_mut(key_id) += 1; - self.sliding_window.push_back(key_id); - if self.sliding_window.len() > self.sliding_window_size { - if let Some(removed_key_id) = self.sliding_window.pop_front() { - let count = self.counts_by_id.get_mut(removed_key_id); - *count -= 1; - if *count == 0 && !self.key_registry.is_conflict_exempt(removed_key_id) { - return Some(removed_key_id); - } - } - } - None - } -} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/mod.rs b/consensus/src/transaction_shuffler/deprecated_fairness/mod.rs deleted file mode 100644 index 03cc1eb1c6d18..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/mod.rs +++ /dev/null @@ -1,220 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::{ - deprecated_fairness::{ - conflict_key::{ - entry_fun::EntryFunKey, entry_fun_module::EntryFunModuleKey, txn_sender::TxnSenderKey, - ConflictKeyRegistry, - }, - conflict_zone::ConflictZone, - pending_zone::PendingZone, - }, - TransactionShuffler, -}; -use aptos_types::transaction::SignedTransaction; -use itertools::zip_eq; -use selection_tracker::SelectionTracker; -use std::collections::BTreeSet; - -pub(crate) mod conflict_key; -mod conflict_zone; -mod pending_zone; -mod selection_tracker; - -#[cfg(test)] -mod tests; - -type TxnIdx = usize; - -#[derive(Debug)] -pub struct FairnessShuffler { - pub sender_conflict_window_size: usize, - pub module_conflict_window_size: usize, - pub entry_fun_conflict_window_size: usize, -} - -impl FairnessShuffler { - fn conflict_key_registries(txns: &[SignedTransaction]) -> [ConflictKeyRegistry; 3] { - [ - ConflictKeyRegistry::build::(txns), - ConflictKeyRegistry::build::(txns), - ConflictKeyRegistry::build::(txns), - ] - } - - fn window_sizes(&self) -> [usize; 3] { - [ - self.sender_conflict_window_size, - 
self.module_conflict_window_size, - self.entry_fun_conflict_window_size, - ] - } -} - -impl TransactionShuffler for FairnessShuffler { - fn shuffle(&self, txns: Vec) -> Vec { - let conflict_key_registries = Self::conflict_key_registries(&txns); - let order = - FairnessShufflerImpl::new(&conflict_key_registries, self.window_sizes()).shuffle(); - reorder(txns, &order) - } -} - -fn reorder(txns: Vec, order: &[TxnIdx]) -> Vec { - assert_eq!(txns.len(), order.len()); - order.iter().map(|idx| txns[*idx].clone()).collect() -} - -struct FairnessShufflerImpl<'a, const NUM_CONFLICT_ZONES: usize> { - conflict_zones: [ConflictZone<'a>; NUM_CONFLICT_ZONES], - pending_zones: [PendingZone<'a>; NUM_CONFLICT_ZONES], - selected_order: Vec, - selection_tracker: SelectionTracker, -} - -impl<'a, const NUM_CONFLICT_ZONES: usize> FairnessShufflerImpl<'a, NUM_CONFLICT_ZONES> { - pub fn new( - conflict_key_registries: &'a [ConflictKeyRegistry; NUM_CONFLICT_ZONES], - window_sizes: [usize; NUM_CONFLICT_ZONES], - ) -> Self { - let num_txns = conflict_key_registries[0].num_txns(); - assert!(conflict_key_registries - .iter() - .skip(1) - .all(|r| r.num_txns() == num_txns)); - - Self { - selected_order: Vec::with_capacity(num_txns), - selection_tracker: SelectionTracker::new(num_txns), - conflict_zones: ConflictZone::build_zones(conflict_key_registries, window_sizes), - pending_zones: PendingZone::build_zones(conflict_key_registries), - } - } - - /// Spread (delay) transactions that have conflicts with adjacent previous transactions - /// according to multiple dimensions (`ConflictKey`s). Invariant is held that for each conflict - /// key, i.e. the transaction sender, the module, and the entry function, etc., the order of - /// transactions with the same key is preserved -- unless the key is exempt from conflict. 
- /// - /// For example, all transactions from a single sender will preserve their order; all transactions - /// from the same module will preserve their order, unless they are of the aptos framework - /// module -- p2p transfers of APT can violate this invariant. - /// - /// Each transaction comes at most once out of `self.selection_tracker.next_unselected()` for - /// both passes, that's O(2n). And each transaction comes out of each conflict zones at most - /// once, that's O(3n). In either case, the transaction is examined by going through all 3 - /// conflict zones and all 3 pending zones. So the time complexity is O(9n) = O(n). Or if we - /// consider `NUM_CONFLICT_ZONES = m`, the time complexity is O(m*m*n). - pub fn shuffle(mut self) -> Vec { - // First pass, only select transactions with no conflicts in all conflict zones - while let Some(txn_idx) = self.selection_tracker.next_unselected() { - if !self.is_conflict(txn_idx) && !self.is_head_of_line_blocked(txn_idx) { - self.select_and_select_unconflicted(txn_idx, false /* is_pending */) - } else { - self.add_pending(txn_idx); - } - } - - // Second pass, select previously pending txns in order, - // with newly un-conflicted txns jumping the line - self.selection_tracker.new_pass(); - while let Some(txn_idx) = self.selection_tracker.next_unselected() { - self.select_and_select_unconflicted(txn_idx, true /* is_pending */); - } - - self.selected_order - } - - fn select_and_select_unconflicted(&mut self, txn_idx: TxnIdx, is_pending: bool) { - let mut maybe_unconflicted = self.select(txn_idx, is_pending); - while let Some(txn_idx) = maybe_unconflicted.pop_first() { - if !self.is_conflict(txn_idx) && !self.is_head_of_line_blocked(txn_idx) { - maybe_unconflicted.extend(self.select(txn_idx, true /* is_pending */)) - } - } - } - - /// Select a transaction and return potentially un-conflicted transactions - fn select(&mut self, txn_idx: TxnIdx, is_pending: bool) -> BTreeSet { - 
self.selection_tracker.mark_selected(txn_idx); - self.selected_order.push(txn_idx); - if is_pending { - self.pop_pending(txn_idx); - } - - let mut maybe_unconflicted = BTreeSet::new(); - for (conflict_zone, pending_zone) in - zip_eq(&mut self.conflict_zones, &mut self.pending_zones) - { - if let Some(key_id) = conflict_zone.add(txn_idx) { - if let Some(pending) = pending_zone.first_pending_on_key(key_id) { - maybe_unconflicted.insert(pending); - } - } - } - - maybe_unconflicted - } - - fn is_conflict(&self, txn_idx: TxnIdx) -> bool { - self.conflict_zones.iter().any(|z| z.is_conflict(txn_idx)) - } - - fn is_head_of_line_blocked(&self, txn_idx: TxnIdx) -> bool { - self.pending_zones - .iter() - .any(|z| z.head_of_line_blocked(txn_idx)) - } - - fn add_pending(&mut self, txn_idx: TxnIdx) { - self.pending_zones.iter_mut().for_each(|z| z.add(txn_idx)); - } - - fn pop_pending(&mut self, txn_idx: TxnIdx) { - self.pending_zones.iter_mut().for_each(|z| z.pop(txn_idx)); - } -} - -#[cfg(test)] -mod test_utils { - use crate::transaction_shuffler::deprecated_fairness::FairnessShuffler; - use proptest::prelude::*; - - impl FairnessShuffler { - pub fn new_for_test( - sender_conflict_window_size: usize, - module_conflict_window_size: usize, - entry_fun_conflict_window_size: usize, - ) -> Self { - Self { - sender_conflict_window_size, - module_conflict_window_size, - entry_fun_conflict_window_size, - } - } - } - - impl Arbitrary for FairnessShuffler { - type Parameters = (); - type Strategy = BoxedStrategy; - - fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { - (0..10usize, 0..10usize, 0..10usize) - .prop_map( - |( - sender_conflict_window_size, - module_conflict_window_size, - entry_fun_conflict_window_size, - )| { - FairnessShuffler { - sender_conflict_window_size, - module_conflict_window_size, - entry_fun_conflict_window_size, - } - }, - ) - .boxed() - } - } -} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/pending_zone.rs 
b/consensus/src/transaction_shuffler/deprecated_fairness/pending_zone.rs deleted file mode 100644 index eb9c2af18455f..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/pending_zone.rs +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::deprecated_fairness::{ - conflict_key::{ConflictKeyId, ConflictKeyRegistry, MapByKeyId}, - TxnIdx, -}; -use std::collections::VecDeque; - -/// A queue for each confclit Key, represented by `ConflictKeyId`s managed by `ConflictKeyRegistry`. -#[derive(Debug)] -pub(crate) struct PendingZone<'a> { - key_registry: &'a ConflictKeyRegistry, - pending_by_key: MapByKeyId>, -} - -impl<'a> PendingZone<'a> { - pub fn build_zones( - key_registries: &'a [ConflictKeyRegistry; NUM_CONFLICT_ZONES], - ) -> [Self; NUM_CONFLICT_ZONES] { - key_registries - .iter() - .map(Self::new) - .collect::>() - .try_into() - .expect("key_registries and the return type must have the same length.") - } - - fn new(key_registry: &'a ConflictKeyRegistry) -> Self { - Self { - key_registry, - pending_by_key: key_registry.new_map_by_id(), - } - } - - pub fn add(&mut self, txn_idx: TxnIdx) { - let key_id = self.key_registry.key_id_for_txn(txn_idx); - if !self.key_registry.is_conflict_exempt(key_id) { - self.pending_by_key.get_mut(key_id).push_back(txn_idx); - } - } - - pub fn pop(&mut self, txn_idx: TxnIdx) { - let key_id = self.key_registry.key_id_for_txn(txn_idx); - if !self.key_registry.is_conflict_exempt(key_id) { - let popped = self - .pending_by_key - .get_mut(key_id) - .pop_front() - .expect("Must exist"); - assert_eq!(popped, txn_idx); - } - } - - pub fn head_of_line_blocked(&self, txn_idx: TxnIdx) -> bool { - let key_id = self.key_registry.key_id_for_txn(txn_idx); - if self.key_registry.is_conflict_exempt(key_id) { - false - } else { - match self.pending_by_key.get(key_id).front() { - Some(front) => *front < txn_idx, - None => false, - } - } - } - - pub 
fn first_pending_on_key(&self, key_id: ConflictKeyId) -> Option { - self.pending_by_key.get(key_id).front().cloned() - } -} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/selection_tracker.rs b/consensus/src/transaction_shuffler/deprecated_fairness/selection_tracker.rs deleted file mode 100644 index 3e10d4368642f..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/selection_tracker.rs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::deprecated_fairness::TxnIdx; - -pub struct SelectionTracker { - selected_markers: Vec, - cur_idx: usize, -} - -impl SelectionTracker { - pub fn new(num_txns: usize) -> Self { - Self { - selected_markers: vec![false; num_txns], - cur_idx: 0, - } - } - - pub fn next_unselected(&mut self) -> Option { - while self.cur_idx < self.selected_markers.len() { - let idx = self.cur_idx; - self.cur_idx += 1; - - if !self.is_selected(idx) { - return Some(idx); - } - } - None - } - - pub fn new_pass(&mut self) { - self.cur_idx = 0 - } - - pub fn mark_selected(&mut self, idx: TxnIdx) { - assert!(!self.selected_markers[idx]); - self.selected_markers[idx] = true; - } - - fn is_selected(&self, idx: TxnIdx) -> bool { - self.selected_markers[idx] - } -} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/tests/manual.rs b/consensus/src/transaction_shuffler/deprecated_fairness/tests/manual.rs deleted file mode 100644 index 753ca542a8d3f..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/tests/manual.rs +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::deprecated_fairness::{ - conflict_key::ConflictKeyRegistry, FairnessShuffler, FairnessShufflerImpl, -}; - -struct TestCase { - shuffler: FairnessShuffler, - conflict_key_registries: [ConflictKeyRegistry; 3], - expected_order: Vec, -} - -impl 
TestCase { - fn run(self) { - let Self { - shuffler, - conflict_key_registries, - expected_order, - } = self; - - let order = - FairnessShufflerImpl::new(&conflict_key_registries, shuffler.window_sizes()).shuffle(); - assert_eq!(order, expected_order); - } -} - -#[test] -fn test_all_exempt() { - TestCase { - shuffler: FairnessShuffler::new_for_test(2, 2, 2), - conflict_key_registries: [ - ConflictKeyRegistry::all_exempt(9), - ConflictKeyRegistry::all_exempt(9), - ConflictKeyRegistry::all_exempt(9), - ], - expected_order: (0..9).collect(), - } - .run() -} - -#[test] -fn test_non_conflict() { - TestCase { - shuffler: FairnessShuffler::new_for_test(2, 2, 2), - conflict_key_registries: [ - ConflictKeyRegistry::non_conflict(9), - ConflictKeyRegistry::non_conflict(9), - ConflictKeyRegistry::non_conflict(9), - ], - expected_order: (0..9).collect(), - } - .run() -} - -#[test] -fn test_full_conflict() { - TestCase { - shuffler: FairnessShuffler::new_for_test(2, 2, 2), - conflict_key_registries: [ - ConflictKeyRegistry::full_conflict(9), - ConflictKeyRegistry::full_conflict(9), - ConflictKeyRegistry::full_conflict(9), - ], - expected_order: (0..9).collect(), - } - .run() -} - -#[test] -fn test_modules_ignored_by_window_size() { - TestCase { - shuffler: FairnessShuffler::new_for_test(2, 0, 2), - conflict_key_registries: [ - // [A0, A1, A2, ...] - ConflictKeyRegistry::non_conflict(8), - // [M0, M0, M0, M0, M1, M1, M2, M2] - ConflictKeyRegistry::nums_per_key([4, 2, 2]), - // [M0::E0, M0::E1, M0::E0, M0::E1, M1::E0, M1::E0, M2::E0, M2::E0] - ConflictKeyRegistry::nums_per_round_per_key([[1, 1, 0], [1, 1, 4]]), - ], - // [M0::E0, M0::E1, M1::E0, M0::E0, M0::E1, M1::E0, M2::E0, M2::E0] - expected_order: vec![0, 1, 4, 2, 3, 5, 6, 7], - } - .run() -} - -#[test] -fn test_modules_and_entry_funs_ignored_by_window_size() { - TestCase { - shuffler: FairnessShuffler::new_for_test(2, 0, 0), - conflict_key_registries: [ - // [A0, A1, A2, ...] 
- ConflictKeyRegistry::non_conflict(8), - // [M0, M0, M0, M0, M1, M1, M1, M1] - ConflictKeyRegistry::nums_per_key([4, 4]), - // [M0::E0, M0::E0, M0::E1, M0::E1, M1::E0, M1::E0, M1::E1, M1::E1] - ConflictKeyRegistry::nums_per_key([2, 2, 2, 2]), - ], - expected_order: (0..8).collect(), - } - .run() -} - -#[test] -fn test_exempted_modules() { - // think "full block of p2p txns" - TestCase { - shuffler: FairnessShuffler::new_for_test(3, 2, 2), - conflict_key_registries: [ - // [0:A0, 1:A0, 2:A0, 3:A0, 4:A1, 5:A1, 6:A1, 7:A2, 8:A2, 9:A3] - ConflictKeyRegistry::nums_per_key([4, 3, 2, 1]), - ConflictKeyRegistry::all_exempt(10), - ConflictKeyRegistry::all_exempt(10), - ], - // [A0, A1, A2, A3, A0, A1, A2, A0, A1] - expected_order: vec![0, 4, 7, 9, 1, 5, 8, 2, 3, 6], - } - .run() -} - -#[test] -fn test_dominating_module() { - TestCase { - shuffler: FairnessShuffler::new_for_test(4, 1, 1), - conflict_key_registries: [ - ConflictKeyRegistry::non_conflict(7), - // [M0, M0, M0, M1, M2, M3, M4] - ConflictKeyRegistry::nums_per_key([3, 1, 1, 1, 1]), - ConflictKeyRegistry::nums_per_key([3, 1, 1, 1, 1]), - ], - // [M0, M1, M0, M2, M0, M3, M4] - expected_order: vec![0, 3, 1, 4, 2, 5, 6], - } - .run() -} - -#[test] -fn test_dominating_module2() { - TestCase { - shuffler: FairnessShuffler::new_for_test(4, 1, 1), - conflict_key_registries: [ - ConflictKeyRegistry::non_conflict(8), - // [M0, M0, M0, M1, M2, M3, M4, M0] - ConflictKeyRegistry::nums_per_round_per_key([[3, 1, 1, 1, 1], [1, 0, 0, 0, 0]]), - ConflictKeyRegistry::nums_per_round_per_key([[3, 1, 1, 1, 1], [1, 0, 0, 0, 0]]), - ], - // [M0, M1, M0, M2, M0, M3, M4, M0] - expected_order: vec![0, 3, 1, 4, 2, 5, 6, 7], - } - .run() -} - -#[test] -fn test_multiple_entry_funs() { - TestCase { - shuffler: FairnessShuffler::new_for_test(4, 1, 2), - conflict_key_registries: [ - ConflictKeyRegistry::non_conflict(10), - // [M0, M0, M0, M0, M1, M1, M1, M1, M2, M2] - ConflictKeyRegistry::nums_per_key([4, 4, 2]), - // [M0::E0, M0::E1, M0::E0, 
M0::E1, M1::E0, M1::E0, M1::E0, M1::E0, M2::E0, M2::E0] - ConflictKeyRegistry::nums_per_round_per_key([[1, 1, 0, 0], [1, 1, 4, 2]]), - ], - // [M0::E0, M1::E0, M0::E1, M2::E0, M0::E0, M1::E0, M0:E1, M2::E0, M1::E0, M1::E0] - expected_order: vec![0, 4, 1, 8, 2, 5, 3, 9, 6, 7], - } - .run() -} diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/tests/mod.rs b/consensus/src/transaction_shuffler/deprecated_fairness/tests/mod.rs deleted file mode 100644 index c3550a41f7ccc..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/tests/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -mod manual; -mod proptests; diff --git a/consensus/src/transaction_shuffler/deprecated_fairness/tests/proptests.rs b/consensus/src/transaction_shuffler/deprecated_fairness/tests/proptests.rs deleted file mode 100644 index 6d111b03d28e6..0000000000000 --- a/consensus/src/transaction_shuffler/deprecated_fairness/tests/proptests.rs +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::transaction_shuffler::deprecated_fairness::{ - conflict_key::{ - test_utils::{FakeEntryFunKey, FakeEntryFunModuleKey, FakeSenderKey, FakeTxn}, - ConflictKeyRegistry, MapByKeyId, - }, - reorder, FairnessShuffler, FairnessShufflerImpl, TxnIdx, -}; -use proptest::{collection::vec, prelude::*}; -use std::collections::BTreeSet; - -fn arb_order(num_txns: usize) -> impl Strategy> { - Just((0..num_txns).collect::>()).prop_shuffle() -} - -#[derive(Debug, Default, Eq, PartialEq)] -enum OrderOrSet { - #[default] - Empty, - Order(Vec), - Set(BTreeSet), -} - -impl OrderOrSet { - fn add(&mut self, idx: TxnIdx, is_conflict_exempt: bool) { - if self.is_empty() { - *self = if is_conflict_exempt { - Self::Set(BTreeSet::new()) - } else { - Self::Order(Vec::new()) - }; - } - - match self { - Self::Order(order) => order.push(idx), - Self::Set(set) => { - set.insert(idx); - 
}, - Self::Empty => unreachable!(), - } - } - - fn is_empty(&self) -> bool { - matches!(self, Self::Empty) - } -} - -fn sort_by_key( - order: impl IntoIterator, - registry: &ConflictKeyRegistry, -) -> MapByKeyId { - let mut map: MapByKeyId = registry.new_map_by_id(); - - for txn_idx in order { - let key_id = registry.key_id_for_txn(txn_idx); - let is_exempt = registry.is_conflict_exempt(key_id); - - map.get_mut(key_id).add(txn_idx, is_exempt); - } - - map -} - -fn assert_invariants(txns: &[FakeTxn], order: Vec, registry: &ConflictKeyRegistry) { - let num_txns = txns.len(); - let original_sorted = sort_by_key(0..num_txns, registry); - let result_sorted = sort_by_key(order, registry); - - assert_eq!(result_sorted, original_sorted); -} - -fn registries(txns: &[FakeTxn]) -> [ConflictKeyRegistry; 3] { - [ - ConflictKeyRegistry::build::(txns), - ConflictKeyRegistry::build::(txns), - ConflictKeyRegistry::build::(txns), - ] -} - -proptest! { - #[test] - fn test_reorder( order in (0..1000usize).prop_flat_map(arb_order) ) { - let num_txns = order.len(); - let txns = (0..num_txns).collect::>(); - - let reordered = reorder(txns, &order); - prop_assert_eq!(reordered, order); - } - - #[test] - fn test_fairness_shuffler( - txns in vec(any::(), 0..1000), - shuffler in any::(), - ) { - let registries = registries(&txns); - let order = FairnessShufflerImpl::new(®istries, shuffler.window_sizes()).shuffle(); - - for registry in ®istries { - assert_invariants(&txns, order.clone(), registry); - } - } -} diff --git a/consensus/src/transaction_shuffler/mod.rs b/consensus/src/transaction_shuffler/mod.rs index a75ce60e0cdd6..f69a230300c7c 100644 --- a/consensus/src/transaction_shuffler/mod.rs +++ b/consensus/src/transaction_shuffler/mod.rs @@ -6,7 +6,6 @@ use aptos_types::{on_chain_config::TransactionShufflerType, transaction::SignedT use sender_aware::SenderAwareShuffler; use std::sync::Arc; -mod deprecated_fairness; mod sender_aware; mod use_case_aware; @@ -45,22 +44,8 @@ pub fn 
create_transaction_shuffler( ); Arc::new(SenderAwareShuffler::new(conflict_window_size as usize)) }, - DeprecatedFairness { - sender_conflict_window_size, - module_conflict_window_size, - entry_fun_conflict_window_size, - } => { - info!( - "Using fairness transaction shuffling with conflict window sizes: sender {}, module {}, entry fun {}", - sender_conflict_window_size, - module_conflict_window_size, - entry_fun_conflict_window_size - ); - Arc::new(deprecated_fairness::FairnessShuffler { - sender_conflict_window_size: sender_conflict_window_size as usize, - module_conflict_window_size: module_conflict_window_size as usize, - entry_fun_conflict_window_size: entry_fun_conflict_window_size as usize, - }) + DeprecatedFairness => { + unreachable!("DeprecatedFairness shuffler is no longer supported.") }, UseCaseAware { sender_spread_factor, diff --git a/crates/aptos-admin-service/Cargo.toml b/crates/aptos-admin-service/Cargo.toml index af741b67511c7..a430be43bf996 100644 --- a/crates/aptos-admin-service/Cargo.toml +++ b/crates/aptos-admin-service/Cargo.toml @@ -19,11 +19,13 @@ aptos-consensus = { workspace = true } aptos-crypto = { workspace = true } aptos-infallible = { workspace = true } aptos-logger = { workspace = true } +aptos-mempool = { workspace = true } aptos-runtimes = { workspace = true } aptos-storage-interface = { workspace = true } aptos-system-utils = { workspace = true } aptos-types = { workspace = true } bcs = { workspace = true } +futures-channel = { workspace = true } http = { workspace = true } hyper = { workspace = true } sha256 = { workspace = true } diff --git a/crates/aptos-admin-service/src/server/mempool/mod.rs b/crates/aptos-admin-service/src/server/mempool/mod.rs new file mode 100644 index 0000000000000..187a21cf6cea3 --- /dev/null +++ b/crates/aptos-admin-service/src/server/mempool/mod.rs @@ -0,0 +1,55 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use aptos_logger::info; +use 
aptos_mempool::{MempoolClientRequest, MempoolClientSender}; +use aptos_system_utils::utils::{reply_with, reply_with_status}; +use aptos_types::account_address::AccountAddress; +use futures_channel::oneshot::Canceled; +use http::{Request, Response, StatusCode}; +use hyper::Body; + +pub async fn mempool_handle_parking_lot_address_request( + _req: Request, + mempool_client_sender: MempoolClientSender, +) -> hyper::Result> { + match get_parking_lot_addresses(mempool_client_sender).await { + Ok(addresses) => { + info!("Finished getting parking lot addresses from mempool."); + match bcs::to_bytes(&addresses) { + Ok(addresses) => Ok(reply_with(vec![], addresses)), + Err(e) => { + info!("Failed to bcs serialize parking lot addresses from mempool: {e:?}"); + Ok(reply_with_status( + StatusCode::INTERNAL_SERVER_ERROR, + e.to_string(), + )) + }, + } + }, + Err(e) => { + info!("Failed to get parking lot addresses from mempool: {e:?}"); + Ok(reply_with_status( + StatusCode::INTERNAL_SERVER_ERROR, + e.to_string(), + )) + }, + } +} + +async fn get_parking_lot_addresses( + mempool_client_sender: MempoolClientSender, +) -> Result, Canceled> { + let (sender, receiver) = futures_channel::oneshot::channel(); + + match mempool_client_sender + .clone() + .try_send(MempoolClientRequest::GetAddressesFromParkingLot(sender)) + { + Ok(_) => receiver.await, + Err(e) => { + info!("Failed to send request for GetAddressesFromParkingLot: {e:?}"); + Err(Canceled) + }, + } +} diff --git a/crates/aptos-admin-service/src/server/mod.rs b/crates/aptos-admin-service/src/server/mod.rs index 4387f8c6e9693..bc72d791ed850 100644 --- a/crates/aptos-admin-service/src/server/mod.rs +++ b/crates/aptos-admin-service/src/server/mod.rs @@ -7,6 +7,7 @@ use aptos_consensus::{ }; use aptos_infallible::RwLock; use aptos_logger::info; +use aptos_mempool::MempoolClientSender; use aptos_storage_interface::DbReaderWriter; use aptos_system_utils::utils::reply_with_status; #[cfg(target_os = "linux")] @@ -26,6 +27,7 @@ use 
std::{ use tokio::runtime::Runtime; mod consensus; +mod mempool; #[derive(Default)] pub struct Context { @@ -34,6 +36,7 @@ pub struct Context { aptos_db: RwLock>>, consensus_db: RwLock>>, quorum_store_db: RwLock>>, + mempool_client_sender: RwLock>, } impl Context { @@ -49,6 +52,10 @@ impl Context { *self.consensus_db.write() = Some(consensus_db); *self.quorum_store_db.write() = Some(quorum_store_db); } + + fn set_mempool_client_sender(&self, mempool_client_sender: MempoolClientSender) { + *self.mempool_client_sender.write() = Some(mempool_client_sender); + } } pub struct AdminService { @@ -107,6 +114,11 @@ impl AdminService { .set_consensus_dbs(consensus_db, quorum_store_db) } + pub fn set_mempool_client_sender(&self, mempool_client_sender: MempoolClientSender) { + self.context + .set_mempool_client_sender(mempool_client_sender) + } + fn start(&self, address: SocketAddr, enabled: bool) { let context = self.context.clone(); self.runtime.spawn(async move { @@ -210,6 +222,21 @@ impl AdminService { )) } }, + (hyper::Method::GET, "/debug/mempool/parking-lot/addresses") => { + let mempool_client_sender = context.mempool_client_sender.read().clone(); + if mempool_client_sender.is_some() { + mempool::mempool_handle_parking_lot_address_request( + req, + mempool_client_sender.unwrap(), + ) + .await + } else { + Ok(reply_with_status( + StatusCode::NOT_FOUND, + "Mempool parking lot is not available.", + )) + } + }, _ => Ok(reply_with_status(StatusCode::NOT_FOUND, "Not found.")), } } diff --git a/crates/aptos-api-tester/src/tests/coin_transfer.rs b/crates/aptos-api-tester/src/tests/coin_transfer.rs index 18b8b8006ff23..1f07e317997cf 100644 --- a/crates/aptos-api-tester/src/tests/coin_transfer.rs +++ b/crates/aptos-api-tester/src/tests/coin_transfer.rs @@ -204,11 +204,11 @@ async fn check_account_balance( address: AccountAddress, ) -> Result<(), TestFailure> { // expected - let expected = U64(TRANSFER_AMOUNT); + let expected = TRANSFER_AMOUNT; // actual - let actual = match 
client.get_account_balance(address).await { - Ok(response) => response.into_inner().coin.value, + let actual = match client.view_apt_account_balance(address).await { + Ok(response) => response.into_inner(), Err(e) => { error!( "test: coin_transfer part: check_account_balance ERROR: {}, with error {:?}", @@ -236,14 +236,14 @@ async fn check_account_balance_at_version( transaction_version: u64, ) -> Result<(), TestFailure> { // expected - let expected = U64(0); + let expected = 0; // actual let actual = match client - .get_account_balance_at_version(address, transaction_version - 1) + .view_apt_account_balance_at_version(address, transaction_version - 1) .await { - Ok(response) => response.into_inner().coin.value, + Ok(response) => response.into_inner(), Err(e) => { error!( "test: coin_transfer part: check_account_balance_at_version ERROR: {}, with error {:?}", diff --git a/crates/aptos-api-tester/src/utils.rs b/crates/aptos-api-tester/src/utils.rs index fb1ad99d72ae0..44c08a0b4cb6c 100644 --- a/crates/aptos-api-tester/src/utils.rs +++ b/crates/aptos-api-tester/src/utils.rs @@ -181,8 +181,8 @@ pub async fn check_balance( expected: U64, ) -> Result<(), TestFailure> { // actual - let actual = match client.get_account_balance(address).await { - Ok(response) => response.into_inner().coin.value, + let actual = match client.view_apt_account_balance(address).await { + Ok(response) => response.into_inner(), Err(e) => { error!( "test: {} part: check_account_data ERROR: {}, with error {:?}", @@ -195,7 +195,7 @@ pub async fn check_balance( }; // compare - if expected != actual { + if expected.0 != actual { error!( "test: {} part: check_account_data FAIL: {}, expected {:?}, got {:?}", &test_name.to_string(), diff --git a/crates/aptos-collections/src/bounded_vec_deque.rs b/crates/aptos-collections/src/bounded_vec_deque.rs index 6435b7371f331..5da8bdd16e047 100644 --- a/crates/aptos-collections/src/bounded_vec_deque.rs +++ b/crates/aptos-collections/src/bounded_vec_deque.rs @@ 
-52,6 +52,14 @@ impl BoundedVecDeque { pub fn iter(&self) -> Iter<'_, T> { self.inner.iter() } + + pub fn len(&self) -> usize { + self.inner.len() + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } } impl IntoIterator for BoundedVecDeque { diff --git a/crates/aptos-crypto/Cargo.toml b/crates/aptos-crypto/Cargo.toml index eacada812fa56..8a0822a6b4bfa 100644 --- a/crates/aptos-crypto/Cargo.toml +++ b/crates/aptos-crypto/Cargo.toml @@ -16,6 +16,7 @@ rust-version = { workspace = true } aes-gcm = { workspace = true } anyhow = { workspace = true } aptos-crypto-derive = { workspace = true } +arbitrary = { workspace = true, features = ["derive"], optional = true } ark-bn254 = { workspace = true } ark-ec = { workspace = true } ark-ff = { workspace = true } @@ -61,6 +62,7 @@ typenum = { workspace = true } x25519-dalek = { workspace = true } [dev-dependencies] +arbitrary = { workspace = true, features = ["derive"] } ark-bls12-381 = { workspace = true } ark-bn254 = { workspace = true } ark-serialize = { workspace = true } @@ -79,7 +81,7 @@ trybuild = { workspace = true } default = [] assert-private-keys-not-cloneable = [] cloneable-private-keys = [] -fuzzing = ["proptest", "proptest-derive", "cloneable-private-keys"] +fuzzing = ["proptest", "proptest-derive", "cloneable-private-keys", "arbitrary"] testing = [] [[bench]] diff --git a/crates/aptos-crypto/src/ed25519/ed25519_keys.rs b/crates/aptos-crypto/src/ed25519/ed25519_keys.rs index b1f578b0a3e23..4914e1448252f 100644 --- a/crates/aptos-crypto/src/ed25519/ed25519_keys.rs +++ b/crates/aptos-crypto/src/ed25519/ed25519_keys.rs @@ -38,6 +38,15 @@ impl Clone for Ed25519PrivateKey { #[derive(DeserializeKey, Clone, SerializeKey)] pub struct Ed25519PublicKey(pub(crate) ed25519_dalek::PublicKey); +#[cfg(any(test, feature = "fuzzing"))] +impl<'a> arbitrary::Arbitrary<'a> for Ed25519PublicKey { + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + let bytes: [u8; ED25519_PUBLIC_KEY_LENGTH] = 
u.arbitrary()?; + Ed25519PublicKey::from_bytes_unchecked(&bytes) + .map_err(|_| arbitrary::Error::IncorrectFormat) + } +} + impl Ed25519PrivateKey { /// The length of the Ed25519PrivateKey pub const LENGTH: usize = ED25519_PRIVATE_KEY_LENGTH; diff --git a/crates/aptos-crypto/src/ed25519/ed25519_sigs.rs b/crates/aptos-crypto/src/ed25519/ed25519_sigs.rs index 4c1d9438c83a0..c13b8483b7f97 100644 --- a/crates/aptos-crypto/src/ed25519/ed25519_sigs.rs +++ b/crates/aptos-crypto/src/ed25519/ed25519_sigs.rs @@ -18,6 +18,15 @@ use std::{cmp::Ordering, fmt}; #[derive(DeserializeKey, Clone, SerializeKey)] pub struct Ed25519Signature(pub(crate) ed25519_dalek::Signature); +#[cfg(any(test, feature = "fuzzing"))] +impl<'a> arbitrary::Arbitrary<'a> for Ed25519Signature { + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + let bytes: [u8; ED25519_SIGNATURE_LENGTH] = u.arbitrary()?; + Ed25519Signature::from_bytes_unchecked(&bytes) + .map_err(|_| arbitrary::Error::IncorrectFormat) + } +} + impl Ed25519Signature { /// The length of the Ed25519Signature pub const LENGTH: usize = ED25519_SIGNATURE_LENGTH; diff --git a/crates/aptos-crypto/src/secp256r1_ecdsa/secp256r1_ecdsa_keys.rs b/crates/aptos-crypto/src/secp256r1_ecdsa/secp256r1_ecdsa_keys.rs index e2f32013245a0..d48cd8a2a3a9c 100644 --- a/crates/aptos-crypto/src/secp256r1_ecdsa/secp256r1_ecdsa_keys.rs +++ b/crates/aptos-crypto/src/secp256r1_ecdsa/secp256r1_ecdsa_keys.rs @@ -41,6 +41,14 @@ impl Clone for PrivateKey { #[key_name("Secp256r1EcdsaPublicKey")] pub struct PublicKey(pub(crate) p256::ecdsa::VerifyingKey); +#[cfg(any(test, feature = "fuzzing"))] +impl<'a> arbitrary::Arbitrary<'a> for PublicKey { + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + let bytes: [u8; PUBLIC_KEY_LENGTH] = u.arbitrary()?; + PublicKey::from_bytes_unchecked(&bytes).map_err(|_| arbitrary::Error::IncorrectFormat) + } +} + impl PrivateKey { /// The length of the PrivateKey pub const LENGTH: usize = 
PRIVATE_KEY_LENGTH; diff --git a/crates/aptos-crypto/src/secp256r1_ecdsa/secp256r1_ecdsa_sigs.rs b/crates/aptos-crypto/src/secp256r1_ecdsa/secp256r1_ecdsa_sigs.rs index 28cfb76105170..aac404aaaba40 100644 --- a/crates/aptos-crypto/src/secp256r1_ecdsa/secp256r1_ecdsa_sigs.rs +++ b/crates/aptos-crypto/src/secp256r1_ecdsa/secp256r1_ecdsa_sigs.rs @@ -35,6 +35,7 @@ impl Signature { /// Deserialize an P256Signature, without checking for malleability /// Uses the SEC1 serialization format. + #[cfg(not(feature = "fuzzing"))] pub(crate) fn from_bytes_unchecked( bytes: &[u8], ) -> std::result::Result { @@ -44,6 +45,18 @@ impl Signature { } } + /// Deserialize an P256Signature, without checking for malleability + /// Uses the SEC1 serialization format. + #[cfg(any(test, feature = "fuzzing"))] + pub fn from_bytes_unchecked( + bytes: &[u8], + ) -> std::result::Result { + match p256::ecdsa::Signature::try_from(bytes) { + Ok(p256_signature) => Ok(Signature(p256_signature)), + Err(_) => Err(CryptoMaterialError::DeserializationError), + } + } + /// return an all-zero signature (for test only) #[cfg(any(test, feature = "fuzzing"))] pub fn dummy_signature() -> Self { diff --git a/crates/aptos-dkg/src/weighted_vuf/pinkas/mod.rs b/crates/aptos-dkg/src/weighted_vuf/pinkas/mod.rs index dd3795830f9ea..254ff831a3d26 100644 --- a/crates/aptos-dkg/src/weighted_vuf/pinkas/mod.rs +++ b/crates/aptos-dkg/src/weighted_vuf/pinkas/mod.rs @@ -246,7 +246,7 @@ impl WeightedVUF for PinkasWUF { pis.push( apks[player.id] .as_ref() - .ok_or(anyhow!("Missing APK for player {}", player.get_id()))? + .ok_or_else(|| anyhow!("Missing APK for player {}", player.get_id()))? 
.0 .pi, ); @@ -299,7 +299,7 @@ impl PinkasWUF { let apk = apks[player.id] .as_ref() - .ok_or(anyhow!("Missing APK for player {}", player.get_id()))?; + .ok_or_else(|| anyhow!("Missing APK for player {}", player.get_id()))?; rks.push(&apk.0.rks); shares.push(share); diff --git a/crates/aptos-faucet/core/src/funder/transfer.rs b/crates/aptos-faucet/core/src/funder/transfer.rs index c47fdad80fe28..5b3ac87206527 100644 --- a/crates/aptos-faucet/core/src/funder/transfer.rs +++ b/crates/aptos-faucet/core/src/funder/transfer.rs @@ -22,7 +22,7 @@ use aptos_sdk::{ account_address::AccountAddress, chain_id::ChainId, transaction::{authenticator::AuthenticationKey, SignedTransaction, TransactionPayload}, - AptosCoinType, LocalAccount, + LocalAccount, }, }; use async_trait::async_trait; @@ -314,7 +314,7 @@ impl FunderTrait for TransferFunder { let account_address = self.faucet_account.read().await.address(); let funder_balance = match self .get_api_client() - .get_account_balance_bcs::(account_address) + .view_apt_account_balance(account_address) .await { Ok(response) => response.into_inner(), diff --git a/crates/aptos-faucet/core/src/server/run.rs b/crates/aptos-faucet/core/src/server/run.rs index cffc2c179c664..b5b80e6440377 100644 --- a/crates/aptos-faucet/core/src/server/run.rs +++ b/crates/aptos-faucet/core/src/server/run.rs @@ -12,7 +12,7 @@ use crate::{ funder::{ApiConnectionConfig, FunderConfig, MintFunderConfig, TransactionSubmissionConfig}, middleware::middleware_log, }; -use anyhow::{Context, Result}; +use anyhow::{anyhow, Context, Result}; use aptos_config::keys::ConfigKey; use aptos_faucet_metrics_server::{run_metrics_server, MetricsServerConfig}; use aptos_logger::info; @@ -21,12 +21,12 @@ use aptos_sdk::{ types::{account_config::aptos_test_root_address, chain_id::ChainId}, }; use clap::Parser; -use futures::lock::Mutex; -use poem::{http::Method, listener::TcpListener, middleware::Cors, EndpointExt, Route, Server}; +use futures::{channel::oneshot::Sender as 
OneShotSender, lock::Mutex}; +use poem::{http::Method, listener::TcpAcceptor, middleware::Cors, EndpointExt, Route, Server}; use reqwest::Url; use serde::{Deserialize, Serialize}; use std::{fs::File, io::BufReader, path::PathBuf, pin::Pin, str::FromStr, sync::Arc}; -use tokio::{sync::Semaphore, task::JoinSet}; +use tokio::{net::TcpListener, sync::Semaphore, task::JoinSet}; #[derive(Clone, Debug, Deserialize, Serialize)] pub struct HandlerConfig { @@ -68,6 +68,14 @@ pub struct RunConfig { impl RunConfig { pub async fn run(self) -> Result<()> { + self.run_impl(None).await + } + + pub async fn run_and_report_port(self, port_tx: OneShotSender) -> Result<()> { + self.run_impl(Some(port_tx)).await + } + + async fn run_impl(self, port_tx: Option>) -> Result<()> { info!("Running with config: {:#?}", self); // Set whether we should use useful errors. @@ -177,12 +185,19 @@ impl RunConfig { })); } - // Create a future for the API server. - let api_server_future = Server::new(TcpListener::bind(( + let listener = TcpListener::bind(( self.server_config.listen_address.clone(), self.server_config.listen_port, - ))) - .run( + )) + .await?; + let port = listener.local_addr()?.port(); + + if let Some(tx) = port_tx { + tx.send(port).map_err(|_| anyhow!("failed to send port"))?; + } + + // Create a future for the API server. + let api_server_future = Server::new_with_acceptor(TcpAcceptor::from_tokio(listener)?).run( Route::new() .nest( &self.server_config.api_path_base, @@ -821,10 +836,10 @@ mod test { // Assert that the account exists now with the expected balance. let response = aptos_node_api_client - .get_account_balance(AccountAddress::from_str(&fund_request.address.unwrap()).unwrap()) + .view_apt_account_balance(AccountAddress::from_str(&fund_request.address.unwrap()).unwrap()) .await?; - assert_eq!(response.into_inner().get(), 10); + assert_eq!(response.into_inner(), 10); Ok(()) } @@ -879,10 +894,12 @@ mod test { // Assert that the account exists now with the expected balance.
let response = aptos_node_api_client - .get_account_balance(AccountAddress::from_str(&fund_request.address.unwrap()).unwrap()) + .view_apt_account_balance( + AccountAddress::from_str(&fund_request.address.unwrap()).unwrap(), + ) .await?; - assert_eq!(response.into_inner().get(), 10); + assert_eq!(response.into_inner(), 10); Ok(()) } @@ -927,10 +944,12 @@ mod test { // Confirm that the account was given the full 1000 OCTA as requested. let response = aptos_node_api_client - .get_account_balance(AccountAddress::from_str(&fund_request.address.unwrap()).unwrap()) + .view_apt_account_balance( + AccountAddress::from_str(&fund_request.address.unwrap()).unwrap(), + ) .await?; - assert_eq!(response.into_inner().get(), 1000); + assert_eq!(response.into_inner(), 1000); // This time, don't include the auth token. We request more than maximum_amount, // but later we'll see that the faucet will only give us maximum_amount, not @@ -945,10 +964,12 @@ mod test { // Confirm that the account was only given 100 OCTA (maximum_amount), not 1000. let response = aptos_node_api_client - .get_account_balance(AccountAddress::from_str(&fund_request.address.unwrap()).unwrap()) + .view_apt_account_balance( + AccountAddress::from_str(&fund_request.address.unwrap()).unwrap(), + ) .await?; - assert_eq!(response.into_inner().get(), 100); + assert_eq!(response.into_inner(), 100); Ok(()) } diff --git a/crates/aptos-jwk-consensus/src/epoch_manager.rs b/crates/aptos-jwk-consensus/src/epoch_manager.rs index 590435f65be77..802efc2cb1e45 100644 --- a/crates/aptos-jwk-consensus/src/epoch_manager.rs +++ b/crates/aptos-jwk-consensus/src/epoch_manager.rs @@ -154,10 +154,7 @@ impl EpochManager

{ .get() .expect("failed to get ValidatorSet from payload"); - let epoch_state = Arc::new(EpochState { - epoch: payload.epoch(), - verifier: (&validator_set).into(), - }); + let epoch_state = Arc::new(EpochState::new(payload.epoch(), (&validator_set).into())); self.epoch_state = Some(epoch_state.clone()); let my_index = epoch_state .verifier diff --git a/crates/aptos-jwk-consensus/src/jwk_manager/tests.rs b/crates/aptos-jwk-consensus/src/jwk_manager/tests.rs index 6e946852333d6..d0b00821c5400 100644 --- a/crates/aptos-jwk-consensus/src/jwk_manager/tests.rs +++ b/crates/aptos-jwk-consensus/src/jwk_manager/tests.rs @@ -51,7 +51,7 @@ async fn test_jwk_manager_state_transition() { .collect(); let epoch_state = EpochState { epoch: 999, - verifier: ValidatorVerifier::new(validator_consensus_infos.clone()), + verifier: ValidatorVerifier::new(validator_consensus_infos.clone()).into(), }; let update_certifier = DummyUpdateCertifier::default(); diff --git a/crates/aptos-jwk-consensus/src/observation_aggregation/tests.rs b/crates/aptos-jwk-consensus/src/observation_aggregation/tests.rs index f936af700e627..0f4fb1db91dec 100644 --- a/crates/aptos-jwk-consensus/src/observation_aggregation/tests.rs +++ b/crates/aptos-jwk-consensus/src/observation_aggregation/tests.rs @@ -37,7 +37,7 @@ fn test_observation_aggregation_state() { .map(|i| ValidatorConsensusInfo::new(addrs[i], public_keys[i].clone(), voting_powers[i])) .collect(); let verifier = ValidatorVerifier::new(validator_infos); - let epoch_state = Arc::new(EpochState { epoch, verifier }); + let epoch_state = Arc::new(EpochState::new(epoch, verifier)); let view_0 = ProviderJWKs { issuer: b"https::/alice.com".to_vec(), version: 123, diff --git a/crates/aptos-rest-client/src/lib.rs b/crates/aptos-rest-client/src/lib.rs index ef4ca6d8ff602..30b8542c70d05 100644 --- a/crates/aptos-rest-client/src/lib.rs +++ b/crates/aptos-rest-client/src/lib.rs @@ -15,10 +15,7 @@ pub mod state; pub mod types; pub use 
crate::client_builder::{AptosBaseUrl, ClientBuilder}; -use crate::{ - aptos::{AptosVersion, Balance}, - error::RestError, -}; +use crate::{aptos::AptosVersion, error::RestError}; use anyhow::{anyhow, Result}; pub use aptos_api_types::{ self, IndexResponseBcs, MoveModuleBytecode, PendingTransaction, Transaction, @@ -34,13 +31,15 @@ use aptos_crypto::HashValue; use aptos_logger::{debug, info, sample, sample::SampleRate}; use aptos_types::{ account_address::AccountAddress, - account_config::{AccountResource, CoinStoreResource, NewBlockEvent, CORE_CODE_ADDRESS}, + account_config::{AccountResource, NewBlockEvent, CORE_CODE_ADDRESS}, contract_event::EventWithVersion, state_store::state_key::StateKey, transaction::SignedTransaction, - CoinType, }; -use move_core_types::language_storage::StructTag; +use move_core_types::{ + ident_str, + language_storage::{ModuleId, StructTag, TypeTag}, +}; use reqwest::{ header::{ACCEPT, CONTENT_TYPE}, Client as ReqwestClient, StatusCode, @@ -48,7 +47,7 @@ use reqwest::{ use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde_json::{json, Value}; pub use state::State; -use std::{collections::BTreeMap, future::Future, time::Duration}; +use std::{collections::BTreeMap, future::Future, str::FromStr, time::Duration}; use tokio::time::Instant; pub use types::{deserialize_from_prefixed_hex_string, Account, Resource}; use url::Url; @@ -205,51 +204,50 @@ impl Client { Ok(response.and_then(|inner| bcs::from_bytes(&inner))?) 
} - pub async fn get_account_balance( + async fn view_account_balance_bcs_impl( &self, address: AccountAddress, - ) -> AptosResult> { - let resp = self - .get_account_resource(address, "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>") + coin_type: &str, + version: Option, + ) -> AptosResult> { + let resp: Response> = self + .view_bcs( + &ViewFunction { + module: ModuleId::new(AccountAddress::ONE, ident_str!("coin").into()), + function: ident_str!("balance").into(), + ty_args: vec![TypeTag::Struct(Box::new( + StructTag::from_str(coin_type).unwrap(), + ))], + args: vec![bcs::to_bytes(&address).unwrap()], + }, + version, + ) .await?; - resp.and_then(|resource| { - if let Some(res) = resource { - Ok(serde_json::from_value::(res.data)?) + + resp.and_then(|result| { + if result.len() != 1 { + Err(anyhow!("Wrong data size returned: {:?}", result).into()) } else { - Err(anyhow!("No data returned").into()) + Ok(result[0]) } }) } - pub async fn get_account_balance_bcs( + pub async fn view_apt_account_balance_at_version( &self, address: AccountAddress, + version: u64, ) -> AptosResult> { - let resp = self - .get_account_resource_bcs::>(address, &C::type_tag().to_string()) - .await?; - resp.and_then(|resource| Ok(resource.coin())) + self.view_account_balance_bcs_impl(address, "0x1::aptos_coin::AptosCoin", Some(version)) + .await } - pub async fn get_account_balance_at_version( + pub async fn view_apt_account_balance( &self, address: AccountAddress, - version: u64, - ) -> AptosResult> { - let resp = self - .get_account_resource_at_version( - address, - "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>", - version, - ) - .await?; - resp.and_then(|resource| { - if let Some(res) = resource { - Ok(serde_json::from_value::(res.data)?) 
- } else { - Err(anyhow!("No data returned").into()) - } - }) + ) -> AptosResult> { + self.view_account_balance_bcs_impl(address, "0x1::aptos_coin::AptosCoin", None) + .await } pub async fn get_index(&self) -> AptosResult> { diff --git a/crates/aptos-rosetta-cli/src/construction.rs b/crates/aptos-rosetta-cli/src/construction.rs index c0b6485fc157a..466b2a23ec289 100644 --- a/crates/aptos-rosetta-cli/src/construction.rs +++ b/crates/aptos-rosetta-cli/src/construction.rs @@ -4,7 +4,7 @@ use crate::common::{format_output, NetworkArgs, UrlArgs}; use aptos::common::types::{EncodingOptions, PrivateKeyInputOptions, ProfileOptions}; use aptos_logger::info; -use aptos_rosetta::types::TransactionIdentifier; +use aptos_rosetta::types::{Currency, TransactionIdentifier}; use aptos_types::account_address::AccountAddress; use clap::{Parser, Subcommand}; use std::time::{Duration, SystemTime, UNIX_EPOCH}; @@ -154,6 +154,12 @@ pub struct TransferCommand { /// The amount of coins to send #[clap(long)] amount: u64, + #[clap(long, value_parser = parse_currency)] + currency: Currency, +} + +fn parse_currency(str: &str) -> anyhow::Result { + Ok(serde_json::from_str(str)?) 
} impl TransferCommand { @@ -175,6 +181,7 @@ impl TransferCommand { self.txn_args.sequence_number, self.txn_args.max_gas, self.txn_args.gas_price, + self.currency, ) .await } diff --git a/crates/aptos-rosetta/src/account.rs b/crates/aptos-rosetta/src/account.rs index f75322c2fd99f..6e24fd04e2dde 100644 --- a/crates/aptos-rosetta/src/account.rs +++ b/crates/aptos-rosetta/src/account.rs @@ -8,19 +8,24 @@ use crate::{ common::{ - check_network, get_block_index_from_request, handle_request, native_coin, native_coin_tag, - with_context, + check_network, get_block_index_from_request, handle_request, native_coin, with_context, }, error::{ApiError, ApiResult}, types::{AccountBalanceRequest, AccountBalanceResponse, Amount, Currency, *}, RosettaContext, }; use aptos_logger::{debug, trace, warn}; -use aptos_types::{ - account_address::AccountAddress, - account_config::{AccountResource, CoinStoreResourceUntyped}, +use aptos_rest_client::{ + aptos_api_types::{AptosError, AptosErrorCode, ViewFunction}, + error::{AptosErrorResponse, RestError}, }; -use std::{collections::HashSet, str::FromStr}; +use aptos_types::{account_address::AccountAddress, account_config::AccountResource}; +use move_core_types::{ + ident_str, + language_storage::{ModuleId, StructTag, TypeTag}, + parser::parse_type_tag, +}; +use std::str::FromStr; use warp::Filter; /// Account routes e.g. 
balance @@ -53,7 +58,6 @@ async fn account_balance( let network_identifier = request.network_identifier; check_network(network_identifier, &server_context)?; - let rest_client = server_context.rest_client()?; // Retrieve the block index to read let block_height = @@ -69,7 +73,7 @@ async fn account_balance( // Retrieve all metadata we want to provide as an on-demand lookup let (sequence_number, operators, balances, lockup_expiration) = get_balances( - &rest_client, + &server_context, request.account_identifier, balance_version, request.currencies, @@ -90,11 +94,12 @@ async fn account_balance( /// Retrieve the balances for an account #[allow(clippy::manual_retain)] async fn get_balances( - rest_client: &aptos_rest_client::Client, + server_context: &RosettaContext, account: AccountIdentifier, version: u64, maybe_filter_currencies: Option>, ) -> ApiResult<(u64, Option>, Vec, u64)> { + let rest_client = server_context.rest_client()?; let owner_address = account.account_address()?; let pool_address = account.pool_address()?; @@ -105,7 +110,7 @@ async fn get_balances( // Lookup the delegation pool, if it's provided in the account information if pool_address.is_some() { match get_delegation_stake_balances( - rest_client, + rest_client.as_ref(), &account, owner_address, pool_address.unwrap(), @@ -137,156 +142,198 @@ async fn get_balances( } } - // Retrieve all account resources - // TODO: This will need to change for FungibleAssets, will need to lookup on a list of known FAs - if let Ok(response) = rest_client - .get_account_resources_at_version_bcs(owner_address, version) + // Retrieve sequence number + let sequence_number = match rest_client + .get_account_resource_at_version_bcs(owner_address, "0x1::account::Account", version) .await { - let resources = response.into_inner(); - let mut maybe_sequence_number = None; - let mut maybe_operators = None; - - // Iterate through resources, converting balances - for (struct_tag, bytes) in resources { - match ( - 
struct_tag.address, - struct_tag.module.as_str(), - struct_tag.name.as_str(), - ) { - // Retrieve the sequence number from the account resource - // TODO: Make a separate call for this - (AccountAddress::ONE, ACCOUNT_MODULE, ACCOUNT_RESOURCE) => { - let account: AccountResource = bcs::from_bytes(&bytes)?; - maybe_sequence_number = Some(account.sequence_number()) + Ok(response) => { + let account: AccountResource = response.into_inner(); + account.sequence_number() + }, + Err(RestError::Api(AptosErrorResponse { + error: + AptosError { + error_code: AptosErrorCode::AccountNotFound, + .. }, - // Parse all associated coin stores - // TODO: This would need to be expanded to support other coin stores - (AccountAddress::ONE, COIN_MODULE, COIN_STORE_RESOURCE) => { - // Only show coins on the base account - if account.is_base_account() { - let coin_store: CoinStoreResourceUntyped = bcs::from_bytes(&bytes)?; - if let Some(coin_type) = struct_tag.type_args.first() { - // Only display supported coins - if coin_type == &native_coin_tag() { - balances.push(Amount { - value: coin_store.coin().to_string(), - currency: native_coin(), - }); - } - } - } + .. + })) + | Err(RestError::Api(AptosErrorResponse { + error: + AptosError { + error_code: AptosErrorCode::ResourceNotFound, + .. 
}, - // Parse all staking contract data to know the underlying balances of the pools - (AccountAddress::ONE, STAKING_CONTRACT_MODULE, STORE_RESOURCE) => { - if account.is_base_account() || pool_address.is_some() { - continue; - } - - let store: Store = bcs::from_bytes(&bytes)?; - maybe_operators = Some(vec![]); - for (operator, contract) in store.staking_contracts { - // Keep track of operators - maybe_operators.as_mut().unwrap().push(operator); - match get_stake_balances( - rest_client, - &account, - contract.pool_address, - version, - ) - .await - { - Ok(Some(balance_result)) => { - if let Some(balance) = balance_result.balance { - total_requested_balance = Some( - total_requested_balance.unwrap_or_default() - + u64::from_str(&balance.value).unwrap_or_default(), - ); - } - lockup_expiration = balance_result.lockup_expiration; - }, - result => { - warn!( - "Failed to retrieve requested balance for account: {}, address: {}: {:?}", - owner_address, contract.pool_address, result - ) - }, - } - } - if let Some(balance) = total_requested_balance { - balances.push(Amount { - value: balance.to_string(), - currency: native_coin(), - }) - } - - /* TODO: Right now operator stake is not supported - else if account.is_operator_stake() { - // For operator stake, filter on operator address - let operator_address = account.operator_address()?; - if let Some(contract) = store.staking_contracts.get(&operator_address) { - balances.push(get_total_stake( - rest_client, - &account, - contract.pool_address, - version, - ).await?); - } - }*/ - }, - _ => {}, - } - } - - // Retrieves the sequence number accordingly - // TODO: Sequence number should be 0 if it isn't retrieved probably - let sequence_number = if let Some(sequence_number) = maybe_sequence_number { - sequence_number - } else { + .. 
+ })) => { + // If the account or resource doesn't exist, set the sequence number to 0 + 0 + }, + _ => { + // Any other error we can't retrieve the sequence number return Err(ApiError::InternalError(Some( "Failed to retrieve account sequence number".to_string(), ))); - }; - - // Filter based on requested currencies - if let Some(currencies) = maybe_filter_currencies { - let mut currencies: HashSet = currencies.into_iter().collect(); - // Remove extra currencies not requested - balances = balances - .into_iter() - .filter(|balance| currencies.contains(&balance.currency)) - .collect(); + }, + }; - for balance in balances.iter() { - currencies.remove(&balance.currency); + // Retrieve staking information (if it applies) + // Only non-pool addresses, and non base accounts + let mut maybe_operators = None; + if !account.is_base_account() && pool_address.is_none() { + if let Ok(response) = rest_client + .get_account_resource_at_version_bcs( + owner_address, + "0x1::staking_contract::Store", + version, + ) + .await + { + let store: Store = response.into_inner(); + maybe_operators = Some(vec![]); + for (operator, contract) in store.staking_contracts { + // Keep track of operators + maybe_operators.as_mut().unwrap().push(operator); + match get_stake_balances( + rest_client.as_ref(), + &account, + contract.pool_address, + version, + ) + .await + { + Ok(Some(balance_result)) => { + if let Some(balance) = balance_result.balance { + total_requested_balance = Some( + total_requested_balance.unwrap_or_default() + + u64::from_str(&balance.value).unwrap_or_default(), + ); + } + lockup_expiration = balance_result.lockup_expiration; + }, + result => { + warn!( + "Failed to retrieve requested balance for account: {}, address: {}: {:?}", + owner_address, contract.pool_address, result + ) + }, + } } - - for currency in currencies { + if let Some(balance) = total_requested_balance { balances.push(Amount { - value: 0.to_string(), - currency, - }); + value: balance.to_string(), + currency: 
native_coin(), + }) } + + /* TODO: Right now operator stake is not supported + else if account.is_operator_stake() { + // For operator stake, filter on operator address + let operator_address = account.operator_address()?; + if let Some(contract) = store.staking_contracts.get(&operator_address) { + balances.push(get_total_stake( + rest_client, + &account, + contract.pool_address, + version, + ).await?); + } + }*/ } + } - // Retrieve balances - Ok(( - sequence_number, - maybe_operators, - balances, - lockup_expiration, - )) + // Filter currencies to lookup + let currencies_to_lookup = if let Some(currencies) = maybe_filter_currencies { + currencies.into_iter().collect() } else { - // If it fails, we return 0 - // TODO: This should probably be fixed to check if the account exists. Then if the account doesn't exist, return empty balance, otherwise error - Ok(( - 0, - None, - vec![Amount { - value: 0.to_string(), - currency: native_coin(), - }], - 0, - )) + server_context.currencies.clone() + }; + + // Retrieve the fungible asset balances and the coin balances + for currency in currencies_to_lookup.iter() { + match *currency { + // FA only + Currency { + metadata: + Some(CurrencyMetadata { + move_type: None, + fa_address: Some(ref fa_address), + }), + .. + } => { + let response = rest_client + .view_bcs::>( + &ViewFunction { + module: ModuleId { + address: AccountAddress::ONE, + name: ident_str!(PRIMARY_FUNGIBLE_STORE_MODULE).into(), + }, + function: ident_str!(BALANCE_FUNCTION).into(), + ty_args: vec![TypeTag::Struct(Box::new(StructTag { + address: AccountAddress::ONE, + module: ident_str!(OBJECT_MODULE).into(), + name: ident_str!(OBJECT_CORE_RESOURCE).into(), + type_args: vec![], + }))], + args: vec![ + bcs::to_bytes(&owner_address).unwrap(), + bcs::to_bytes(&AccountAddress::from_str(fa_address).unwrap()) + .unwrap(), + ], + }, + Some(version), + ) + .await? 
+ .into_inner(); + let fa_balance = response.first().copied().unwrap_or(0); + balances.push(Amount { + value: fa_balance.to_string(), + currency: currency.clone(), + }) + }, + // Coin or Coin and FA combined + Currency { + metadata: + Some(CurrencyMetadata { + move_type: Some(ref coin_type), + fa_address: _, + }), + .. + } => { + if let Ok(type_tag) = parse_type_tag(coin_type) { + let response = rest_client + .view_bcs::>( + &ViewFunction { + module: ModuleId { + address: AccountAddress::ONE, + name: ident_str!(COIN_MODULE).into(), + }, + function: ident_str!(BALANCE_FUNCTION).into(), + ty_args: vec![type_tag], + args: vec![bcs::to_bytes(&owner_address).unwrap()], + }, + Some(version), + ) + .await? + .into_inner(); + let coin_balance = response.first().copied().unwrap_or(0); + balances.push(Amount { + value: coin_balance.to_string(), + currency: currency.clone(), + }) + } + }, + _ => { + // None for both, means we can't look it up anyways / it's invalid + }, + } } + + Ok(( + sequence_number, + maybe_operators, + balances, + lockup_expiration, + )) } diff --git a/crates/aptos-rosetta/src/client.rs b/crates/aptos-rosetta/src/client.rs index ad7975f616047..c6807a11613b2 100644 --- a/crates/aptos-rosetta/src/client.rs +++ b/crates/aptos-rosetta/src/client.rs @@ -11,9 +11,10 @@ use crate::{ ConstructionParseRequest, ConstructionParseResponse, ConstructionPayloadsRequest, ConstructionPayloadsResponse, ConstructionPreprocessRequest, ConstructionPreprocessResponse, ConstructionSubmitRequest, ConstructionSubmitResponse, - Error, MetadataRequest, NetworkIdentifier, NetworkListResponse, NetworkOptionsResponse, - NetworkRequest, NetworkStatusResponse, Operation, PreprocessMetadata, PublicKey, Signature, - SignatureType, TransactionIdentifier, TransactionIdentifierResponse, + Currency, Error, MetadataRequest, NetworkIdentifier, NetworkListResponse, + NetworkOptionsResponse, NetworkRequest, NetworkStatusResponse, Operation, + PreprocessMetadata, PublicKey, Signature, 
SignatureType, TransactionIdentifier, + TransactionIdentifierResponse, }, }; use anyhow::anyhow; @@ -190,6 +191,7 @@ impl RosettaClient { sequence_number: Option, max_gas: Option, gas_unit_price: Option, + currency: Currency, ) -> anyhow::Result { let sender = self .get_account_address(network_identifier.clone(), private_key) @@ -203,14 +205,14 @@ impl RosettaClient { 0, None, AccountIdentifier::base_account(sender), - native_coin(), + currency.clone(), amount, ), Operation::deposit( 1, None, AccountIdentifier::base_account(receiver), - native_coin(), + currency, amount, ), ]; diff --git a/crates/aptos-rosetta/src/common.rs b/crates/aptos-rosetta/src/common.rs index b078f455a02c7..b889e4ab400bf 100644 --- a/crates/aptos-rosetta/src/common.rs +++ b/crates/aptos-rosetta/src/common.rs @@ -19,7 +19,7 @@ use aptos_sdk::move_types::{ use aptos_types::{account_address::AccountAddress, chain_id::ChainId}; use futures::future::BoxFuture; use serde::{de::DeserializeOwned, Deserialize, Serialize}; -use std::{convert::Infallible, fmt::LowerHex, future::Future, str::FromStr}; +use std::{collections::HashSet, convert::Infallible, fmt::LowerHex, future::Future, str::FromStr}; use warp::Filter; /// The year 2000 in milliseconds, as this is the lower limit for Rosetta API implementations @@ -149,12 +149,15 @@ const DEFAULT_COIN: &str = "APT"; const DEFAULT_DECIMALS: u8 = 8; /// Provides the [Currency] for 0x1::aptos_coin::AptosCoin aka APT +/// +/// Note that 0xA is the address for FA, but it has to be skipped in order to have backwards compatibility pub fn native_coin() -> Currency { Currency { symbol: DEFAULT_COIN.to_string(), decimals: DEFAULT_DECIMALS, metadata: Some(CurrencyMetadata { - move_type: native_coin_tag().to_string(), + move_type: Some(native_coin_tag().to_string()), + fa_address: None, }), } } @@ -169,14 +172,52 @@ pub fn native_coin_tag() -> TypeTag { })) } -/// Tells us whether the coin is APT and errors if it's not -/// -/// TODO: This is the function that needs 
to be replaced to handle more coin types -pub fn is_native_coin(currency: &Currency) -> ApiResult<()> { - if currency == &native_coin() { - Ok(()) +#[inline] +pub fn is_native_coin(fa_address: AccountAddress) -> bool { + fa_address == AccountAddress::TEN +} + +pub fn find_coin_currency(currencies: &HashSet, type_tag: &TypeTag) -> Option { + currencies + .iter() + .find(|currency| { + if let Some(CurrencyMetadata { + move_type: Some(ref move_type), + fa_address: _, + }) = currency.metadata + { + move_type == &type_tag.to_string() + } else { + false + } + }) + .cloned() +} +pub fn find_fa_currency( + currencies: &HashSet, + metadata_address: AccountAddress, +) -> Option { + if is_native_coin(metadata_address) { + Some(native_coin()) } else { - Err(ApiError::UnsupportedCurrency(Some(currency.symbol.clone()))) + let val = currencies + .iter() + .find(|currency| { + if let Some(CurrencyMetadata { + move_type: _, + fa_address: Some(ref fa_address), + }) = currency.metadata + { + // TODO: Probably want to cache this + AccountAddress::from_str(fa_address) + .map(|addr| addr == metadata_address) + .unwrap_or(false) + } else { + false + } + }) + .cloned(); + val } } @@ -316,15 +357,27 @@ pub fn to_hex_lower(obj: &T) -> String { } /// Retrieves the currency from the given parameters -/// TODO: What do do about the type params? 
-/// TODO: Handle other currencies, will need to be passed in as a config file or something on startup -pub fn parse_currency(address: AccountAddress, module: &str, name: &str) -> ApiResult { - match (address, module, name) { - (AccountAddress::ONE, APTOS_COIN_MODULE, APTOS_COIN_RESOURCE) => Ok(native_coin()), - _ => Err(ApiError::TransactionParseError(Some(format!( - "Invalid coin for transfer {}::{}::{}", - address, module, name - )))), +pub fn parse_coin_currency( + server_context: &RosettaContext, + struct_tag: &StructTag, +) -> ApiResult { + if let Some(currency) = server_context.currencies.iter().find(|currency| { + if let Some(move_type) = currency + .metadata + .as_ref() + .and_then(|inner| inner.move_type.as_ref()) + { + struct_tag.to_string() == *move_type + } else { + false + } + }) { + Ok(currency.clone()) + } else { + Err(ApiError::TransactionParseError(Some(format!( + "Invalid coin for transfer {}", + struct_tag + )))) } } diff --git a/crates/aptos-rosetta/src/construction.rs b/crates/aptos-rosetta/src/construction.rs index 6a5bdc2ca3b91..4a2aadb752d8e 100644 --- a/crates/aptos-rosetta/src/construction.rs +++ b/crates/aptos-rosetta/src/construction.rs @@ -26,8 +26,8 @@ use crate::{ common::{ - check_network, decode_bcs, decode_key, encode_bcs, get_account, handle_request, - native_coin, parse_currency, with_context, + check_network, decode_bcs, decode_key, encode_bcs, find_fa_currency, get_account, + handle_request, native_coin, parse_coin_currency, with_context, }, error::{ApiError, ApiResult}, types::{InternalOperation, *}, @@ -39,10 +39,7 @@ use aptos_crypto::{ }; use aptos_global_constants::adjust_gas_headroom; use aptos_logger::debug; -use aptos_sdk::{ - move_types::language_storage::{StructTag, TypeTag}, - transaction_builder::TransactionFactory, -}; +use aptos_sdk::{move_types::language_storage::TypeTag, transaction_builder::TransactionFactory}; use aptos_types::{ account_address::AccountAddress, chain_id::ChainId, @@ -556,8 +553,9 @@ async fn 
construction_parse( module.name().as_str(), function_name.as_str(), ) { - (AccountAddress::ONE, COIN_MODULE, TRANSFER_FUNCTION) => { - parse_transfer_operation(sender, &type_args, &args)? + (AccountAddress::ONE, COIN_MODULE, TRANSFER_FUNCTION) + | (AccountAddress::ONE, APTOS_ACCOUNT_MODULE, TRANSFER_COINS_FUNCTION) => { + parse_transfer_operation(&server_context, sender, &type_args, &args)? }, (AccountAddress::ONE, APTOS_ACCOUNT_MODULE, TRANSFER_FUNCTION) => { parse_account_transfer_operation(sender, &type_args, &args)? @@ -565,6 +563,12 @@ async fn construction_parse( (AccountAddress::ONE, APTOS_ACCOUNT_MODULE, CREATE_ACCOUNT_FUNCTION) => { parse_create_account_operation(sender, &type_args, &args)? }, + (AccountAddress::ONE, PRIMARY_FUNGIBLE_STORE_MODULE, TRANSFER_FUNCTION) => { + parse_primary_fa_transfer_operation(&server_context, sender, &type_args, &args)? + }, + (AccountAddress::ONE, FUNGIBLE_ASSET_MODULE, TRANSFER_FUNCTION) => { + parse_fa_transfer_operation(&server_context, sender, &type_args, &args)? + }, ( AccountAddress::ONE, STAKING_CONTRACT_MODULE, @@ -663,6 +667,7 @@ fn parse_create_account_operation( /// Parses 0x1::coin::transfer(receiver: address, amount: u64) fn parse_transfer_operation( + server_context: &RosettaContext, sender: AccountAddress, type_args: &[TypeTag], args: &[Vec], @@ -671,16 +676,7 @@ fn parse_transfer_operation( // Check coin is the native coin let currency = match type_args.first() { - Some(TypeTag::Struct(struct_tag)) => { - let StructTag { - address, - module, - name, - .. - } = &**struct_tag; - - parse_currency(*address, module.as_str(), name.as_str())? 
- }, + Some(TypeTag::Struct(struct_tag)) => parse_coin_currency(server_context, struct_tag)?, _ => { return Err(ApiError::TransactionParseError(Some( "No coin type in transfer".to_string(), @@ -770,6 +766,136 @@ fn parse_account_transfer_operation( Ok(operations) } +/// Parses 0x1::primary_fungible_store::transfer(metadata: address, receiver: address, amount: u64) +fn parse_primary_fa_transfer_operation( + server_context: &RosettaContext, + sender: AccountAddress, + type_args: &[TypeTag], + args: &[Vec], +) -> ApiResult> { + // There should be one type arg + if type_args.len() != 1 { + return Err(ApiError::TransactionParseError(Some(format!( + "Primary fungible store transfer should have one type argument: {:?}", + type_args + )))); + } + let mut operations = Vec::new(); + + // Retrieve the args for the operations + let metadata: AccountAddress = if let Some(metadata) = args.first() { + bcs::from_bytes(metadata)? + } else { + return Err(ApiError::TransactionParseError(Some( + "No metadata address in primary fungible transfer".to_string(), + ))); + }; + let receiver: AccountAddress = if let Some(receiver) = args.get(1) { + bcs::from_bytes(receiver)? + } else { + return Err(ApiError::TransactionParseError(Some( + "No receiver address in primary fungible transfer".to_string(), + ))); + }; + let amount: u64 = if let Some(amount) = args.get(2) { + bcs::from_bytes(amount)? 
+ } else { + return Err(ApiError::TransactionParseError(Some( + "No amount in primary fungible transfer".to_string(), + ))); + }; + + // Grab currency accordingly + + let maybe_currency = find_fa_currency(&server_context.currencies, metadata); + + if let Some(currency) = maybe_currency { + operations.push(Operation::withdraw( + 0, + None, + AccountIdentifier::base_account(sender), + currency.clone(), + amount, + )); + operations.push(Operation::deposit( + 1, + None, + AccountIdentifier::base_account(receiver), + currency.clone(), + amount, + )); + Ok(operations) + } else { + Err(ApiError::UnsupportedCurrency(Some(metadata.to_string()))) + } +} + +/// Parses 0x1::fungible_asset::transfer(metadata: address, receiver: address, amount: u64) +/// +/// This is only for using directly from a store, please prefer using primary fa. +fn parse_fa_transfer_operation( + server_context: &RosettaContext, + sender: AccountAddress, + type_args: &[TypeTag], + args: &[Vec], +) -> ApiResult> { + // There is one type arg for the object + if type_args.len() != 1 { + return Err(ApiError::TransactionParseError(Some(format!( + "Fungible asset transfer should have one type argument: {:?}", + type_args + )))); + } + let mut operations = Vec::new(); + + // Retrieve the args for the operations + let metadata: AccountAddress = if let Some(metadata) = args.first() { + bcs::from_bytes(metadata)? + } else { + return Err(ApiError::TransactionParseError(Some( + "No metadata address in fungible asset transfer".to_string(), + ))); + }; + let receiver: AccountAddress = if let Some(receiver) = args.get(1) { + bcs::from_bytes(receiver)? + } else { + return Err(ApiError::TransactionParseError(Some( + "No receiver address in fungible asset transfer".to_string(), + ))); + }; + let amount: u64 = if let Some(amount) = args.get(2) { + bcs::from_bytes(amount)? 
+ } else { + return Err(ApiError::TransactionParseError(Some( + "No amount in fungible transfer".to_string(), + ))); + }; + + // Grab currency accordingly + + let maybe_currency = find_fa_currency(&server_context.currencies, metadata); + + if let Some(currency) = maybe_currency { + operations.push(Operation::withdraw( + 0, + None, + AccountIdentifier::base_account(sender), + currency.clone(), + amount, + )); + operations.push(Operation::deposit( + 1, + None, + AccountIdentifier::base_account(receiver), + currency.clone(), + amount, + )); + Ok(operations) + } else { + Err(ApiError::UnsupportedCurrency(Some(metadata.to_string()))) + } +} + /// Parses a specific BCS function argument to the given type pub fn parse_function_arg( name: &str, @@ -1050,7 +1176,7 @@ async fn construction_payloads( check_network(request.network_identifier, &server_context)?; // Retrieve the real operation we're doing, this identifies the sub-operations to a function - let mut operation = InternalOperation::extract(&request.operations)?; + let mut operation = InternalOperation::extract(&server_context, &request.operations)?; // For some reason, metadata is optional on the Rosetta spec, we enforce it here, otherwise we // can't build the [RawTransaction] offline. 
@@ -1304,7 +1430,7 @@ async fn construction_preprocess( check_network(request.network_identifier, &server_context)?; // Determine the actual operation from the collection of Rosetta [Operation] - let internal_operation = InternalOperation::extract(&request.operations)?; + let internal_operation = InternalOperation::extract(&server_context, &request.operations)?; // Provide the accounts that need public keys (there's only one supported today) let required_public_keys = vec![AccountIdentifier::base_account(internal_operation.sender())]; diff --git a/crates/aptos-rosetta/src/lib.rs b/crates/aptos-rosetta/src/lib.rs index 792084f9edf4f..7a0726c9c80ea 100644 --- a/crates/aptos-rosetta/src/lib.rs +++ b/crates/aptos-rosetta/src/lib.rs @@ -7,14 +7,15 @@ use crate::{ block::BlockRetriever, - common::{handle_request, with_context}, + common::{handle_request, native_coin, with_context}, error::{ApiError, ApiResult}, + types::Currency, }; use aptos_config::config::ApiConfig; use aptos_logger::debug; use aptos_types::{account_address::AccountAddress, chain_id::ChainId}; use aptos_warp_webserver::{logger, Error, WebServer}; -use std::{convert::Infallible, sync::Arc}; +use std::{collections::HashSet, convert::Infallible, sync::Arc}; use tokio::task::JoinHandle; use warp::{ http::{HeaderValue, Method, StatusCode}, @@ -31,6 +32,9 @@ pub mod common; pub mod error; pub mod types; +#[cfg(test)] +mod test; + pub const NODE_VERSION: &str = "0.1"; pub const ROSETTA_VERSION: &str = "1.4.12"; @@ -43,6 +47,8 @@ pub struct RosettaContext { pub chain_id: ChainId, /// Block index cache pub block_cache: Option>, + /// Set of supported currencies + pub currencies: HashSet, } impl RosettaContext { @@ -50,11 +56,16 @@ impl RosettaContext { rest_client: Option>, chain_id: ChainId, block_cache: Option>, + mut currencies: HashSet, ) -> Self { + // Always add APT + currencies.insert(native_coin()); + RosettaContext { rest_client, chain_id, block_cache, + currencies, } } @@ -80,12 +91,18 @@ pub fn 
bootstrap( chain_id: ChainId, api_config: ApiConfig, rest_client: Option, + supported_currencies: HashSet, ) -> anyhow::Result { let runtime = aptos_runtimes::spawn_named_runtime("rosetta".into(), None); debug!("Starting up Rosetta server with {:?}", api_config); - runtime.spawn(bootstrap_async(chain_id, api_config, rest_client)); + runtime.spawn(bootstrap_async( + chain_id, + api_config, + rest_client, + supported_currencies, + )); Ok(runtime) } @@ -94,6 +111,7 @@ pub async fn bootstrap_async( chain_id: ChainId, api_config: ApiConfig, rest_client: Option, + supported_currencies: HashSet, ) -> anyhow::Result> { debug!("Starting up Rosetta server with {:?}", api_config); @@ -123,7 +141,13 @@ pub async fn bootstrap_async( )) }); - let context = RosettaContext::new(rest_client.clone(), chain_id, block_cache).await; + let context = RosettaContext::new( + rest_client.clone(), + chain_id, + block_cache, + supported_currencies, + ) + .await; api.serve(routes(context)).await; }); Ok(handle) diff --git a/crates/aptos-rosetta/src/main.rs b/crates/aptos-rosetta/src/main.rs index 1e136ae75f017..11574dd66ca18 100644 --- a/crates/aptos-rosetta/src/main.rs +++ b/crates/aptos-rosetta/src/main.rs @@ -8,10 +8,12 @@ use aptos_config::config::{ApiConfig, DEFAULT_MAX_PAGE_SIZE}; use aptos_logger::prelude::*; use aptos_node::AptosNodeArgs; -use aptos_rosetta::bootstrap; +use aptos_rosetta::{bootstrap, common::native_coin, types::Currency}; use aptos_types::chain_id::ChainId; use clap::Parser; use std::{ + collections::HashSet, + fs::File, net::SocketAddr, path::PathBuf, sync::{ @@ -85,8 +87,13 @@ async fn main() { println!("aptos-rosetta: Starting rosetta"); // Ensure runtime for Rosetta is up and running - let _rosetta = bootstrap(args.chain_id(), args.api_config(), args.rest_client()) - .expect("aptos-rosetta: Should bootstrap rosetta server"); + let _rosetta = bootstrap( + args.chain_id(), + args.api_config(), + args.rest_client(), + args.supported_currencies(), + ) + 
.expect("aptos-rosetta: Should bootstrap rosetta server"); println!("aptos-rosetta: Rosetta started"); // Run until there is an interrupt @@ -106,6 +113,9 @@ trait ServerArgs { /// Retrieve the chain id fn chain_id(&self) -> ChainId; + + /// Supported currencies for the service + fn supported_currencies(&self) -> HashSet; } /// Aptos Rosetta API Server @@ -146,6 +156,14 @@ impl ServerArgs for CommandArgs { CommandArgs::Online(args) => args.chain_id(), } } + + fn supported_currencies(&self) -> HashSet { + match self { + CommandArgs::OnlineRemote(args) => args.supported_currencies(), + CommandArgs::Offline(args) => args.supported_currencies(), + CommandArgs::Online(args) => args.supported_currencies(), + } + } } #[derive(Debug, Parser)] @@ -170,6 +188,31 @@ pub struct OfflineArgs { /// This can be configured to change performance characteristics #[clap(long, default_value_t = DEFAULT_MAX_PAGE_SIZE)] transactions_page_size: u16, + + /// A file of currencies to support other than APT + /// + /// Example file for testnet: + /// ```json + /// [ + /// { + /// "symbol": "TC", + /// "decimals": 4, + /// "metadata": { + /// "fa_address": "0xb528ad40e472f8fcf0f21aa78aecd09fe68f6208036a5845e6d16b7d561c83b8", + /// "move_type": "0xf5a9b6ccc95f8ad3c671ddf1e227416e71f7bcd3c971efe83c0ae8e5e028350f::test_faucet::TestFaucetCoin" + /// } + /// }, + /// { + /// "symbol": "TFA", + /// "decimals": 4, + /// "metadata": { + /// "fa_address": "0x7e51ad6e79cd113f5abe08f53ed6a3c2bfbf88561a24ae10b9e1e822e0623dfd" + /// } + /// } + /// ] + /// ``` + #[clap(long)] + currency_config_file: Option, } impl ServerArgs for OfflineArgs { @@ -192,6 +235,21 @@ impl ServerArgs for OfflineArgs { fn chain_id(&self) -> ChainId { self.chain_id } + + fn supported_currencies(&self) -> HashSet { + let mut supported_currencies = HashSet::new(); + supported_currencies.insert(native_coin()); + + if let Some(ref filepath) = self.currency_config_file { + let file = File::open(filepath).unwrap(); + let currencies: 
Vec = serde_json::from_reader(file).unwrap(); + currencies.into_iter().for_each(|item| { + supported_currencies.insert(item); + }); + } + + supported_currencies + } } #[derive(Debug, Parser)] @@ -218,6 +276,10 @@ impl ServerArgs for OnlineRemoteArgs { fn chain_id(&self) -> ChainId { self.offline_args.chain_id } + + fn supported_currencies(&self) -> HashSet { + self.offline_args.supported_currencies() + } } #[derive(Debug, Parser)] @@ -242,6 +304,10 @@ impl ServerArgs for OnlineLocalArgs { fn chain_id(&self) -> ChainId { self.online_args.offline_args.chain_id } + + fn supported_currencies(&self) -> HashSet { + self.online_args.offline_args.supported_currencies() + } } #[test] diff --git a/crates/aptos-rosetta/src/test/mod.rs b/crates/aptos-rosetta/src/test/mod.rs new file mode 100644 index 0000000000000..ac5c9428b095c --- /dev/null +++ b/crates/aptos-rosetta/src/test/mod.rs @@ -0,0 +1,466 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::native_coin, + types::{ + Currency, CurrencyMetadata, FungibleAssetChangeEvent, ObjectCore, OperationType, + Transaction, FUNGIBLE_ASSET_MODULE, FUNGIBLE_STORE_RESOURCE, OBJECT_CORE_RESOURCE, + OBJECT_MODULE, OBJECT_RESOURCE_GROUP, + }, + RosettaContext, +}; +use aptos_crypto::{ + ed25519::{Ed25519PrivateKey, Ed25519Signature}, + HashValue, PrivateKey, Uniform, +}; +use aptos_rest_client::aptos_api_types::{ResourceGroup, TransactionOnChainData}; +use aptos_types::{ + account_config::fungible_store::FungibleStoreResource, + chain_id::ChainId, + contract_event::ContractEvent, + event::{EventHandle, EventKey}, + on_chain_config::CurrentTimeMicroseconds, + state_store::{state_key::StateKey, state_value::StateValueMetadata}, + test_helpers::transaction_test_helpers::get_test_raw_transaction, + transaction::{ExecutionStatus, TransactionInfo, TransactionInfoV0}, + write_set::{WriteOp, WriteSet, WriteSetMut}, +}; +use move_core_types::{ + account_address::AccountAddress, + ident_str, + 
language_storage::{StructTag, TypeTag}, +}; +use once_cell::sync::Lazy; +use serde::Serialize; +use std::{collections::HashSet, str::FromStr}; + +const APT_ADDRESS: AccountAddress = AccountAddress::TEN; +const OTHER_CURRENCY_ADDRESS: &str = "0x12341234123412341234123412341234"; +static OTHER_CURRENCY: Lazy = Lazy::new(|| Currency { + symbol: "FUN".to_string(), + decimals: 2, + metadata: Some(CurrencyMetadata { + move_type: None, + fa_address: Some(OTHER_CURRENCY_ADDRESS.to_string()), + }), +}); + +async fn test_rosetta_context() -> RosettaContext { + let mut currencies = HashSet::new(); + currencies.insert(OTHER_CURRENCY.clone()); + + RosettaContext::new(None, ChainId::test(), None, currencies).await +} + +fn test_transaction( + sender: AccountAddress, + version: u64, + changes: WriteSet, + events: Vec, +) -> TransactionOnChainData { + // generate random key + let private_key = Ed25519PrivateKey::generate_for_testing(); + + TransactionOnChainData { + version, + transaction: aptos_types::transaction::Transaction::UserTransaction( + aptos_types::transaction::SignedTransaction::new( + get_test_raw_transaction( + sender, + 0, // Sequence number doesn't matter for this + None, // TODO: payload + None, // Expiration timestamp + Some(101), // Gas unit price, specifically make it different than 100 to check calculations + None, // Max gas amount + ), + // Dummy keys and signatures + private_key.public_key(), + Ed25519Signature::dummy_signature(), + ), + ), + info: TransactionInfo::V0(TransactionInfoV0::new( + HashValue::random(), + HashValue::random(), + HashValue::random(), + None, + 178, // gas used, chosen arbitrarily + ExecutionStatus::Success, // TODO: Add other statuses + )), + events, + accumulator_root_hash: Default::default(), + changes, + } +} + +fn resource_group_modification_write_op( + address: &AccountAddress, + resource: &StructTag, + input: &T, +) -> (StateKey, WriteOp) { + let encoded = bcs::to_bytes(input).unwrap(); + let state_key = 
StateKey::resource_group(address, resource); + let write_op = WriteOp::Modification { + data: encoded.into(), + metadata: StateValueMetadata::new(0, 0, &CurrentTimeMicroseconds { microseconds: 0 }), + }; + (state_key, write_op) +} + +struct FaData { + fa_metadata_address: AccountAddress, + owner: AccountAddress, + store_address: AccountAddress, + previous_balance: u64, + deposit: bool, + amount: u64, +} + +impl FaData { + fn create_change(&self) -> (Vec<(StateKey, WriteOp)>, Vec) { + let object_core = ObjectCore { + guid_creation_num: 0, + owner: self.owner, + allow_ungated_transfer: false, + transfer_events: EventHandle::new(EventKey::new(42, self.owner), 22), + }; + + let (new_balance, contract_event) = if self.deposit { + let type_tag = TypeTag::Struct(Box::new(StructTag { + address: AccountAddress::ONE, + module: ident_str!(FUNGIBLE_ASSET_MODULE).into(), + name: ident_str!("Deposit").into(), + type_args: vec![], + })); + let event = FungibleAssetChangeEvent { + store: self.store_address, + amount: self.amount, + }; + ( + self.previous_balance + self.amount, + ContractEvent::new_v2(type_tag, bcs::to_bytes(&event).unwrap()), + ) + } else { + let event = FungibleAssetChangeEvent { + store: self.store_address, + amount: self.amount, + }; + let type_tag = TypeTag::Struct(Box::new(StructTag { + address: AccountAddress::ONE, + module: ident_str!(FUNGIBLE_ASSET_MODULE).into(), + name: ident_str!("Withdraw").into(), + type_args: vec![], + })); + + ( + self.previous_balance - self.amount, + ContractEvent::new_v2(type_tag, bcs::to_bytes(&event).unwrap()), + ) + }; + + let store = FungibleStoreResource::new(self.fa_metadata_address, new_balance, false); + let mut group = ResourceGroup::new(); + group.insert( + StructTag { + address: AccountAddress::ONE, + module: ident_str!(OBJECT_MODULE).into(), + name: ident_str!(OBJECT_CORE_RESOURCE).into(), + type_args: vec![], + }, + bcs::to_bytes(&object_core).unwrap(), + ); + group.insert( + StructTag { + address: 
AccountAddress::ONE, + module: ident_str!(FUNGIBLE_ASSET_MODULE).into(), + name: ident_str!(FUNGIBLE_STORE_RESOURCE).into(), + type_args: vec![], + }, + bcs::to_bytes(&store).unwrap(), + ); + + let write_ops = vec![ + // Update sender + resource_group_modification_write_op( + &self.store_address, + &StructTag { + address: AccountAddress::ONE, + module: ident_str!(OBJECT_MODULE).into(), + name: ident_str!(OBJECT_RESOURCE_GROUP).into(), + type_args: vec![], + }, + &group, + ), + ]; + + (write_ops, vec![contract_event]) + } +} + +fn mint_fa_output( + owner: AccountAddress, + fa_address: AccountAddress, + store_address: AccountAddress, + previous_balance: u64, + amount: u64, +) -> (WriteSet, Vec) { + let (minter_ops, minter_events) = FaData { + fa_metadata_address: fa_address, + owner, + store_address, + previous_balance, + deposit: true, + amount, + } + .create_change(); + + let write_set = WriteSetMut::new(minter_ops).freeze().unwrap(); + (write_set, minter_events) +} +fn transfer_fa_output( + owner: AccountAddress, + fa_address: AccountAddress, + store_address: AccountAddress, + previous_balance: u64, + dest: AccountAddress, + dest_store_address: AccountAddress, + dest_previous_balance: u64, + amount: u64, +) -> (WriteSet, Vec) { + let (mut sender_ops, mut sender_events) = FaData { + fa_metadata_address: fa_address, + owner, + store_address, + previous_balance, + deposit: false, + amount, + } + .create_change(); + + let (mut dest_ops, mut dest_events) = FaData { + fa_metadata_address: fa_address, + owner: dest, + store_address: dest_store_address, + previous_balance: dest_previous_balance, + deposit: true, + amount, + } + .create_change(); + sender_ops.append(&mut dest_ops); + sender_events.append(&mut dest_events); + let write_set = WriteSetMut::new(sender_ops).freeze().unwrap(); + (write_set, sender_events) +} + +#[tokio::test] +async fn test_fa_mint() { + let context = test_rosetta_context().await; + + let version = 0; + let amount = 100; + let sender = 
AccountAddress::random(); + let store_address = AccountAddress::random(); + let (mint_changes, mint_events) = mint_fa_output(sender, APT_ADDRESS, store_address, 0, amount); + let input = test_transaction(sender, version, mint_changes, mint_events); + + let result = Transaction::from_transaction(&context, input).await; + let expected_txn = result.expect("Must succeed"); + assert_eq!(2, expected_txn.operations.len()); + + // TODO: Check that reading is working correctly + let operation_1 = expected_txn.operations.first().unwrap(); + assert_eq!( + operation_1.operation_type, + OperationType::Deposit.to_string() + ); + assert_eq!( + operation_1.amount.as_ref().unwrap().value, + format!("{}", amount) + ); + assert_eq!( + operation_1 + .account + .as_ref() + .unwrap() + .account_address() + .unwrap(), + sender, + ); + let operation_2 = expected_txn.operations.get(1).unwrap(); + assert_eq!(operation_2.operation_type, OperationType::Fee.to_string()); + assert_eq!( + operation_2 + .account + .as_ref() + .unwrap() + .account_address() + .unwrap(), + sender, + ); + // TODO: Check fee +} + +#[tokio::test] +async fn test_fa_transfer() { + let context = test_rosetta_context().await; + + let version = 0; + let amount = 100; + let sender = AccountAddress::random(); + let receiver = AccountAddress::random(); + let store_address = AccountAddress::random(); + let receiver_store_address = AccountAddress::random(); + let (changes, events) = transfer_fa_output( + sender, + APT_ADDRESS, + store_address, + amount * 2, + receiver, + receiver_store_address, + 0, + amount, + ); + let input = test_transaction(sender, version, changes, events); + + let result = Transaction::from_transaction(&context, input).await; + let expected_txn = result.expect("Must succeed"); + assert_eq!(3, expected_txn.operations.len(), "Ops: {:#?}", expected_txn); + + // TODO: Check that reading is working correctly + // TODO: Do we want to order these? 
+ let operation_1 = expected_txn.operations.first().unwrap(); + assert_eq!( + operation_1 + .account + .as_ref() + .unwrap() + .account_address() + .unwrap(), + sender + ); + assert_eq!( + operation_1.operation_type, + OperationType::Withdraw.to_string() + ); + assert_eq!( + operation_1.amount.as_ref().unwrap().value, + format!("-{}", amount) + ); + let operation_2 = expected_txn.operations.get(1).unwrap(); + assert_eq!( + operation_2.operation_type, + OperationType::Deposit.to_string() + ); + assert_eq!( + operation_2.amount.as_ref().unwrap().value, + format!("{}", amount) + ); + assert_eq!( + operation_2 + .account + .as_ref() + .unwrap() + .account_address() + .unwrap(), + receiver + ); + let operation_3 = expected_txn.operations.get(2).unwrap(); + assert_eq!(operation_3.operation_type, OperationType::Fee.to_string()); + assert_eq!( + operation_3 + .account + .as_ref() + .unwrap() + .account_address() + .unwrap(), + sender + ); + // TODO: Check fee +} + +#[tokio::test] +async fn test_fa_transfer_other_currency() { + let context = test_rosetta_context().await; + + let version = 0; + let amount = 100; + let sender = AccountAddress::random(); + let receiver = AccountAddress::random(); + let store_address = AccountAddress::random(); + let receiver_store_address = AccountAddress::random(); + let (changes, events) = transfer_fa_output( + sender, + AccountAddress::from_str(OTHER_CURRENCY_ADDRESS).unwrap(), + store_address, + amount * 2, + receiver, + receiver_store_address, + 0, + amount, + ); + let input = test_transaction(sender, version, changes, events); + + let result = Transaction::from_transaction(&context, input).await; + let expected_txn = result.expect("Must succeed"); + assert_eq!(3, expected_txn.operations.len(), "Ops: {:#?}", expected_txn); + + // TODO: Check that reading is working correctly + // TODO: Do we want to order these? 
+ let operation_1 = expected_txn.operations.first().unwrap(); + assert_eq!( + operation_1 + .account + .as_ref() + .unwrap() + .account_address() + .unwrap(), + sender + ); + assert_eq!( + operation_1.operation_type, + OperationType::Withdraw.to_string() + ); + assert_eq!( + operation_1.amount.as_ref().unwrap().value, + format!("-{}", amount) + ); + assert_eq!( + operation_1.amount.as_ref().unwrap().currency, + OTHER_CURRENCY.to_owned() + ); + let operation_2 = expected_txn.operations.get(1).unwrap(); + assert_eq!( + operation_2.operation_type, + OperationType::Deposit.to_string() + ); + assert_eq!( + operation_2.amount.as_ref().unwrap().value, + format!("{}", amount) + ); + assert_eq!( + operation_2 + .account + .as_ref() + .unwrap() + .account_address() + .unwrap(), + receiver + ); + assert_eq!( + operation_2.amount.as_ref().unwrap().currency, + OTHER_CURRENCY.to_owned() + ); + let operation_3 = expected_txn.operations.get(2).unwrap(); + assert_eq!(operation_3.operation_type, OperationType::Fee.to_string()); + assert_eq!( + operation_3 + .account + .as_ref() + .unwrap() + .account_address() + .unwrap(), + sender + ); + assert_eq!(operation_3.amount.as_ref().unwrap().currency, native_coin()); + // TODO: Check fee +} diff --git a/crates/aptos-rosetta/src/types/move_types.rs b/crates/aptos-rosetta/src/types/move_types.rs index eff18d095fbb4..286296833ca46 100644 --- a/crates/aptos-rosetta/src/types/move_types.rs +++ b/crates/aptos-rosetta/src/types/move_types.rs @@ -16,6 +16,10 @@ pub const STAKING_PROXY_MODULE: &str = "staking_proxy"; pub const STAKING_CONTRACT_MODULE: &str = "staking_contract"; pub const VESTING_MODULE: &str = "vesting"; pub const DELEGATION_POOL_MODULE: &str = "delegation_pool"; +pub const OBJECT_MODULE: &str = "object"; +pub const PRIMARY_FUNGIBLE_STORE_MODULE: &str = "primary_fungible_store"; +pub const FUNGIBLE_ASSET_MODULE: &str = "fungible_asset"; +pub const DISPATCHABLE_FUNGIBLE_ASSET_MODULE: &str = "dispatchable_fungible_asset"; pub const 
ACCOUNT_RESOURCE: &str = "Account"; pub const APTOS_COIN_RESOURCE: &str = "AptosCoin"; @@ -24,13 +28,19 @@ pub const COIN_STORE_RESOURCE: &str = "CoinStore"; pub const STAKE_POOL_RESOURCE: &str = "StakePool"; pub const STAKING_CONTRACT_RESOURCE: &str = "StakingContract"; pub const STORE_RESOURCE: &str = "Store"; +pub const FUNGIBLE_STORE_RESOURCE: &str = "FungibleStore"; pub const STAKING_GROUP_UPDATE_COMMISSION_RESOURCE: &str = "StakingGroupUpdateCommissionEvent"; pub const VESTING_RESOURCE: &str = "Vesting"; pub const DELEGATION_POOL_RESOURCE: &str = "DelegationPool"; pub const WITHDRAW_STAKE_EVENT: &str = "WithdrawStakeEvent"; +pub const OBJECT_CORE_RESOURCE: &str = "ObjectCore"; + +pub const OBJECT_RESOURCE_GROUP: &str = "ObjectGroup"; pub const CREATE_ACCOUNT_FUNCTION: &str = "create_account"; pub const TRANSFER_FUNCTION: &str = "transfer"; +pub const TRANSFER_COINS_FUNCTION: &str = "transfer_coins"; +pub const BALANCE_FUNCTION: &str = "balance"; // Staking Contract pub const RESET_LOCKUP_FUNCTION: &str = "reset_lockup"; @@ -245,3 +255,17 @@ pub struct WithdrawUndelegatedEvent { pub delegator_address: AccountAddress, pub amount_withdrawn: u64, } + +#[derive(Debug, Serialize, Deserialize)] +pub struct FungibleAssetChangeEvent { + pub store: AccountAddress, + pub amount: u64, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ObjectCore { + pub guid_creation_num: u64, + pub owner: AccountAddress, + pub allow_ungated_transfer: bool, + pub transfer_events: EventHandle, +} diff --git a/crates/aptos-rosetta/src/types/objects.rs b/crates/aptos-rosetta/src/types/objects.rs index f58c1fb50a795..1109fa900eb98 100644 --- a/crates/aptos-rosetta/src/types/objects.rs +++ b/crates/aptos-rosetta/src/types/objects.rs @@ -6,7 +6,7 @@ //! 
[Spec](https://www.rosetta-api.org/docs/api_objects.html) use crate::{ - common::{is_native_coin, native_coin, native_coin_tag}, + common::{find_coin_currency, find_fa_currency, native_coin}, construction::{ parse_create_stake_pool_operation, parse_delegation_pool_add_stake_operation, parse_delegation_pool_unlock_operation, parse_delegation_pool_withdraw_operation, @@ -25,11 +25,15 @@ use anyhow::anyhow; use aptos_cached_packages::aptos_stdlib; use aptos_crypto::{ed25519::Ed25519PublicKey, ValidCryptoMaterialStringExt}; use aptos_logger::warn; -use aptos_rest_client::aptos_api_types::{TransactionOnChainData, U64}; +use aptos_rest_client::aptos_api_types::{ResourceGroup, TransactionOnChainData, U64}; use aptos_types::{ + access_path::Path, account_address::AccountAddress, - account_config::{AccountResource, CoinStoreResourceUntyped, WithdrawEvent}, - contract_event::{ContractEvent, FEE_STATEMENT_EVENT_TYPE}, + account_config::{ + fungible_store::FungibleStoreResource, AccountResource, CoinStoreResourceUntyped, + WithdrawEvent, + }, + contract_event::{ContractEvent, ContractEventV2, FEE_STATEMENT_EVENT_TYPE}, event::EventKey, fee_statement::FeeStatement, stake_pool::{SetOperatorEvent, StakePool}, @@ -38,17 +42,27 @@ use aptos_types::{ write_set::{WriteOp, WriteSet}, }; use itertools::Itertools; -use move_core_types::language_storage::TypeTag; +use move_core_types::{ + ident_str, + language_storage::{ModuleId, StructTag, TypeTag}, + parser::parse_type_tag, +}; +use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; use std::{ cmp::Ordering, - collections::{BTreeMap, HashMap}, + collections::{BTreeMap, HashMap, HashSet}, convert::TryFrom, fmt::{Display, Formatter}, hash::Hash, str::FromStr, }; +static WITHDRAW_TYPE_TAG: Lazy = + Lazy::new(|| parse_type_tag("0x1::fungible_asset::Withdraw").unwrap()); +static DEPOSIT_TYPE_TAG: Lazy = + Lazy::new(|| parse_type_tag("0x1::fungible_asset::Deposit").unwrap()); + /// A description of all types used by the Rosetta 
implementation. /// /// This is used to verify correctness of the implementation and to check things like @@ -148,10 +162,17 @@ pub struct Currency { #[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)] pub struct CurrencyMetadata { - pub move_type: String, + /// Move coin type e.g. 0x1::aptos_coin::AptosCoin + #[serde(skip_serializing_if = "Option::is_none")] + pub move_type: Option, + /// Fungible Asset Address e.g. 0xA + #[serde(skip_serializing_if = "Option::is_none")] + pub fa_address: Option, } -/// Various signing curves supported by Rosetta. We only use [`CurveType::Edwards25519`] +/// Various signing curves supported by Rosetta. +/// +/// We only use [`CurveType::Edwards25519`] /// [API Spec](https://www.rosetta-api.org/docs/models/CurveType.html) #[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)] #[serde(rename_all = "snake_case")] @@ -904,18 +925,37 @@ impl Transaction { let mut operations = vec![]; let mut operation_index: u64 = 0; if successful { - // Parse all operations from the writeset changes in a success + let mut object_to_owner = HashMap::new(); + let mut store_to_currency = HashMap::new(); + let mut framework_changes = vec![]; + // Not the most efficient, parse all store owners, and assets associated with stores for (state_key, write_op) in &txn.changes { - let mut ops = parse_operations_from_write_set( + let new_changes = preprocess_write_set( server_context, state_key, write_op, - &events, - maybe_user_txn.map(|inner| inner.sender()), maybe_user_txn.map(|inner| inner.payload()), txn.version, + &mut object_to_owner, + &mut store_to_currency, + ); + framework_changes.extend(new_changes); + } + + // Parse all operations from the writeset changes in a success + for (struct_tag, account_address, data) in &framework_changes { + let mut ops = parse_operations_from_write_set( + server_context, + struct_tag, + *account_address, + data, + &events, // TODO: Filter events down to framework events only + 
maybe_user_txn.map(|inner| inner.sender()), + txn.version, operation_index, - &txn.changes, + &txn.changes, // TODO: Move to parsed framework_changes + &mut object_to_owner, + &mut store_to_currency, ) .await?; operation_index += ops.len() as u64; @@ -940,6 +980,7 @@ impl Transaction { // Parse all failed operations from the payload if let Some(user_txn) = maybe_user_txn { let mut ops = parse_failed_operations_from_txn_payload( + &server_context.currencies, operation_index, user_txn.sender(), user_txn.payload(), @@ -986,6 +1027,7 @@ impl Transaction { /// This case only occurs if the transaction failed, and that's because it's less accurate /// than just following the state changes fn parse_failed_operations_from_txn_payload( + currencies: &HashSet, operation_index: u64, sender: AccountAddress, payload: &TransactionPayload, @@ -997,16 +1039,17 @@ fn parse_failed_operations_from_txn_payload( inner.module().name().as_str(), inner.function().as_str(), ) { - (AccountAddress::ONE, COIN_MODULE, TRANSFER_FUNCTION) => { - // Only put the transfer in if we can understand the currency + (AccountAddress::ONE, COIN_MODULE, TRANSFER_FUNCTION) + | (AccountAddress::ONE, APTOS_ACCOUNT_MODULE, TRANSFER_COINS_FUNCTION) => { + // We could add a create here as well on transfer_coins, but we don't know if it will actually happen if let Some(type_tag) = inner.ty_args().first() { - // We don't want to do lookups on failures for currencies that don't exist, - // so we only look up cached info not new info - // TODO: If other coins are supported, this will need to be updated to handle more coins - if type_tag == &native_coin_tag() { - operations = parse_transfer_from_txn_payload( + // Find currency from type tag + let maybe_currency = find_coin_currency(currencies, type_tag); + + if let Some(currency) = maybe_currency { + operations = parse_coin_transfer_from_txn_payload( inner, - native_coin(), + currency.clone(), sender, operation_index, ) @@ -1015,8 +1058,35 @@ fn 
parse_failed_operations_from_txn_payload( }, (AccountAddress::ONE, APTOS_ACCOUNT_MODULE, TRANSFER_FUNCTION) => { // We could add a create here as well, but we don't know if it will actually happen - operations = - parse_transfer_from_txn_payload(inner, native_coin(), sender, operation_index) + operations = parse_coin_transfer_from_txn_payload( + inner, + native_coin(), + sender, + operation_index, + ) + }, + (AccountAddress::ONE, PRIMARY_FUNGIBLE_STORE_MODULE, TRANSFER_FUNCTION) => { + // Primary transfer has the same interface as coin transfer, but it's a metadata address instead of a coin type generic + let maybe_metadata_address = inner + .args() + .first() + .map(|encoded| bcs::from_bytes::(encoded)); + if let Some(Ok(addr)) = maybe_metadata_address { + // Find currency from type tag + let maybe_currency = find_fa_currency(currencies, addr); + + if let Some(currency) = maybe_currency { + operations = parse_primary_fa_transfer_from_txn_payload( + inner, + currency.clone(), + sender, + operation_index, + ) + } + } + }, + (AccountAddress::ONE, DISPATCHABLE_FUNGIBLE_ASSET_MODULE, TRANSFER_FUNCTION) => { + // TODO: This isn't really easy to handle atm, objects get messy, need owners etc. 
}, (AccountAddress::ONE, ACCOUNT_MODULE, CREATE_ACCOUNT_FUNCTION) => { if let Some(Ok(address)) = inner @@ -1159,20 +1229,62 @@ fn parse_failed_operations_from_txn_payload( } /// Parses a 0x1::coin::transfer to a Withdraw and Deposit -fn parse_transfer_from_txn_payload( +fn parse_coin_transfer_from_txn_payload( payload: &EntryFunction, currency: Currency, sender: AccountAddress, operation_index: u64, ) -> Vec { - let mut operations = vec![]; - let args = payload.args(); let maybe_receiver = args .first() .map(|encoded| bcs::from_bytes::(encoded)); let maybe_amount = args.get(1).map(|encoded| bcs::from_bytes::(encoded)); + build_transfer_operations( + payload, + operation_index, + sender, + maybe_receiver, + maybe_amount, + currency, + ) +} + +/// Parses a 0x1::primary_fungible_store::transfer to a Withdraw and Deposit +fn parse_primary_fa_transfer_from_txn_payload( + payload: &EntryFunction, + currency: Currency, + sender: AccountAddress, + operation_index: u64, +) -> Vec { + let args = payload.args(); + let maybe_receiver = args + .get(1) + .map(|encoded| bcs::from_bytes::(encoded)); + let maybe_amount = args.get(2).map(|encoded| bcs::from_bytes::(encoded)); + + build_transfer_operations( + payload, + operation_index, + sender, + maybe_receiver, + maybe_amount, + currency, + ) +} + +/// Builds operations for a coin or FA transfer +fn build_transfer_operations( + payload: &EntryFunction, + operation_index: u64, + sender: AccountAddress, + maybe_receiver: Option>, + maybe_amount: Option>, + currency: Currency, +) -> Vec { + let mut operations = vec![]; + if let (Some(Ok(receiver)), Some(Ok(amount))) = (maybe_receiver, maybe_amount) { operations.push(Operation::withdraw( operation_index, @@ -1204,35 +1316,17 @@ fn parse_transfer_from_txn_payload( /// It is more accurate because untracked scripts are included in balance operations async fn parse_operations_from_write_set( server_context: &RosettaContext, - state_key: &StateKey, - write_op: &WriteOp, + struct_tag: 
&StructTag, + address: AccountAddress, + data: &[u8], events: &[ContractEvent], maybe_sender: Option, - _maybe_payload: Option<&TransactionPayload>, version: u64, operation_index: u64, changes: &WriteSet, + object_to_owner: &mut HashMap, + store_to_currency: &mut HashMap, ) -> ApiResult> { - let (struct_tag, address) = match state_key.inner() { - StateKeyInner::AccessPath(path) => { - if let Some(struct_tag) = path.get_struct_tag() { - (struct_tag, path.address) - } else { - return Ok(vec![]); - } - }, - _ => { - // Ignore all but access path - return Ok(vec![]); - }, - }; - - let bytes = match write_op.bytes() { - Some(bytes) => bytes, - None => return Ok(vec![]), - }; - let data = &bytes; - // Determine operation match ( struct_tag.address, @@ -1240,6 +1334,7 @@ async fn parse_operations_from_write_set( struct_tag.name.as_str(), struct_tag.type_args.len(), ) { + // TODO: Handle object transfer for transfer of fungible asset stores (AccountAddress::ONE, ACCOUNT_MODULE, ACCOUNT_RESOURCE, 0) => { parse_account_resource_changes(version, address, data, maybe_sender, operation_index) }, @@ -1269,17 +1364,18 @@ async fn parse_operations_from_write_set( }, (AccountAddress::ONE, COIN_MODULE, COIN_STORE_RESOURCE, 1) => { if let Some(type_tag) = struct_tag.type_args.first() { - // TODO: This will need to be updated to support more coins - if type_tag == &native_coin_tag() { + // Find the currency and parse it accordingly + let maybe_currency = find_coin_currency(&server_context.currencies, type_tag); + + if let Some(currency) = maybe_currency { parse_coinstore_changes( - native_coin(), + currency.clone(), version, address, data, events, operation_index, ) - .await } else { Ok(vec![]) } @@ -1291,6 +1387,15 @@ async fn parse_operations_from_write_set( Ok(vec![]) } }, + (AccountAddress::ONE, FUNGIBLE_ASSET_MODULE, FUNGIBLE_STORE_RESOURCE, 0) => { + parse_fungible_store_changes( + object_to_owner, + store_to_currency, + address, + events, + operation_index, + ) + }, _ => { // 
Any unknown type will just skip the operations Ok(vec![]) @@ -1298,6 +1403,114 @@ async fn parse_operations_from_write_set( } } +fn parse_write_set<'a>( + state_key: &'a StateKey, + write_op: &'a WriteOp, +) -> Option<(StructTag, AccountAddress, &'a [u8])> { + let (struct_tag, address) = match state_key.inner() { + StateKeyInner::AccessPath(path) => match path.get_path() { + Path::Resource(struct_tag) => (struct_tag, path.address), + Path::ResourceGroup(group_tag) => (group_tag, path.address), + _ => return None, + }, + _ => { + // Ignore all but access path + return None; + }, + }; + + let bytes = match write_op.bytes() { + Some(bytes) => bytes, + None => return None, + }; + + Some((struct_tag, address, bytes)) +} + +fn preprocess_write_set<'a>( + server_context: &RosettaContext, + state_key: &'a StateKey, + write_op: &'a WriteOp, + _maybe_payload: Option<&TransactionPayload>, + version: u64, + object_to_owner: &mut HashMap, + store_to_currency: &mut HashMap, +) -> Vec<(StructTag, AccountAddress, Vec)> { + let write_set_data = parse_write_set(state_key, write_op); + if write_set_data.is_none() { + return vec![]; + } + let (struct_tag, address, data) = write_set_data.unwrap(); + + // Determine owners of stores, and metadata addresses for stores + let mut resources = vec![]; + match ( + struct_tag.address, + struct_tag.module.as_str(), + struct_tag.name.as_str(), + ) { + (AccountAddress::ONE, OBJECT_MODULE, OBJECT_RESOURCE_GROUP) => { + // Parse the underlying resources in the group + let maybe_resource_group = bcs::from_bytes::(data); + let resource_group = match maybe_resource_group { + Ok(resource_group) => resource_group, + Err(err) => { + warn!( + "Failed to parse object resource group in version {}: {:#}", + version, err + ); + return vec![]; + }, + }; + + for (struct_tag, bytes) in resource_group.iter() { + match ( + struct_tag.address, + struct_tag.module.as_str(), + struct_tag.name.as_str(), + ) { + (AccountAddress::ONE, OBJECT_MODULE, OBJECT_CORE_RESOURCE) 
=> { + parse_object_owner(address, bytes, object_to_owner); + }, + (AccountAddress::ONE, FUNGIBLE_ASSET_MODULE, FUNGIBLE_STORE_RESOURCE) => { + parse_fungible_store_metadata( + &server_context.currencies, + version, + address, + bytes, + store_to_currency, + ); + }, + _ => {}, + } + + // Filter out transactions that are not framework + if struct_tag.address == AccountAddress::ONE { + resources.push((struct_tag.clone(), address, bytes.clone())); + } + } + }, + (AccountAddress::ONE, ..) => { + // Filter out transactions that are not framework + // TODO: maybe be more strict on what we filter + resources.push((struct_tag.clone(), address, data.to_vec())); + }, + _ => {}, + } + + resources +} + +fn parse_object_owner( + object_address: AccountAddress, + data: &[u8], + object_to_owner: &mut HashMap, +) { + if let Ok(object_core) = bcs::from_bytes::(data) { + object_to_owner.insert(object_address, object_core.owner); + } +} + /// Parses any account resource changes, in this case only create account is supported fn parse_account_resource_changes( version: u64, @@ -1785,7 +1998,7 @@ async fn parse_delegation_pool_resource_changes( } /// Parses coin store direct changes, for withdraws and deposits -async fn parse_coinstore_changes( +fn parse_coinstore_changes( currency: Currency, version: u64, address: AccountAddress, @@ -1835,6 +2048,91 @@ async fn parse_coinstore_changes( Ok(operations) } +fn parse_fungible_store_metadata( + currencies: &HashSet, + version: u64, + address: AccountAddress, + data: &[u8], + store_to_currency: &mut HashMap, +) { + let fungible_store: FungibleStoreResource = if let Ok(fungible_store) = bcs::from_bytes(data) { + fungible_store + } else { + warn!( + "Fungible store failed to parse for address {} at version {} : {}", + address, + version, + hex::encode(data) + ); + return; + }; + + let metadata_address = fungible_store.metadata(); + let maybe_currency = find_fa_currency(currencies, metadata_address); + if let Some(currency) = maybe_currency { + 
store_to_currency.insert(address, currency); + } +} + +/// Parses fungible store direct changes, for withdraws and deposits +/// +/// Note that, we don't know until we introspect the change, which fa it is +fn parse_fungible_store_changes( + object_to_owner: &HashMap, + store_to_currency: &HashMap, + address: AccountAddress, + events: &[ContractEvent], + mut operation_index: u64, +) -> ApiResult> { + let mut operations = vec![]; + + // Find the fungible asset currency association + let maybe_currency = store_to_currency.get(&address); + if maybe_currency.is_none() { + return Ok(operations); + } + let currency = maybe_currency.unwrap(); + + // If there's a currency, let's fill in operations + // If we don't have an owner here, there's missing data on the writeset + let maybe_owner = object_to_owner.get(&address); + if maybe_owner.is_none() { + warn!( + "First pass did not catch owner for fungible store \"{}\", returning no operations", + address + ); + return Ok(operations); + } + + let owner = maybe_owner.copied().unwrap(); + + let withdraw_amounts = get_amount_from_fa_event(events, &WITHDRAW_TYPE_TAG, address); + for amount in withdraw_amounts { + operations.push(Operation::withdraw( + operation_index, + Some(OperationStatusType::Success), + AccountIdentifier::base_account(owner), + currency.clone(), + amount, + )); + operation_index += 1; + } + + let deposit_amounts = get_amount_from_fa_event(events, &DEPOSIT_TYPE_TAG, address); + for amount in deposit_amounts { + operations.push(Operation::deposit( + operation_index, + Some(OperationStatusType::Success), + AccountIdentifier::base_account(owner), + currency.clone(), + amount, + )); + operation_index += 1; + } + + Ok(operations) +} + /// Pulls the balance change from a withdraw or deposit event fn get_amount_from_event(events: &[ContractEvent], event_key: &EventKey) -> Vec { filter_events(events, event_key, |event_key, event| { @@ -1852,6 +2150,27 @@ fn get_amount_from_event(events: &[ContractEvent], event_key: 
&EventKey) -> Vec< }) } +/// Pulls the balance change from a withdraw or deposit event +fn get_amount_from_fa_event( + events: &[ContractEvent], + type_tag: &TypeTag, + store_address: AccountAddress, +) -> Vec { + filter_v2_events(type_tag, events, |event| { + if let Ok(event) = bcs::from_bytes::(event.event_data()) { + if event.store == store_address { + Some(event.amount) + } else { + None + } + } else { + // If we can't parse the withdraw event, then there's nothing + warn!("Failed to parse fungible store event! Skipping"); + None + } + }) +} + /// Filter v2 FeeStatement events with non-zero storage_fee_refund fn get_fee_statement_from_event(events: &[ContractEvent]) -> Vec { events @@ -1886,6 +2205,20 @@ fn filter_events Option, T>( .collect() } +fn filter_v2_events Option, T>( + event_type: &TypeTag, + events: &[ContractEvent], + parser: F, +) -> Vec { + events + .iter() + .filter(|event| event.is_v2()) + .map(|event| event.v2().unwrap()) + .filter(|event| event_type == event.type_tag()) + .filter_map(parser) + .collect() +} + /// An enum for processing which operation is in a transaction pub enum OperationDetails { CreateAccount, @@ -1917,7 +2250,10 @@ pub enum InternalOperation { impl InternalOperation { /// Pulls the [`InternalOperation`] from the set of [`Operation`] /// TODO: this needs to be broken up - pub fn extract(operations: &Vec) -> ApiResult { + pub fn extract( + server_context: &RosettaContext, + operations: &Vec, + ) -> ApiResult { match operations.len() { // Single operation actions 1 => { @@ -2165,7 +2501,10 @@ impl InternalOperation { )))) }, // Double operation actions (only coin transfer) - 2 => Ok(Self::Transfer(Transfer::extract_transfer(operations)?)), + 2 => Ok(Self::Transfer(Transfer::extract_transfer( + server_context, + operations, + )?)), // Anything else is not expected _ => Err(ApiError::InvalidOperations(Some(format!( "Unrecognized operation combination {:?}", @@ -2201,11 +2540,73 @@ impl InternalOperation { 
create_account.sender, ), InternalOperation::Transfer(transfer) => { - is_native_coin(&transfer.currency)?; - ( - aptos_stdlib::aptos_account_transfer(transfer.receiver, transfer.amount.0), - transfer.sender, - ) + // Check if the currency is known + let currency = &transfer.currency; + + // We special case APT, because we don't want the behavior to change + if currency == &native_coin() { + return Ok(( + aptos_stdlib::aptos_account_transfer(transfer.receiver, transfer.amount.0), + transfer.sender, + )); + } + + // For all other coins and FAs we need to handle them accordingly + if let Some(ref metadata) = currency.metadata { + match (&metadata.move_type, &metadata.fa_address) { + // For currencies with the coin type, we will always use the coin functionality, even if migrated + (Some(coin_type), Some(_)) | (Some(coin_type), None) => { + let coin_type_tag = parse_type_tag(coin_type) + .map_err(|err| ApiError::InvalidInput(Some(err.to_string())))?; + ( + aptos_stdlib::aptos_account_transfer_coins( + coin_type_tag, + transfer.receiver, + transfer.amount.0, + ), + transfer.sender, + ) + }, + // For FA only currencies, we use the FA functionality + (None, Some(fa_address_str)) => { + let fa_address = AccountAddress::from_str(fa_address_str)?; + + ( + TransactionPayload::EntryFunction(EntryFunction::new( + ModuleId::new( + AccountAddress::ONE, + ident_str!("primary_fungible_store").to_owned(), + ), + ident_str!("transfer").to_owned(), + vec![TypeTag::Struct(Box::new(StructTag { + address: AccountAddress::ONE, + module: ident_str!(OBJECT_MODULE).into(), + name: ident_str!(OBJECT_CORE_RESOURCE).into(), + type_args: vec![], + }))], + vec![ + bcs::to_bytes(&fa_address).unwrap(), + bcs::to_bytes(&transfer.receiver).unwrap(), + bcs::to_bytes(&transfer.amount.0).unwrap(), + ], + )), + transfer.sender, + ) + }, + _ => { + return Err(ApiError::InvalidInput(Some(format!( + "{} does not have a move type provided", + currency.symbol + )))) + }, + } + } else { + // This should never 
happen unless the server's currency list is improperly set + return Err(ApiError::InvalidInput(Some(format!( + "{} does not have a currency information provided", + currency.symbol + )))); + } }, InternalOperation::SetOperator(set_operator) => { if set_operator.old_operator.is_none() { @@ -2312,7 +2713,10 @@ pub struct Transfer { } impl Transfer { - pub fn extract_transfer(operations: &Vec) -> ApiResult { + pub fn extract_transfer( + server_context: &RosettaContext, + operations: &Vec, + ) -> ApiResult { // Only support 1:1 P2P transfer // This is composed of a Deposit and a Withdraw operation if operations.len() != 2 { @@ -2384,8 +2788,14 @@ impl Transfer { } // Check that the currency is supported - // TODO: in future use currency, since there's more than just 1 - is_native_coin(&withdraw_amount.currency)?; + if !server_context + .currencies + .contains(&withdraw_amount.currency) + { + return Err(ApiError::UnsupportedCurrency(Some( + withdraw_amount.currency.symbol.clone(), + ))); + } let withdraw_value = i128::from_str(&withdraw_amount.value) .map_err(|_| ApiError::InvalidTransferOperations(Some("Withdraw amount is invalid")))?; diff --git a/crates/aptos/CHANGELOG.md b/crates/aptos/CHANGELOG.md index db5ed7ada5e72..6c7f37148bbd1 100644 --- a/crates/aptos/CHANGELOG.md +++ b/crates/aptos/CHANGELOG.md @@ -4,6 +4,8 @@ All notable changes to the Aptos CLI will be captured in this file. This project ## Unreleased - `aptos move fmt` formats move files inside the `tests` and `examples` directory of a package. +- Add `aptos update prover-dependencies`, which installs the dependency of Move prover, boogie, z3 and cvc5. +- Update the default version of `movefmt` to be installed from 1.0.4 to 1.0.5 ## [4.2.3] - 2024/09/20 - Fix the broken indexer in localnet in 4.2.2, which migrates table info from sycn to async ways. 
diff --git a/crates/aptos/Cargo.toml b/crates/aptos/Cargo.toml index df8ba8c787130..fd51fdc4b7872 100644 --- a/crates/aptos/Cargo.toml +++ b/crates/aptos/Cargo.toml @@ -78,6 +78,7 @@ move-disassembler = { workspace = true } move-ir-types = { workspace = true } move-model = { workspace = true } move-package = { workspace = true } +move-prover-boogie-backend = { workspace = true } move-symbol-pool = { workspace = true } move-unit-test = { workspace = true, features = ["debugging"] } move-vm-runtime = { workspace = true, features = ["testing"] } @@ -94,6 +95,7 @@ serde = { workspace = true } serde_json = { workspace = true } serde_yaml = { workspace = true } server-framework = { git = "https://github.com/aptos-labs/aptos-indexer-processors.git", rev = "51a34901b40d7f75767ac907b4d2478104d6a515" } +set_env = { workspace = true } tempfile = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } diff --git a/crates/aptos/src/account/multisig_account.rs b/crates/aptos/src/account/multisig_account.rs index a65a351cec56f..637a613aa0a33 100644 --- a/crates/aptos/src/account/multisig_account.rs +++ b/crates/aptos/src/account/multisig_account.rs @@ -200,11 +200,11 @@ impl CliCommand for VerifyProposal { .to_hex_literal() // If full payload not provided, get payload hash directly from transaction proposal: } else { - view_json_option_str(&multisig_transaction["payload_hash"])?.ok_or( + view_json_option_str(&multisig_transaction["payload_hash"])?.ok_or_else(|| { CliError::UnexpectedError( "Neither payload nor payload hash provided on-chain".to_string(), - ), - )? + ) + })? }; // Get verification result based on if expected and actual payload hashes match. 
if expected_payload_hash.eq(&actual_payload_hash) { diff --git a/crates/aptos/src/common/init.rs b/crates/aptos/src/common/init.rs index 46a5a7da8f6ad..db590ff97faea 100644 --- a/crates/aptos/src/common/init.rs +++ b/crates/aptos/src/common/init.rs @@ -231,9 +231,9 @@ impl CliCommand<()> for InitTool { let public_key = if self.is_hardware_wallet() { let pub_key = match aptos_ledger::get_public_key( derivation_path - .ok_or(CliError::UnexpectedError( - "Invalid derivation path".to_string(), - ))? + .ok_or_else(|| { + CliError::UnexpectedError("Invalid derivation path".to_string()) + })? .as_str(), false, ) { diff --git a/crates/aptos/src/common/types.rs b/crates/aptos/src/common/types.rs index c9a6be08e4c23..0b5fc13047670 100644 --- a/crates/aptos/src/common/types.rs +++ b/crates/aptos/src/common/types.rs @@ -1899,7 +1899,7 @@ impl TransactionOptions { let sequence_number = account.sequence_number; let balance = client - .get_account_balance_at_version(sender_address, version) + .view_apt_account_balance_at_version(sender_address, version) .await .map_err(|err| CliError::ApiError(err.to_string()))? 
.into_inner(); @@ -1908,7 +1908,7 @@ impl TransactionOptions { if gas_unit_price == 0 { DEFAULT_MAX_GAS } else { - std::cmp::min(balance.coin.value.0 / gas_unit_price, DEFAULT_MAX_GAS) + std::cmp::min(balance / gas_unit_price, DEFAULT_MAX_GAS) } }); @@ -2218,9 +2218,7 @@ impl TryInto for &EntryFunctionArguments { fn try_into(self) -> Result { self.function_id .clone() - .ok_or(CliError::CommandArgumentError( - "No function ID provided".to_string(), - )) + .ok_or_else(|| CliError::CommandArgumentError("No function ID provided".to_string())) } } diff --git a/crates/aptos/src/governance/delegation_pool.rs b/crates/aptos/src/governance/delegation_pool.rs index 5d836241af694..b6824e179e84f 100644 --- a/crates/aptos/src/governance/delegation_pool.rs +++ b/crates/aptos/src/governance/delegation_pool.rs @@ -222,13 +222,13 @@ async fn is_partial_governance_voting_enabled_for_delegation_pool( None, ) .await?; - response.inner()[0] - .as_bool() - .ok_or(CliError::UnexpectedError( + response.inner()[0].as_bool().ok_or_else(|| { + CliError::UnexpectedError( "Unexpected response from node when checking if partial governance_voting is \ enabled for delegation pool" .to_string(), - )) + ) + }) } async fn get_remaining_voting_power( @@ -255,14 +255,13 @@ async fn get_remaining_voting_power( None, ) .await?; - let remaining_voting_power_str = - response.inner()[0] - .as_str() - .ok_or(CliError::UnexpectedError(format!( - "Unexpected response from node when getting remaining voting power of {}\ + let remaining_voting_power_str = response.inner()[0].as_str().ok_or_else(|| { + CliError::UnexpectedError(format!( + "Unexpected response from node when getting remaining voting power of {}\ in delegation pool {}", - pool_address, voter_address - )))?; + pool_address, voter_address + )) + })?; remaining_voting_power_str.parse().map_err(|err| { CliError::UnexpectedError(format!( "Unexpected response from node when getting remaining voting power of {}\ diff --git 
a/crates/aptos/src/move_tool/mod.rs b/crates/aptos/src/move_tool/mod.rs index 72b8c243b891d..4b15f51e6b63e 100644 --- a/crates/aptos/src/move_tool/mod.rs +++ b/crates/aptos/src/move_tool/mod.rs @@ -1569,7 +1569,7 @@ async fn submit_chunked_publish_transactions( match result { Ok(tx_summary) => { let tx_hash = tx_summary.transaction_hash.to_string(); - let status = tx_summary.success.map_or("".to_string(), |success| { + let status = tx_summary.success.map_or_else(String::new, |success| { if success { "Success".to_string() } else { diff --git a/crates/aptos/src/node/local_testnet/mod.rs b/crates/aptos/src/node/local_testnet/mod.rs index 752851dde4337..9f56a68f7ef67 100644 --- a/crates/aptos/src/node/local_testnet/mod.rs +++ b/crates/aptos/src/node/local_testnet/mod.rs @@ -17,7 +17,6 @@ pub mod traits; use self::{ faucet::FaucetArgs, - health_checker::HealthChecker, indexer_api::IndexerApiArgs, logging::ThreadNameMakeWriter, node::NodeArgs, @@ -41,6 +40,7 @@ use anyhow::{Context, Result}; use aptos_indexer_grpc_server_framework::setup_logging; use async_trait::async_trait; use clap::Parser; +pub use health_checker::HealthChecker; use std::{ collections::HashSet, fs::{create_dir_all, remove_dir_all}, diff --git a/crates/aptos/src/update/aptos.rs b/crates/aptos/src/update/aptos.rs index 56cab921e3a1f..4a1d7e77d8ffd 100644 --- a/crates/aptos/src/update/aptos.rs +++ b/crates/aptos/src/update/aptos.rs @@ -49,8 +49,8 @@ impl BinaryUpdater for AptosUpdateTool { self.check } - fn pretty_name(&self) -> &'static str { - "Aptos CLI" + fn pretty_name(&self) -> String { + "Aptos CLI".to_string() } /// Return information about whether an update is required. 
diff --git a/crates/aptos/src/update/mod.rs b/crates/aptos/src/update/mod.rs index f854bca2219fe..58aee4cf5d688 100644 --- a/crates/aptos/src/update/mod.rs +++ b/crates/aptos/src/update/mod.rs @@ -7,6 +7,8 @@ mod aptos; mod helpers; mod movefmt; +mod prover_dependencies; +mod prover_dependency_installer; mod revela; mod tool; mod update_helper; @@ -25,7 +27,7 @@ trait BinaryUpdater { fn check(&self) -> bool; /// Only used for messages we print to the user. - fn pretty_name(&self) -> &'static str; + fn pretty_name(&self) -> String; /// Return information about whether an update is required. fn get_update_info(&self) -> Result; diff --git a/crates/aptos/src/update/movefmt.rs b/crates/aptos/src/update/movefmt.rs index f614c1f3d5686..2dd229a430271 100644 --- a/crates/aptos/src/update/movefmt.rs +++ b/crates/aptos/src/update/movefmt.rs @@ -13,7 +13,7 @@ use self_update::update::ReleaseUpdate; use std::path::PathBuf; const FORMATTER_BINARY_NAME: &str = "movefmt"; -const TARGET_FORMATTER_VERSION: &str = "1.0.4"; +const TARGET_FORMATTER_VERSION: &str = "1.0.5"; const FORMATTER_EXE_ENV: &str = "FORMATTER_EXE"; #[cfg(target_os = "windows")] @@ -64,8 +64,8 @@ impl BinaryUpdater for FormatterUpdateTool { self.check } - fn pretty_name(&self) -> &'static str { - "movefmt" + fn pretty_name(&self) -> String { + "movefmt".to_string() } /// Return information about whether an update is required. 
@@ -127,5 +127,6 @@ pub fn get_movefmt_path() -> Result { FORMATTER_EXE_ENV, FORMATTER_BINARY_NAME, FORMATTER_EXE, + true, ) } diff --git a/crates/aptos/src/update/prover_dependencies.rs b/crates/aptos/src/update/prover_dependencies.rs new file mode 100644 index 0000000000000..7c3e24c9d72d0 --- /dev/null +++ b/crates/aptos/src/update/prover_dependencies.rs @@ -0,0 +1,213 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + cli_build_information, + common::types::{CliCommand, CliError, CliTypedResult}, + update::{ + get_additional_binaries_dir, prover_dependency_installer::DependencyInstaller, + update_binary, + }, +}; +use anyhow::{Context, Result}; +use aptos_build_info::BUILD_OS; +use async_trait::async_trait; +use clap::Parser; +use move_prover_boogie_backend::options::{ + BoogieOptions, MAX_BOOGIE_VERSION, MAX_CVC5_VERSION, MAX_Z3_VERSION, MIN_BOOGIE_VERSION, + MIN_CVC5_VERSION, MIN_Z3_VERSION, +}; +use std::{ + env, + path::{Path, PathBuf}, +}; + +pub(crate) const REPO_NAME: &str = "prover-dependency"; +pub(crate) const REPO_OWNER: &str = "aptos-labs"; + +pub(crate) const BOOGIE_BINARY_NAME: &str = "boogie"; +pub(crate) const TARGET_BOOGIE_VERSION: &str = "3.2.4"; + +pub(crate) const BOOGIE_EXE_ENV: &str = "BOOGIE_EXE"; +#[cfg(target_os = "windows")] +pub(crate) const BOOGIE_EXE: &str = "boogie.exe"; +#[cfg(not(target_os = "windows"))] +pub(crate) const BOOGIE_EXE: &str = "boogie"; + +const Z3_BINARY_NAME: &str = "z3"; +const TARGET_Z3_VERSION: &str = "4.11.2"; + +const Z3_EXE_ENV: &str = "Z3_EXE"; +#[cfg(target_os = "windows")] +const Z3_EXE: &str = "z3.exe"; +#[cfg(not(target_os = "windows"))] +const Z3_EXE: &str = "z3"; + +const CVC5_BINARY_NAME: &str = "cvc5"; +const TARGET_CVC5_VERSION: &str = "0.0.3"; + +const CVC5_EXE_ENV: &str = "CVC5_EXE"; +#[cfg(target_os = "windows")] +const CVC5_EXE: &str = "cvc5.exe"; +#[cfg(not(target_os = "windows"))] +const CVC5_EXE: &str = "cvc5"; + +/// Install dependencies (boogie, z3 
and cvc5) for Move prover +#[derive(Debug, Parser)] +pub struct ProverDependencyInstaller { + /// Where to install binaries of boogie, z3 and cvc5. If not + /// given we will put it in a standard location for your OS. + #[clap(long)] + install_dir: Option, +} + +impl ProverDependencyInstaller { + fn add_env_var(&self, env_var: &str, install_path: &Path) -> Result<(), CliError> { + if let Ok(current_value) = env::var(env_var) { + if current_value == install_path.to_string_lossy() { + return Ok(()); + } else { + return Err(CliError::UnexpectedError(format!( + "{} is already set to a different value: {}.", + env_var, current_value + ))); + } + } + + set_env::set(env_var, install_path.to_string_lossy()) + .map_err(|e| CliError::UnexpectedError(format!("Failed to set {}: {}", env_var, e)))?; + println!( + "Added {} to environment with value: {} to the profile.", + env_var, + install_path.to_string_lossy() + ); + if env::var(env_var).is_err() { + eprintln!("Please use the `source` command or reboot the terminal to check whether {} is set with the correct value. \ + If not, please set it manually.", env_var); + } + Ok(()) + } + + async fn download_dependency(&self) -> CliTypedResult { + let build_info = cli_build_information(); + let _ = match build_info.get(BUILD_OS).context("Failed to determine build info of current CLI")?.as_str() { + "linux-x86_64" => Ok("linux"), + "macos-aarch64" | "macos-x86_64" => Ok("macos"), + "windows-x86_64" => Ok("win"), + wildcard => Err(CliError::UnexpectedError(format!("Self-updating is not supported on your OS ({}) right now, please download the binary manually", wildcard))), + }; + + let install_dir = match self.install_dir.clone() { + Some(dir) => dir, + None => { + let dir = get_additional_binaries_dir(); + // Make the directory if it doesn't already exist. 
+ std::fs::create_dir_all(&dir) + .with_context(|| format!("Failed to create directory: {:?}", dir))?; + dir + }, + }; + + BoogieOptions::check_version_is_compatible( + BOOGIE_BINARY_NAME, + &format!("{}.0", TARGET_BOOGIE_VERSION), + MIN_BOOGIE_VERSION, + MAX_BOOGIE_VERSION, + )?; + let res = self + .install_binary( + install_dir.clone(), + BOOGIE_EXE, + BOOGIE_BINARY_NAME, + TARGET_BOOGIE_VERSION, + "/", + "Boogie program verifier version ", + BOOGIE_EXE_ENV, + ) + .await?; + println!("{}", res); + + BoogieOptions::check_version_is_compatible( + Z3_BINARY_NAME, + TARGET_Z3_VERSION, + MIN_Z3_VERSION, + MAX_Z3_VERSION, + )?; + let res = self + .install_binary( + install_dir.clone(), + Z3_EXE, + Z3_BINARY_NAME, + TARGET_Z3_VERSION, + "--", + "Z3 version ", + Z3_EXE_ENV, + ) + .await?; + println!("{}", res); + + #[cfg(unix)] + { + BoogieOptions::check_version_is_compatible( + CVC5_BINARY_NAME, + TARGET_CVC5_VERSION, + MIN_CVC5_VERSION, + MAX_CVC5_VERSION, + )?; + let res = self + .install_binary( + install_dir.clone(), + CVC5_EXE, + CVC5_BINARY_NAME, + TARGET_CVC5_VERSION, + "--", + "This is cvc5 version ", + CVC5_EXE_ENV, + ) + .await?; + println!("{}", res); + } + + Ok("Succeeded".to_string()) + } + + async fn install_binary( + &self, + install_dir: PathBuf, + exe_name: &str, + binary_name: &str, + version: &str, + version_option_string: &str, + version_match_string: &str, + env_name: &str, + ) -> CliTypedResult { + let installer = DependencyInstaller { + binary_name: binary_name.to_string(), + exe_name: exe_name.to_string(), + env_var: env_name.to_string(), + version_option_string: version_option_string.to_string(), + version_match_string: version_match_string.to_string(), + target_version: version.to_string(), + install_dir: Some(install_dir.clone()), + check: false, + }; + let result = update_binary(installer).await?; + + let install_dir = install_dir.join(exe_name); + if let Err(err) = self.add_env_var(env_name, &install_dir) { + eprintln!("{:#}. 
Please set it manually", err); + } + Ok(result) + } +} + +#[async_trait] +impl CliCommand for ProverDependencyInstaller { + fn command_name(&self) -> &'static str { + "InstallProverDependencies" + } + + async fn execute(self) -> CliTypedResult { + self.download_dependency().await + } +} diff --git a/crates/aptos/src/update/prover_dependency_installer.rs b/crates/aptos/src/update/prover_dependency_installer.rs new file mode 100644 index 0000000000000..346f1ad476f6c --- /dev/null +++ b/crates/aptos/src/update/prover_dependency_installer.rs @@ -0,0 +1,115 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use super::{BinaryUpdater, UpdateRequiredInfo}; +use crate::update::{ + prover_dependencies::{REPO_NAME, REPO_OWNER}, + update_helper::{build_updater, get_path}, +}; +use anyhow::{Context, Result}; +use self_update::update::ReleaseUpdate; +use std::path::PathBuf; + +/// Update Prover dependency. +#[derive(Debug)] +pub struct DependencyInstaller { + /// Binary name, such as `boogie` or `z3` + pub binary_name: String, + + /// Actual executable name such as `boogie.exe` in Windows + pub exe_name: String, + + /// Environment variable to be updated + pub env_var: String, + + /// The string output by the tool before the version + pub version_match_string: String, + + /// The option string to be passed to the command such as "/" or "--" + pub version_option_string: String, + + /// The version to be installed + pub target_version: String, + + /// The directory to install the binary + pub install_dir: Option, + + /// Whether to check availability of a newer version + pub check: bool, +} + +impl DependencyInstaller { + fn extract_version(&self, input: &str) -> String { + use regex::Regex; + let version_format = format!(r"{}{}", self.version_match_string, r"\d+\.\d+\.\d+"); + let re = Regex::new(&version_format).unwrap(); + if let Some(caps) = re.captures(input) { + let version = caps.get(0).unwrap().as_str().to_string(); + return version + 
.trim_start_matches(&self.version_match_string) + .to_string(); + } + String::new() + } + + pub fn get_path(&self) -> Result { + get_path( + &self.binary_name, + &self.env_var, + &self.binary_name, + &self.exe_name, + false, + ) + } +} + +impl BinaryUpdater for DependencyInstaller { + fn check(&self) -> bool { + self.check + } + + fn pretty_name(&self) -> String { + self.binary_name.clone() + } + + /// Return information about whether an update is required. + fn get_update_info(&self) -> Result { + // Get the current version, if any. + let dependency_path = self.get_path(); + let current_version = match dependency_path { + Ok(path) if path.exists() => { + let output = std::process::Command::new(path) + .arg(format!("{}version", self.version_option_string)) + .output() + .context("Failed to get current version")?; + let stdout = String::from_utf8(output.stdout) + .context("Failed to parse current version as UTF-8")?; + let version = self.extract_version(&stdout); + if !version.is_empty() { + Some(version) + } else { + None + } + }, + _ => None, + }; + + Ok(UpdateRequiredInfo { + current_version, + target_version: self.target_version.trim_start_matches('v').to_string(), + }) + } + + fn build_updater(&self, info: &UpdateRequiredInfo) -> Result> { + build_updater( + info, + self.install_dir.clone(), + REPO_OWNER.to_string(), + REPO_NAME.to_string(), + &self.binary_name, + "unknown-linux-gnu", + "apple-darwin", + "windows", + ) + } +} diff --git a/crates/aptos/src/update/revela.rs b/crates/aptos/src/update/revela.rs index 84fb0674793a3..3d4910a34d088 100644 --- a/crates/aptos/src/update/revela.rs +++ b/crates/aptos/src/update/revela.rs @@ -54,8 +54,8 @@ impl BinaryUpdater for RevelaUpdateTool { self.check } - fn pretty_name(&self) -> &'static str { - "Revela" + fn pretty_name(&self) -> String { + "Revela".to_string() } /// Return information about whether an update is required. 
@@ -115,5 +115,11 @@ impl CliCommand for RevelaUpdateTool { } pub fn get_revela_path() -> Result { - get_path("decompiler", REVELA_EXE_ENV, REVELA_BINARY_NAME, REVELA_EXE) + get_path( + "decompiler", + REVELA_EXE_ENV, + REVELA_BINARY_NAME, + REVELA_EXE, + false, + ) } diff --git a/crates/aptos/src/update/tool.rs b/crates/aptos/src/update/tool.rs index c56fd9f58b65b..79724cfb91c76 100644 --- a/crates/aptos/src/update/tool.rs +++ b/crates/aptos/src/update/tool.rs @@ -4,7 +4,7 @@ use super::{aptos::AptosUpdateTool, revela::RevelaUpdateTool}; use crate::{ common::types::{CliCommand, CliResult}, - update::movefmt::FormatterUpdateTool, + update::{movefmt::FormatterUpdateTool, prover_dependencies::ProverDependencyInstaller}, }; use clap::Subcommand; @@ -14,6 +14,7 @@ pub enum UpdateTool { Aptos(AptosUpdateTool), Revela(RevelaUpdateTool), Movefmt(FormatterUpdateTool), + ProverDependencies(ProverDependencyInstaller), } impl UpdateTool { @@ -22,6 +23,7 @@ impl UpdateTool { UpdateTool::Aptos(tool) => tool.execute_serialized().await, UpdateTool::Revela(tool) => tool.execute_serialized().await, UpdateTool::Movefmt(tool) => tool.execute_serialized().await, + UpdateTool::ProverDependencies(tool) => tool.execute_serialized().await, } } } diff --git a/crates/aptos/src/update/update_helper.rs b/crates/aptos/src/update/update_helper.rs index 50797eae0e85f..96a657813d41c 100644 --- a/crates/aptos/src/update/update_helper.rs +++ b/crates/aptos/src/update/update_helper.rs @@ -75,7 +75,13 @@ pub fn build_updater( .map_err(|e| anyhow!("Failed to build self-update configuration: {:#}", e)) } -pub fn get_path(name: &str, exe_env: &str, binary_name: &str, exe: &str) -> Result { +pub fn get_path( + name: &str, + exe_env: &str, + binary_name: &str, + exe: &str, + find_in_path: bool, +) -> Result { // Look at the environment variable first. 
if let Ok(path) = std::env::var(exe_env) { return Ok(PathBuf::from(path)); @@ -87,9 +93,11 @@ pub fn get_path(name: &str, exe_env: &str, binary_name: &str, exe: &str) -> Resu return Ok(path); } - // See if we can find the binary in the PATH. - if let Some(path) = pathsearch::find_executable_in_path(exe) { - return Ok(path); + if find_in_path { + // See if we can find the binary in the PATH. + if let Some(path) = pathsearch::find_executable_in_path(exe) { + return Ok(path); + } } Err(anyhow!( diff --git a/crates/transaction-emitter-lib/src/cluster.rs b/crates/transaction-emitter-lib/src/cluster.rs index a1584b3ef992f..72686d672cb9c 100644 --- a/crates/transaction-emitter-lib/src/cluster.rs +++ b/crates/transaction-emitter-lib/src/cluster.rs @@ -12,7 +12,7 @@ use aptos_rest_client::{Client as RestClient, State}; use aptos_sdk::types::{chain_id::ChainId, AccountKey, LocalAccount}; use futures::{stream::FuturesUnordered, StreamExt}; use rand::seq::SliceRandom; -use std::{convert::TryFrom, time::Instant}; +use std::convert::TryFrom; use url::Url; #[derive(Debug)] @@ -42,7 +42,7 @@ impl Cluster { let mut instance_states = Vec::new(); let mut errors = Vec::new(); - let start = Instant::now(); + let fetch_timestamp = aptos_infallible::duration_since_epoch().as_secs(); let futures = FuturesUnordered::new(); for url in &peers { let instance = Instance::new( @@ -62,7 +62,7 @@ impl Cluster { } let results: Vec<_> = futures.collect().await; - let fetch_time_s = start.elapsed().as_secs(); + for (instance, result) in results { match result { Ok(v) => instance_states.push((instance, v.into_inner())), @@ -89,6 +89,13 @@ impl Cluster { .map(|(_, s)| s.timestamp_usecs / 1000000) .max() .unwrap(); + if max_timestamp + 10 < fetch_timestamp { + return Err(anyhow!( + "None of the rest endpoints provided have chain timestamp within 10s of local time: {} < {}", + max_timestamp, + fetch_timestamp, + )); + } let chain_id_from_instances = get_chain_id_from_instances(instance_states.clone())?; 
let chain_id: ChainId = match maybe_chain_id { @@ -111,18 +118,19 @@ impl Cluster { state.chain_id, chain_id.id(), ); - } else if state_timestamp + 20 + fetch_time_s < max_timestamp { + } else if state_timestamp + 10 < fetch_timestamp { warn!( - "Excluding Client {} too stale, {}, while chain at {} (delta of {}s)", + "Excluding Client {} too stale, {}, while current time when fetching is {} (delta of {}s)", instance.peer_name(), state_timestamp, - max_timestamp, - max_timestamp - state_timestamp, + fetch_timestamp, + fetch_timestamp - state_timestamp, ); } else { info!( - "Client {} is healthy, adding to the list of end points for load testing", - instance.peer_name() + "Client {} is healthy ({}s delay), adding to the list of end points for load testing", + instance.peer_name(), + fetch_timestamp.saturating_sub(state_timestamp), ); instances.push(instance); } diff --git a/crates/transaction-emitter-lib/src/emitter/account_minter.rs b/crates/transaction-emitter-lib/src/emitter/account_minter.rs index 51eed9afa8de1..274f340a4bd70 100644 --- a/crates/transaction-emitter-lib/src/emitter/account_minter.rs +++ b/crates/transaction-emitter-lib/src/emitter/account_minter.rs @@ -349,9 +349,7 @@ impl<'t> AccountMinter<'t> { .await .into_iter() .collect::>>() - .map_err(|e| format_err!("Failed to create accounts: {:?}", e))? 
- .into_iter() - .collect(); + .map_err(|e| format_err!("Failed to create accounts: {:?}", e))?; info!( "Successfully completed creating {} accounts in {}s, request stats: {}", @@ -493,7 +491,7 @@ async fn create_and_fund_new_accounts( .map(|chunk| chunk.to_vec()) .collect::>(); let source_address = source_account.address(); - for batch in accounts_by_batch { + for (batch_index, batch) in accounts_by_batch.into_iter().enumerate() { let creation_requests: Vec<_> = batch .iter() .map(|account| { @@ -509,7 +507,12 @@ async fn create_and_fund_new_accounts( txn_executor .execute_transactions_with_counter(&creation_requests, counters) .await - .with_context(|| format!("Account {} couldn't mint", source_address))?; + .with_context(|| { + format!( + "Account {} couldn't mint batch {}", + source_address, batch_index + ) + })?; } Ok(()) } @@ -698,18 +701,24 @@ pub async fn bulk_create_accounts( .iter() .map(|account| txn_executor.get_account_balance(account.address())); let balances: Vec<_> = try_join_all(balance_futures).await?; - accounts + let underfunded = accounts .iter() .zip(balances) - .for_each(|(account, balance)| { - assert!( - balance >= coins_per_account, - "Account {} has balance {} < needed_min_balance {}", - account.address(), - balance, - coins_per_account - ); - }); + .enumerate() + .filter(|(_idx, (_account, balance))| *balance < coins_per_account) + .collect::>(); + + let first = underfunded.first(); + assert!( + underfunded.is_empty(), + "{} out of {} accounts are underfunded. 
For example Account[{}] {} has balance {} < needed_min_balance {}", + underfunded.len(), + accounts.len(), + first.unwrap().0, // idx + first.unwrap().1.0.address(), // account + first.unwrap().1.1, // balance + coins_per_account, + ); info!("Skipping funding accounts"); Ok(accounts) diff --git a/crates/transaction-emitter-lib/src/emitter/local_account_generator.rs b/crates/transaction-emitter-lib/src/emitter/local_account_generator.rs index 21504db166fd7..db6c9383c023e 100644 --- a/crates/transaction-emitter-lib/src/emitter/local_account_generator.rs +++ b/crates/transaction-emitter-lib/src/emitter/local_account_generator.rs @@ -2,17 +2,21 @@ use anyhow::bail; // Copyright © Aptos Foundation // SPDX-License-Identifier: Apache-2.0 use aptos_crypto::ed25519::Ed25519PrivateKey; -use aptos_sdk::types::{AccountKey, EphemeralKeyPair, KeylessAccount, LocalAccount}; +use aptos_sdk::types::{ + AccountKey, EphemeralKeyPair, EphemeralPrivateKey, KeylessAccount, LocalAccount, +}; use aptos_transaction_generator_lib::ReliableTransactionSubmitter; use aptos_types::keyless::{Claims, OpenIdSig, Pepper, ZeroKnowledgeSig}; use async_trait::async_trait; -use futures::future::try_join_all; +use futures::StreamExt; use rand::rngs::StdRng; use std::{ fs::File, io::{self, BufRead}, }; +const QUERY_PARALLELISM: usize = 300; + #[async_trait] pub trait LocalAccountGenerator: Send + Sync { async fn gen_local_accounts( @@ -73,7 +77,13 @@ impl LocalAccountGenerator for PrivateKeyAccountGenerator { .iter() .map(|address| txn_executor.query_sequence_number(*address)) .collect::>(); - let seq_nums: Vec<_> = try_join_all(result_futures).await?.into_iter().collect(); + + let seq_nums = futures::stream::iter(result_futures) + .buffered(QUERY_PARALLELISM) + .collect::>() + .await + .into_iter() + .collect::, _>>()?; let accounts = account_keys .into_iter() @@ -133,7 +143,9 @@ impl LocalAccountGenerator for KeylessAccountGenerator { // Cloning is disabled outside #[cfg(test)] let serialized: &[u8] = 
&(self.ephemeral_secret_key.to_bytes()); - let esk = Ed25519PrivateKey::try_from(serialized)?; + let esk = EphemeralPrivateKey::Ed25519 { + inner_private_key: Ed25519PrivateKey::try_from(serialized)?, + }; let keyless_account = KeylessAccount::new( &self.iss, @@ -166,7 +178,13 @@ impl LocalAccountGenerator for KeylessAccountGenerator { .iter() .map(|address| txn_executor.query_sequence_number(*address)) .collect::>(); - let seq_nums: Vec<_> = try_join_all(result_futures).await?.into_iter().collect(); + + let seq_nums = futures::stream::iter(result_futures) + .buffered(QUERY_PARALLELISM) + .collect::>() + .await + .into_iter() + .collect::, _>>()?; let accounts = keyless_accounts .into_iter() diff --git a/crates/transaction-emitter-lib/src/emitter/mod.rs b/crates/transaction-emitter-lib/src/emitter/mod.rs index 39ff2c284d4d6..78a140cc4f997 100644 --- a/crates/transaction-emitter-lib/src/emitter/mod.rs +++ b/crates/transaction-emitter-lib/src/emitter/mod.rs @@ -55,8 +55,8 @@ use tokio::{runtime::Handle, task::JoinHandle, time}; const MAX_TXNS: u64 = 1_000_000_000; // TODO Transfer cost increases during Coin => FA migration, we can reduce back later. 
-pub const EXPECTED_GAS_PER_TRANSFER: u64 = 10; -pub const EXPECTED_GAS_PER_ACCOUNT_CREATE: u64 = 2000 + 8; +pub const EXPECTED_GAS_PER_TRANSFER: u64 = 22; +pub const EXPECTED_GAS_PER_ACCOUNT_CREATE: u64 = 1100 + 20; const MAX_RETRIES: usize = 12; diff --git a/crates/transaction-emitter-lib/src/emitter/submission_worker.rs b/crates/transaction-emitter-lib/src/emitter/submission_worker.rs index b51b6cba1d0c0..be3dafccaeaf2 100644 --- a/crates/transaction-emitter-lib/src/emitter/submission_worker.rs +++ b/crates/transaction-emitter-lib/src/emitter/submission_worker.rs @@ -469,9 +469,9 @@ pub async fn submit_transactions( None }; let balance = client - .get_account_balance(sender) + .view_apt_account_balance(sender) .await - .map_or(-1, |v| v.into_inner().get() as i64); + .map_or(-1, |v| v.into_inner() as i64); warn!( "[{:?}] Failed to submit {} txns in a batch, first failure due to {:?}, for account {}, chain id: {:?}, first asked: {}, failed seq nums: {:?}, failed error codes: {:?}, balance of {} and last transaction for account: {:?}", diff --git a/crates/transaction-emitter-lib/src/emitter/transaction_executor.rs b/crates/transaction-emitter-lib/src/emitter/transaction_executor.rs index 63d72060dfb29..55713307dd45c 100644 --- a/crates/transaction-emitter-lib/src/emitter/transaction_executor.rs +++ b/crates/transaction-emitter-lib/src/emitter/transaction_executor.rs @@ -82,6 +82,7 @@ impl RestApiReliableTransactionSubmitter { rest_client, txn, self.retry_after, + i == 0, &mut failed_submit, &mut failed_wait, ) @@ -188,9 +189,9 @@ async fn warn_detailed_error( (None, None) }; let balance = rest_client - .get_account_balance(sender) + .view_apt_account_balance(sender) .await - .map_or(-1, |v| v.into_inner().get() as i128); + .map_or(-1, |v| v.into_inner() as i128); warn!( "[{:?}] Failed {} transaction: {:?}, seq num: {}, gas: unit {} and max {}, for account {}, last seq_num {:?}, balance of {} and last transaction for account: {:?}", @@ -211,6 +212,7 @@ async fn 
submit_and_check( rest_client: &RestClient, txn: &SignedTransaction, wait_duration: Duration, + first_try: bool, failed_submit: &mut bool, failed_wait: &mut bool, ) -> Result<()> { @@ -221,7 +223,11 @@ async fn submit_and_check( warn_detailed_error("submitting", rest_client, txn, Err(&err)).await ); *failed_submit = true; - if format!("{}", err).contains("SEQUENCE_NUMBER_TOO_OLD") { + if first_try && format!("{}", err).contains("SEQUENCE_NUMBER_TOO_OLD") { + sample!( + SampleRate::Duration(Duration::from_secs(2)), + warn_detailed_error("submitting on first try", rest_client, txn, Err(&err)).await + ); // There's no point to wait or retry on this error. // TODO: find a better way to propogate this error to the caller. Err(err)? @@ -295,13 +301,22 @@ fn is_account_not_found(error: &RestError) -> bool { impl ReliableTransactionSubmitter for RestApiReliableTransactionSubmitter { async fn get_account_balance(&self, account_address: AccountAddress) -> Result { Ok(FETCH_ACCOUNT_RETRY_POLICY - .retry(move || { - self.random_rest_client() - .get_account_balance(account_address) - }) + .retry_if( + move || { + self.random_rest_client() + .view_apt_account_balance(account_address) + }, + |error: &RestError| match error { + RestError::Api(error) => !matches!( + error.error.error_code, + AptosErrorCode::AccountNotFound | AptosErrorCode::InvalidInput + ), + RestError::Unknown(_) => false, + _ => true, + }, + ) .await? 
- .into_inner() - .get()) + .into_inner()) } async fn query_sequence_number(&self, account_address: AccountAddress) -> Result { diff --git a/dkg/src/dkg_manager/tests.rs b/dkg/src/dkg_manager/tests.rs index 5085fd5a76c77..69757b38f9a65 100644 --- a/dkg/src/dkg_manager/tests.rs +++ b/dkg/src/dkg_manager/tests.rs @@ -55,7 +55,7 @@ async fn test_dkg_state_transition() { .collect::>(); let epoch_state = EpochState { epoch: 999, - verifier: ValidatorVerifier::new(validator_consensus_infos.clone()), + verifier: Arc::new(ValidatorVerifier::new(validator_consensus_infos.clone())), }; let agg_node_producer = DummyAggTranscriptProducer {}; let mut dkg_manager: DKGManager = DKGManager::new( diff --git a/dkg/src/epoch_manager.rs b/dkg/src/epoch_manager.rs index 94ef6309cc89f..7561a2c49e8d2 100644 --- a/dkg/src/epoch_manager.rs +++ b/dkg/src/epoch_manager.rs @@ -159,10 +159,7 @@ impl EpochManager

{ .get() .expect("failed to get ValidatorSet from payload"); - let epoch_state = Arc::new(EpochState { - epoch: payload.epoch(), - verifier: (&validator_set).into(), - }); + let epoch_state = Arc::new(EpochState::new(payload.epoch(), (&validator_set).into())); self.epoch_state = Some(epoch_state.clone()); let my_index = epoch_state .verifier diff --git a/dkg/src/transcript_aggregation/tests.rs b/dkg/src/transcript_aggregation/tests.rs index 8d798bc2f189f..52aa202ad002a 100644 --- a/dkg/src/transcript_aggregation/tests.rs +++ b/dkg/src/transcript_aggregation/tests.rs @@ -49,7 +49,7 @@ fn test_transcript_aggregation_state() { dealer_validator_set: validator_consensus_info_move_structs.clone(), target_validator_set: validator_consensus_info_move_structs.clone(), }); - let epoch_state = Arc::new(EpochState { epoch, verifier }); + let epoch_state = Arc::new(EpochState::new(epoch, verifier)); let trx_agg_state = Arc::new(TranscriptAggregationState::::new( duration_since_epoch(), addrs[0], diff --git a/docker/builder/build-indexer.sh b/docker/builder/build-indexer.sh index 0bde16450815e..95bebee0a5de7 100755 --- a/docker/builder/build-indexer.sh +++ b/docker/builder/build-indexer.sh @@ -15,7 +15,7 @@ cargo build --locked --profile=$PROFILE \ -p aptos-indexer-grpc-cache-worker \ -p aptos-indexer-grpc-file-store \ -p aptos-indexer-grpc-data-service \ - -p aptos-nft-metadata-crawler-parser \ + -p aptos-nft-metadata-crawler \ -p aptos-indexer-grpc-file-store-backfiller \ "$@" @@ -24,7 +24,7 @@ BINS=( aptos-indexer-grpc-cache-worker aptos-indexer-grpc-file-store aptos-indexer-grpc-data-service - aptos-nft-metadata-crawler-parser + aptos-nft-metadata-crawler aptos-indexer-grpc-file-store-backfiller ) diff --git a/docker/builder/nft-metadata-crawler.Dockerfile b/docker/builder/nft-metadata-crawler.Dockerfile index 52eb69b8ec43a..1b1d6998740ad 100644 --- a/docker/builder/nft-metadata-crawler.Dockerfile +++ b/docker/builder/nft-metadata-crawler.Dockerfile @@ -16,7 +16,7 @@ RUN 
--mount=type=cache,target=/var/cache/apt,sharing=locked \ libpq-dev \ curl -COPY --link --from=indexer-builder /aptos/dist/aptos-nft-metadata-crawler-parser /usr/local/bin/aptos-nft-metadata-crawler-parser +COPY --link --from=indexer-builder /aptos/dist/aptos-nft-metadata-crawler /usr/local/bin/aptos-nft-metadata-crawler # The health check port EXPOSE 8080 diff --git a/ecosystem/indexer-grpc/indexer-grpc-fullnode/src/runtime.rs b/ecosystem/indexer-grpc/indexer-grpc-fullnode/src/runtime.rs index ec74174693fc5..04a4a3aaf49a5 100644 --- a/ecosystem/indexer-grpc/indexer-grpc-fullnode/src/runtime.rs +++ b/ecosystem/indexer-grpc/indexer-grpc-fullnode/src/runtime.rs @@ -19,9 +19,13 @@ use aptos_protos::{ }; use aptos_storage_interface::DbReader; use aptos_types::{chain_id::ChainId, indexer::indexer_db_reader::IndexerReader}; -use std::{net::ToSocketAddrs, sync::Arc}; -use tokio::runtime::Runtime; -use tonic::{codec::CompressionEncoding, transport::Server}; +use futures::channel::oneshot; +use std::sync::Arc; +use tokio::{net::TcpListener, runtime::Runtime}; +use tonic::{ + codec::CompressionEncoding, + transport::{server::TcpIncoming, Server}, +}; // Default Values pub const DEFAULT_NUM_RETRIES: usize = 3; @@ -35,6 +39,7 @@ pub fn bootstrap( db: Arc, mp_sender: MempoolClientSender, indexer_reader: Option>, + port_tx: Option>, ) -> Option { if !config.indexer_grpc.enabled { return None; @@ -105,11 +110,16 @@ pub fn bootstrap( tonic_server.add_service(svc) }, }; + + let listener = TcpListener::bind(address).await.unwrap(); + if let Some(port_tx) = port_tx { + port_tx.send(listener.local_addr().unwrap().port()).unwrap(); + } + let incoming = TcpIncoming::from_listener(listener, false, None).unwrap(); + // Make port into a config - router - .serve(address.to_socket_addrs().unwrap().next().unwrap()) - .await - .unwrap(); + router.serve_with_incoming(incoming).await.unwrap(); + info!(address = address, "[indexer-grpc] Started GRPC server"); }); Some(runtime) diff --git 
a/ecosystem/indexer-grpc/indexer-grpc-table-info/src/backup_restore/fs_ops.rs b/ecosystem/indexer-grpc/indexer-grpc-table-info/src/backup_restore/fs_ops.rs index 63de2a4ef927b..9fae0231af9ec 100644 --- a/ecosystem/indexer-grpc/indexer-grpc-table-info/src/backup_restore/fs_ops.rs +++ b/ecosystem/indexer-grpc/indexer-grpc-table-info/src/backup_restore/fs_ops.rs @@ -53,19 +53,36 @@ pub fn rename_db_folders_and_cleanup( /// Creates a tar.gz archive from the db snapshot directory pub fn create_tar_gz(dir_path: PathBuf, backup_file_name: &str) -> Result { - let tar_file_name = format!("{}.tar.gz", backup_file_name); - let tar_file_path = dir_path.join(&tar_file_name); - let temp_tar_file_path = dir_path.join(format!("{}.tmp", tar_file_name)); - - let tar_file = File::create(&temp_tar_file_path)?; - let gz_encoder = GzEncoder::new(tar_file, Compression::default()); + // Create a buffer to write the tar.gz archive. + let gz_encoder = GzEncoder::new(Vec::new(), Compression::fast()); let tar_data = BufWriter::new(gz_encoder); let mut tar_builder = Builder::new(tar_data); - + aptos_logger::info!( + dir_path = dir_path.to_str(), + backup_file_name = backup_file_name, + "[Table Info] Creating a tar.gz archive from the db snapshot directory" + ); tar_builder.append_dir_all(".", &dir_path)?; - tar_builder.into_inner()?; + aptos_logger::info!("[Table Info] Directory contents appended to the tar.gz archive"); + // Finish writing the tar archive and get the compressed GzEncoder back + let tar_data = tar_builder.into_inner()?; + let gz_encoder = tar_data.into_inner()?; - std::fs::rename(&temp_tar_file_path, &tar_file_path)?; + // Finish the compression process + let compressed_data = gz_encoder.finish()?; + + let tar_file_name = format!("{}.tar.gz", backup_file_name); + let tar_file_path = dir_path.join(&tar_file_name); + aptos_logger::info!( + dir_path = dir_path.to_str(), + backup_file_name = backup_file_name, + tar_file_path = tar_file_path.to_str(), + tar_file_name = 
tar_file_name, + "[Table Info] Prepare to compress the db snapshot directory" + ); + // Write the tar.gz archive to a file + std::fs::write(&tar_file_path, compressed_data)?; + aptos_logger::info!("[Table Info] Tar.gz archive created successfully"); Ok(tar_file_path) } diff --git a/ecosystem/indexer-grpc/indexer-grpc-table-info/src/backup_restore/gcs.rs b/ecosystem/indexer-grpc/indexer-grpc-table-info/src/backup_restore/gcs.rs index 2a38fcbc55a1a..21b3ce2982315 100644 --- a/ecosystem/indexer-grpc/indexer-grpc-table-info/src/backup_restore/gcs.rs +++ b/ecosystem/indexer-grpc/indexer-grpc-table-info/src/backup_restore/gcs.rs @@ -141,6 +141,10 @@ impl GcsBackupRestoreOperator { .await { Ok(_) => { + aptos_logger::info!( + "[Table Info] Successfully updated metadata to GCS bucket: {}", + METADATA_FILE_NAME + ); return Ok(()); }, // https://cloud.google.com/storage/quotas @@ -168,12 +172,30 @@ impl GcsBackupRestoreOperator { // chain id + epoch is the unique identifier for the snapshot. let snapshot_tar_file_name = format!("chain_id_{}_epoch_{}", chain_id, epoch); let snapshot_path_closure = snapshot_path.clone(); + aptos_logger::info!( + snapshot_tar_file_name = snapshot_tar_file_name.as_str(), + "[Table Info] Starting to compress the folder.", + ); let tar_file = task::spawn_blocking(move || { - create_tar_gz(snapshot_path_closure.clone(), &snapshot_tar_file_name) + aptos_logger::info!( + snapshot_tar_file_name = snapshot_tar_file_name.as_str(), + "[Table Info] Compressing the folder." + ); + let result = create_tar_gz(snapshot_path_closure.clone(), &snapshot_tar_file_name); + aptos_logger::info!( + snapshot_tar_file_name = snapshot_tar_file_name.as_str(), + result = result.is_ok(), + "[Table Info] Compressed the folder." + ); + result }) .await .context("Failed to spawn task to create snapshot backup file.")? 
.context("Failed to create tar.gz file in blocking task")?; + aptos_logger::info!( + "[Table Info] Created snapshot tar file: {:?}", + tar_file.file_name().unwrap() + ); // Open the file in async mode to stream it let file = File::open(&tar_file) @@ -183,6 +205,10 @@ impl GcsBackupRestoreOperator { let filename = generate_blob_name(chain_id, epoch); + aptos_logger::info!( + "[Table Info] Uploading snapshot to GCS bucket: {}", + filename + ); match self .gcs_client .upload_streamed_object( @@ -206,6 +232,10 @@ impl GcsBackupRestoreOperator { .and_then(|_| fs::remove_dir_all(snapshot_path_clone)) .await .expect("Failed to clean up after db snapshot upload"); + aptos_logger::info!( + "[Table Info] Successfully uploaded snapshot to GCS bucket: {}", + filename + ); }, Err(err) => { error!("Failed to upload snapshot: {}", err); diff --git a/ecosystem/indexer-grpc/indexer-grpc-table-info/src/internal_indexer_db_service.rs b/ecosystem/indexer-grpc/indexer-grpc-table-info/src/internal_indexer_db_service.rs index 2feaf9c3b5702..1806cf6231e7c 100644 --- a/ecosystem/indexer-grpc/indexer-grpc-table-info/src/internal_indexer_db_service.rs +++ b/ecosystem/indexer-grpc/indexer-grpc-table-info/src/internal_indexer_db_service.rs @@ -174,6 +174,27 @@ impl InternalIndexerDBService { start_version = next_version; } } + + // For internal testing + pub async fn run_with_end_version( + &mut self, + node_config: &NodeConfig, + end_version: Option, + ) -> Result<()> { + let mut start_version = self.get_start_version(node_config).await?; + while start_version <= end_version.unwrap_or(std::u64::MAX) { + let next_version = self.db_indexer.process_a_batch(start_version)?; + if next_version == start_version { + tokio::time::sleep(std::time::Duration::from_millis(100)).await; + continue; + } + start_version = next_version; + } + // We should never stop the internal indexer + tokio::time::sleep(std::time::Duration::from_secs(100)).await; + + Ok(()) + } } pub struct MockInternalIndexerDBService { 
@@ -186,6 +207,7 @@ impl MockInternalIndexerDBService { db_reader: Arc, node_config: &NodeConfig, update_receiver: WatchReceiver, + end_version: Option, ) -> Self { if !node_config .indexer_db_config @@ -205,7 +227,7 @@ impl MockInternalIndexerDBService { let config_clone = node_config.to_owned(); handle.spawn(async move { internal_indexer_db_service - .run(&config_clone) + .run_with_end_version(&config_clone, end_version) .await .unwrap(); }); diff --git a/ecosystem/indexer-grpc/indexer-grpc-table-info/src/table_info_service.rs b/ecosystem/indexer-grpc/indexer-grpc-table-info/src/table_info_service.rs index 518b74c897e52..f5c939b883094 100644 --- a/ecosystem/indexer-grpc/indexer-grpc-table-info/src/table_info_service.rs +++ b/ecosystem/indexer-grpc/indexer-grpc-table-info/src/table_info_service.rs @@ -66,6 +66,7 @@ impl TableInfoService { let context = self.context.clone(); let _task = tokio::spawn(async move { loop { + aptos_logger::info!("[Table Info] Checking for snapshots to backup."); Self::backup_snapshot_if_present( context.clone(), backup_restore_operator.clone(), @@ -416,10 +417,20 @@ impl TableInfoService { } // If nothing to backup, return. if epochs_to_backup.is_empty() { + // No snapshot to backup. + aptos_logger::info!("[Table Info] No snapshot to backup. Skipping the backup."); return; } + aptos_logger::info!( + epochs_to_backup = format!("{:?}", epochs_to_backup), + "[Table Info] Found snapshots to backup." + ); // Sort the epochs to backup. epochs_to_backup.sort(); + aptos_logger::info!( + epochs_to_backup = format!("{:?}", epochs_to_backup), + "[Table Info] Sorted snapshots to backup." + ); // Backup the existing snapshots and cleanup. 
for epoch in epochs_to_backup { backup_the_snapshot_and_cleanup( @@ -474,7 +485,11 @@ async fn backup_the_snapshot_and_cleanup( epoch: u64, ) { let snapshot_folder_name = snapshot_folder_name(context.chain_id().id() as u64, epoch); - + aptos_logger::info!( + epoch = epoch, + snapshot_folder_name = snapshot_folder_name, + "[Table Info] Backing up the snapshot and cleaning up the old snapshot." + ); let ledger_chain_id = context.chain_id().id(); // Validate the runtime. let backup_metadata = backup_restore_operator.get_metadata().await; @@ -486,6 +501,12 @@ async fn backup_the_snapshot_and_cleanup( metadata.chain_id ); } + } else { + aptos_logger::warn!( + epoch = epoch, + snapshot_folder_name = snapshot_folder_name, + "[Table Info] No backup metadata found. Skipping the backup." + ); } let start_time = std::time::Instant::now(); @@ -493,16 +514,32 @@ async fn backup_the_snapshot_and_cleanup( let snapshot_dir = context .node_config .get_data_dir() - .join(snapshot_folder_name); - + .join(snapshot_folder_name.clone()); // If the backup is for old epoch, clean up and return. if let Some(metadata) = backup_metadata { if metadata.epoch >= epoch { + aptos_logger::info!( + epoch = epoch, + snapshot_folder_name = snapshot_folder_name, + "[Table Info] Snapshot already backed up. Skipping the backup." + ); // Remove the snapshot directory. std::fs::remove_dir_all(snapshot_dir).unwrap(); return; } + } else { + aptos_logger::warn!( + epoch = epoch, + snapshot_folder_name = snapshot_folder_name, + "[Table Info] No backup metadata found." + ); } + aptos_logger::info!( + epoch = epoch, + snapshot_folder_name = snapshot_folder_name, + snapshot_dir = snapshot_dir.to_str(), + "[Table Info] Backing up the snapshot." + ); // TODO: add checks to handle concurrent backup jobs. 
backup_restore_operator .backup_db_snapshot_and_update_metadata(ledger_chain_id as u64, epoch, snapshot_dir.clone()) diff --git a/ecosystem/indexer-grpc/indexer-test-transactions/README.md b/ecosystem/indexer-grpc/indexer-test-transactions/README.md new file mode 100644 index 0000000000000..a26eaff406f3c --- /dev/null +++ b/ecosystem/indexer-grpc/indexer-test-transactions/README.md @@ -0,0 +1,66 @@ +# Transaction Code Generator +This dynamically generates code for constants that represent transactions stored as JSON files. It builds a set of Rust constants that point to these JSON files and optionally creates a function to retrieve the name of a transaction based on its constant value. + +## Features +- Transaction Constants: Automatically generates pub const declarations for each JSON file found in the specified directories. +- Modular Support: The code generation supports different transaction directories, such as imported_mainnet_txns, imported_testnet_txns, and scripted_transactions. +- Name Function Generation (Optional): For certain directories, the project can also generate a function that maps constant data to transaction names. + +## Directories for Transactions +The JSON files must be organized into specific directories within the json_transactions folder. The following directories are supported by default: + +- imported_mainnet_txns: Holds mainnet transaction JSON files. +- imported_testnet_txns: Holds testnet transaction JSON files. +- scripted_transactions: Holds scripted transaction JSON files and has a corresponding name lookup function. + +## How It Works +### Code Generation +The main purpose of this project is to automatically generate Rust code at build time. The generated code includes constants for each transaction JSON file and a function to retrieve transaction names, if applicable. The code is generated by the TransactionCodeBuilder struct and written to the OUT_DIR environment directory at compile time. + +The steps include: + +1. 
Scanning Directories: The project scans the directories for .json files. +2. Constant Generation: It creates a Rust constant for each JSON file, allowing them to be easily referenced in your code. +3. Name Function (Optional): For certain directories (e.g., scripted_transactions), a function get_transaction_name is generated to map the constant data back to the transaction name. + +### Example generate_transaction.rs Output +``` + + pub const IMPORTED_MAINNET_TXNS_308783012_FA_TRANSFER: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/imported_mainnet_txns/308783012_fa_transfer.json")); + pub const ALL_IMPORTED_MAINNET_TXNS: &[&[u8]] = &[IMPORTED_MAINNET_TXNS_308783012_FA_TRANSFER,]; + + pub const IMPORTED_TESTNET_TXNS_5979639459_COIN_REGISTER: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/imported_testnet_txns/5979639459_coin_register.json")); + + pub const IMPORTED_TESTNET_TXNS_1255836496_V2_FA_METADATA_: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/imported_testnet_txns/1255836496_v2_fa_metadata_.json")); + + pub const IMPORTED_TESTNET_TXNS_5992795934_FA_ACTIVITIES: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/imported_testnet_txns/5992795934_fa_activities.json")); + + pub const IMPORTED_TESTNET_TXNS_278556781_V1_COIN_REGISTER_FA_METADATA: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/imported_testnet_txns/278556781_v1_coin_register_fa_metadata.json")); + + pub const IMPORTED_TESTNET_TXNS_5523474016_VALIDATOR_TXN: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/imported_testnet_txns/5523474016_validator_txn.json")); + + pub const IMPORTED_TESTNET_TXNS_1_GENESIS: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/imported_testnet_txns/1_genesis.json")); + pub const ALL_IMPORTED_TESTNET_TXNS: &[&[u8]] = 
&[IMPORTED_TESTNET_TXNS_5979639459_COIN_REGISTER,IMPORTED_TESTNET_TXNS_1255836496_V2_FA_METADATA_,IMPORTED_TESTNET_TXNS_5992795934_FA_ACTIVITIES,IMPORTED_TESTNET_TXNS_278556781_V1_COIN_REGISTER_FA_METADATA,IMPORTED_TESTNET_TXNS_5523474016_VALIDATOR_TXN,IMPORTED_TESTNET_TXNS_1_GENESIS,]; + + pub const SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT4: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/scripted_transactions/simple_user_script4.json")); + + pub const SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT2: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/scripted_transactions/simple_user_script2.json")); + + pub const SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT3: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/scripted_transactions/simple_user_script3.json")); + + pub const SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT1: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/scripted_transactions/simple_user_script1.json")); + pub const ALL_SCRIPTED_TRANSACTIONS: &[&[u8]] = &[SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT4,SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT2,SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT3,SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT1,]; + + pub fn get_transaction_name(const_data: &[u8]) -> Option<&'static str> { + match const_data { + SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT4 => Some("simple_user_script4"), + SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT2 => Some("simple_user_script2"), + SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT3 => Some("simple_user_script3"), + SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT1 => Some("simple_user_script1"), + + _ => None, + } + } + + +``` \ No newline at end of file diff --git a/ecosystem/indexer-grpc/indexer-test-transactions/build.rs b/ecosystem/indexer-grpc/indexer-test-transactions/build.rs index 4c47d6b644ca7..b574313a291b0 100644 --- a/ecosystem/indexer-grpc/indexer-test-transactions/build.rs +++ 
b/ecosystem/indexer-grpc/indexer-test-transactions/build.rs @@ -1,71 +1,132 @@ // Copyright (c) Aptos Foundation // SPDX-License-Identifier: Apache-2.0 -// build.rs use std::{env, fs, path::Path}; -fn main() { - let out_dir = env::var("OUT_DIR").unwrap(); - let dest_path = Path::new(&out_dir).join("generate_transactions.rs"); +const IMPORTED_MAINNET_TXNS: &str = "imported_mainnet_txns"; +const IMPORTED_TESTNET_TXNS: &str = "imported_testnet_txns"; +const SCRIPTED_TRANSACTIONS_TXNS: &str = "scripted_transactions"; +#[derive(Default)] +pub struct TransactionCodeBuilder { + // Holds the generated Rust code for transaction constants + transactions_code: String, + // Holds the match arms for the name generation function for scripted txns (optional) + name_function_code: String, +} - let mut all_transactions_code = String::new(); +impl TransactionCodeBuilder { + pub fn new() -> Self { + Self::default() + } - create_directory_if_missing("json_transactions/imported_mainnet_txns"); - create_directory_if_missing("json_transactions/imported_testnet_txns"); - create_directory_if_missing("json_transactions/scripted_txns"); + pub fn add_directory( + mut self, + dir_name: &str, + module_name: &str, + generate_name_function: bool, + ) -> Self { + let json_dir = Path::new("json_transactions").join(dir_name); + let mut all_constants = String::new(); - all_transactions_code.push_str(&process_directory( - "imported_mainnet_txns", - "imported_mainnet_txns", - )); - all_transactions_code.push_str(&process_directory( - "imported_testnet_txns", - "imported_testnet_txns", - )); - all_transactions_code.push_str(&process_directory("generated_txns", "generated_txns")); + // Iterates over all files in the directory + for entry in fs::read_dir(json_dir).expect("Failed to read directory") { + let entry = entry.expect("Failed to get directory entry"); + let path = entry.path(); - fs::write(dest_path, all_transactions_code).unwrap(); -} + // Checks if the file has a `.json` extension + if 
path.extension().and_then(|s| s.to_str()) == Some("json") { + let file_name = path.file_stem().unwrap().to_str().unwrap(); + let const_name = format!( + "{}_{}", + module_name.to_uppercase(), + file_name.to_uppercase().replace('-', "_") + ); -fn process_directory(dir_name: &str, module_name: &str) -> String { - let mut transactions_code = String::new(); - let mut all_constants = String::new(); - let json_dir = Path::new("json_transactions").join(dir_name); + // Generates a constant for the JSON file and appends it to the `transactions_code` string + self.transactions_code.push_str(&format!( + r#" + pub const {const_name}: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/{dir_name}/{file_name}.json")); + "#, + const_name = const_name, + dir_name = dir_name, + file_name = file_name, + )); - for entry in fs::read_dir(json_dir).expect("Failed to read directory") { - let entry = entry.expect("Failed to get directory entry"); - let path = entry.path(); + // Adds the constant to the list of all constants + all_constants.push_str(&format!("{},", const_name)); - if path.extension().and_then(|s| s.to_str()) == Some("json") { - let file_name = path.file_stem().unwrap().to_str().unwrap(); - let const_name = format!( - "{}_{}", + // If name function generation is requested, adds the corresponding match arm + if generate_name_function { + self.name_function_code.push_str(&format!( + " {const_name} => Some(\"{file_name}\"),\n", + const_name = const_name, + file_name = file_name + )); + } + } + } + + // If any constants were created, generate an array holding all of them + if !all_constants.is_empty() { + self.transactions_code.push_str(&format!( + "pub const ALL_{}: &[&[u8]] = &[{}];\n", module_name.to_uppercase(), - file_name.to_uppercase().replace('-', "_") - ); + all_constants + )); + } + + self + } - let json_code = format!( + // Adds the transaction name lookup function if any name match arms were created + pub fn 
add_transaction_name_function(mut self) -> Self { + if !self.name_function_code.is_empty() { + self.transactions_code.push_str( r#" - pub const {const_name}: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/json_transactions/{dir_name}/{file_name}.json")); + pub fn get_transaction_name(const_data: &[u8]) -> Option<&'static str> { + match const_data { "#, - const_name = const_name, - dir_name = dir_name, - file_name = file_name, ); - transactions_code.push_str(&json_code); - all_constants.push_str(&format!("{},", const_name)); + + self.transactions_code.push_str(&self.name_function_code); + + self.transactions_code.push_str( + r#" + _ => None, + } + } + "#, + ); } + self + } + + pub fn build(self) -> String { + self.transactions_code } +} + +fn main() { + let out_dir = env::var("OUT_DIR").unwrap(); + let dest_path = Path::new(&out_dir).join("generate_transactions.rs"); + + // Create necessary directories if missing + create_directory_if_missing(&format!("json_transactions/{}", IMPORTED_MAINNET_TXNS)); + create_directory_if_missing(&format!("json_transactions/{}", IMPORTED_TESTNET_TXNS)); + create_directory_if_missing(&format!("json_transactions/{}", SCRIPTED_TRANSACTIONS_TXNS)); - transactions_code.push_str(&format!( - "pub const ALL_{}: &[&[u8]] = &[{}];", - module_name.to_uppercase(), - all_constants - )); + // Using the builder pattern to construct the code + let code = TransactionCodeBuilder::new() + .add_directory(IMPORTED_MAINNET_TXNS, IMPORTED_MAINNET_TXNS, false) + .add_directory(IMPORTED_TESTNET_TXNS, IMPORTED_TESTNET_TXNS, false) + .add_directory(SCRIPTED_TRANSACTIONS_TXNS, SCRIPTED_TRANSACTIONS_TXNS, true) + .add_transaction_name_function() + .build(); - transactions_code + fs::write(dest_path, code).unwrap(); } +// Helper function to create directories if they are missing fn create_directory_if_missing(dir: &str) { let path = Path::new(dir); if !path.exists() { diff --git 
a/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script1.json b/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script1.json similarity index 92% rename from ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script1.json rename to ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script1.json index 6a26fbca3c8c0..2791bea70215a 100644 --- a/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script1.json +++ b/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script1.json @@ -1,17 +1,17 @@ { "timestamp": { - "seconds": "1727421325", - "nanos": 197950000 + "seconds": "1727820423", + "nanos": 374924000 }, "version": "65", "info": { - "hash": "EVjdz6VT0ucfIThWYuM1275MjzHyEgSJP5RVaC8NlBA=", - "stateChangeHash": "i/eXRYj+jS/niJmWwd2iTvSzQm8s4S1qvyN06q0K3qU=", + "hash": "HVkZSbpJSlZwoFhNjwQIZVRP98gL/FflFz0XDr1s/jQ=", + "stateChangeHash": "3soporrHMTRz3yCupvKV3zTuT08yUmHu0fDCQ7t9Z14=", "eventRootHash": "J/kCueHhHqJXi8MVudrh82sOQ/pqY9TJ9VjLOdgel6I=", "gasUsed": "3", "success": true, "vmStatus": "Executed successfully", - "accumulatorRootHash": "oZCcacA0QaSJ73RZXyUy2VF0qSleXwVfxGZ+oen4qkQ=", + "accumulatorRootHash": "x7tsbu3sxHcS4gRpYWlhR++sAahWzRGE0pQEzcZ7sN4=", "changes": [ { "type": "TYPE_WRITE_RESOURCE", @@ -99,7 +99,7 @@ "maxGasAmount": "4", "gasUnitPrice": "100", "expirationTimestampSecs": { - "seconds": "1727421355" + "seconds": "1727820453" }, "payload": { "type": "TYPE_SCRIPT_PAYLOAD", @@ -128,7 +128,7 @@ "type": "TYPE_ED25519", "ed25519": { "publicKey": "ObSsyF4CbcBWRkpeoAuY+FgmDqrSt03TC4auDU2U3fU=", - "signature": "zKv0luBq+y21hWdUPNwXgr3IMqa/bOu2F6EEOsKE8Nj/hHTpufL9cRw1qw9HFqg1jPYjHrkj4CHz0AEpMzohDw==" + "signature": 
"IU2OKSpVrg5lkxDIaQcu2bof2350i1ZK1Fa5fnQDD69b511fgTzhLo3HxKBfUWPRVSHz4r4F3069tzbt2QRGAg==" } } }, diff --git a/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script2.json b/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script2.json similarity index 92% rename from ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script2.json rename to ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script2.json index 58156a8638d62..3e0147bc98bb4 100644 --- a/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script2.json +++ b/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script2.json @@ -1,17 +1,17 @@ { "timestamp": { - "seconds": "1727421331", - "nanos": 441046000 + "seconds": "1727820429", + "nanos": 295697000 }, "version": "85", "info": { - "hash": "PW14bLZ3tJ0R17SjNgnmkojeN1M9kHGQ7HqplAGrNMI=", - "stateChangeHash": "2x+QMF/oSpT68TbIeBfM9ii9C/BBxxaynIA9o5YyFWE=", + "hash": "q13GWfM8FlYaXVhA7NMVDn/xk0+k5DmQ02RaWRBqIyY=", + "stateChangeHash": "hD3RRNs1JLei2+pGUGnfhBTFaiG53wVQSddrWWbNEeI=", "eventRootHash": "J/kCueHhHqJXi8MVudrh82sOQ/pqY9TJ9VjLOdgel6I=", "gasUsed": "3", "success": true, "vmStatus": "Executed successfully", - "accumulatorRootHash": "nuQFa0uh8LYnJpm/ZCZltSy8QIsy/RZ62yR1EmI+Jhk=", + "accumulatorRootHash": "QYI3MKmiAbxczpvK5hz+ViHMVpr1fz1tXfnESqq+isM=", "changes": [ { "type": "TYPE_WRITE_RESOURCE", @@ -100,7 +100,7 @@ "maxGasAmount": "4", "gasUnitPrice": "100", "expirationTimestampSecs": { - "seconds": "1727421361" + "seconds": "1727820459" }, "payload": { "type": "TYPE_SCRIPT_PAYLOAD", @@ -129,7 +129,7 @@ "type": "TYPE_ED25519", "ed25519": { "publicKey": "ObSsyF4CbcBWRkpeoAuY+FgmDqrSt03TC4auDU2U3fU=", - "signature": 
"KU0rMZs8n95eJcX0vkVDRIMKt/kaJjPP4ttdTms5mniusNLILqM7qPqC2bDImRmmYhNRtJ4+BJcxn+T76rRsBQ==" + "signature": "6AjqLiElP1/XJ/cA5JLYn4ZhKlFCWP1mez4RUnsu8VMlrySXcTCmiQcdTyK1dlCwygPCd8bpPK556oLfXD/FCA==" } } }, diff --git a/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script3.json b/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script3.json similarity index 92% rename from ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script3.json rename to ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script3.json index a40abc7bf98cf..c6b8e2a84cef2 100644 --- a/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script3.json +++ b/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script3.json @@ -1,17 +1,17 @@ { "timestamp": { - "seconds": "1727421312", - "nanos": 921233000 + "seconds": "1727820411", + "nanos": 25131000 }, "version": "23", "info": { - "hash": "2RmO+iRS2emMjOVhnZpv9ZwJNM9y63TCwaYLobNg+RA=", - "stateChangeHash": "VMJsni+6OOtOvYbW9TkmqnbaKPjmkniZXoySrY2J0Zw=", + "hash": "GZknxbrVbJ5aNZM9oxo8z2rn7vXPdCvI6xnNXFwTCbI=", + "stateChangeHash": "WVt5Qp5hqlE5kot3+/OaitmDhrCxSBoOAD11eWHj3Vs=", "eventRootHash": "J/kCueHhHqJXi8MVudrh82sOQ/pqY9TJ9VjLOdgel6I=", "gasUsed": "3", "success": true, "vmStatus": "Executed successfully", - "accumulatorRootHash": "1yTie36mbirBFeepzuuC/bLZbFAJqNqwR1emhQKMVs4=", + "accumulatorRootHash": "aXWm4P5j68oLt6Ti0vzFIib/dLvCzgTgBBQE33QBR7o=", "changes": [ { "type": "TYPE_WRITE_RESOURCE", @@ -99,7 +99,7 @@ "maxGasAmount": "4", "gasUnitPrice": "100", "expirationTimestampSecs": { - "seconds": "1727421342" + "seconds": "1727820441" }, "payload": { "type": "TYPE_SCRIPT_PAYLOAD", @@ -128,7 +128,7 @@ "type": "TYPE_ED25519", "ed25519": { "publicKey": 
"spiXXSfb/zAg5e5/27rYqWnU8qLVKG4JfR25dg0E3TE=", - "signature": "Fsmy9//Uha51qhZGM0d4YPmGVXCjU7aIsFqjZ5cw8ieC47atfHpktEA164wc+i6PSAuj9TIoH4r/xi6dpclWAA==" + "signature": "bp1zbI6WAhlpVcvHStPchYhxufqxtxRVy+/BMs253MCe7fqK8dXJVTseyfz12GQN7MEax0+6ywn8WrKJ6e7bDg==" } } }, diff --git a/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script4.json b/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script4.json similarity index 92% rename from ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script4.json rename to ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script4.json index eccc37576107a..0bdcbf5fc6580 100644 --- a/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/generated_txns/simple_user_script4.json +++ b/ecosystem/indexer-grpc/indexer-test-transactions/json_transactions/scripted_transactions/simple_user_script4.json @@ -1,17 +1,17 @@ { "timestamp": { - "seconds": "1727421318", - "nanos": 640581000 + "seconds": "1727820416", + "nanos": 759819000 }, "version": "43", "info": { - "hash": "kq1i+DudMSQSPMed3N2xiuVq4RIeBTm5vyvl/29VBwA=", - "stateChangeHash": "zwtBKlKw9jZwEznxm9VtQucWtXtPf7vzbodt6SHcIRk=", + "hash": "kloPL+AJzlOiHUD8DctUWeIZY3habQK1KrpM1+QnlnE=", + "stateChangeHash": "x2csE3/ogps+13tND7qzDszafZ96xYXGIwFYnkx77Ag=", "eventRootHash": "J/kCueHhHqJXi8MVudrh82sOQ/pqY9TJ9VjLOdgel6I=", "gasUsed": "3", "success": true, "vmStatus": "Executed successfully", - "accumulatorRootHash": "91KHbKiUBC+o2jUqg0zFiNjVa1V4YpYqcX23oh6xWGo=", + "accumulatorRootHash": "dXORuqtTul46Lyl8Q5HORdGaKR6461GPCPSdsL20lLw=", "changes": [ { "type": "TYPE_WRITE_RESOURCE", @@ -99,7 +99,7 @@ "maxGasAmount": "4", "gasUnitPrice": "100", "expirationTimestampSecs": { - "seconds": "1727421348" + "seconds": "1727820446" }, "payload": { "type": "TYPE_SCRIPT_PAYLOAD", @@ -128,7 +128,7 @@ 
"type": "TYPE_ED25519", "ed25519": { "publicKey": "7wW+3hX0IuFsAALjzui01DQVGNmcRpU1KhhpsHeYZPs=", - "signature": "BWUCSlTwShIrM508yRGjhb2vAymDu+08DYtyn/oh3YuB7ptCeeaYAQg4GdzDQuGkBQixnjoWHVzA/tdgDWsuAQ==" + "signature": "nsZra2UgS2lbtSWogQG39J89KyK6f49h46acnn+rZD87OmJAzYux/9poCzoqiVyUEvJTShJ0RjUFKKxO6aUdAA==" } } }, diff --git a/ecosystem/indexer-grpc/indexer-test-transactions/src/lib.rs b/ecosystem/indexer-grpc/indexer-test-transactions/src/lib.rs index cd4c4b7fc7023..19c9634431480 100644 --- a/ecosystem/indexer-grpc/indexer-test-transactions/src/lib.rs +++ b/ecosystem/indexer-grpc/indexer-test-transactions/src/lib.rs @@ -10,7 +10,7 @@ mod tests { #[test] fn test_generate_transactions() { - let json_bytes = GENERATED_TXNS_SIMPLE_USER_SCRIPT1; + let json_bytes = SCRIPTED_TRANSACTIONS_SIMPLE_USER_SCRIPT1; // Check that the transaction is valid JSON let transaction = serde_json::from_slice::(json_bytes).unwrap(); diff --git a/ecosystem/indexer-grpc/indexer-transaction-generator/example.yaml b/ecosystem/indexer-grpc/indexer-transaction-generator/example.yaml new file mode 100644 index 0000000000000..1bf7c0649d272 --- /dev/null +++ b/ecosystem/indexer-grpc/indexer-transaction-generator/example.yaml @@ -0,0 +1,10 @@ +import_config: + testnet: + # Transaction Stream endpoint address. + transaction_stream_endpoint: https://grpc.testnet.aptoslabs.com:443 + # (Optional) The key to use with developers.aptoslabs.com + api_key: aptoslabs_L7pzPsqiShp_EwzUCbhb6ehTe8PSvFp6ZeUr3B3gBdGaX + # A map from versions to dump and their output names. 
+ versions_to_import: + 1: v1_genesis + 278556781: coin_register_fa_metadata_278556781 \ No newline at end of file diff --git a/ecosystem/nft-metadata-crawler-parser/src/config.rs b/ecosystem/nft-metadata-crawler-parser/src/config.rs deleted file mode 100644 index ed21836d1ce02..0000000000000 --- a/ecosystem/nft-metadata-crawler-parser/src/config.rs +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright © Aptos Foundation -// SPDX-License-Identifier: Apache-2.0 - -use crate::{ - asset_uploader::AssetUploaderContext, - parser::ParserContext, - utils::{ - constants::{ - DEFAULT_IMAGE_QUALITY, DEFAULT_MAX_FILE_SIZE_BYTES, DEFAULT_MAX_IMAGE_DIMENSIONS, - DEFAULT_MAX_NUM_PARSE_RETRIES, - }, - database::{establish_connection_pool, run_migrations}, - }, -}; -use aptos_indexer_grpc_server_framework::RunnableConfig; -use axum::Router; -use diesel::{ - r2d2::{ConnectionManager, Pool}, - PgConnection, -}; -use enum_dispatch::enum_dispatch; -use serde::{Deserialize, Serialize}; -use tracing::info; - -/// Trait for building a router for axum -#[enum_dispatch] -pub trait Server: Send + Sync { - fn build_router(&self) -> Router; -} - -/// Required account data and auth keys for Cloudflare -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(deny_unknown_fields)] -pub struct AssetUploaderConfig { - /// Cloudflare API key - pub cloudflare_auth_key: String, - /// Cloudflare Account ID provided at the images home page used to authenticate requests - pub cloudflare_account_id: String, - /// Cloudflare Account Hash provided at the images home page used for generating the CDN image URLs - pub cloudflare_account_hash: String, - /// Cloudflare Image Delivery URL prefix provided at the images home page used for generating the CDN image URLs - pub cloudflare_image_delivery_prefix: String, - /// In addition to on the fly transformations, Cloudflare images can be returned in preset variants. This is the default variant used with the saved CDN image URLs. 
- pub cloudflare_default_variant: String, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(deny_unknown_fields)] -pub struct ParserConfig { - pub google_application_credentials: Option, - pub bucket: String, - pub cdn_prefix: String, - pub ipfs_prefix: String, - pub ipfs_auth_key: Option, - #[serde(default = "NFTMetadataCrawlerConfig::default_max_file_size_bytes")] - pub max_file_size_bytes: u32, - #[serde(default = "NFTMetadataCrawlerConfig::default_image_quality")] - pub image_quality: u8, // Quality up to 100 - #[serde(default = "NFTMetadataCrawlerConfig::default_max_image_dimensions")] - pub max_image_dimensions: u32, - #[serde(default)] - pub ack_parsed_uris: bool, - #[serde(default)] - pub uri_blacklist: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum ServerConfig { - Parser(ParserConfig), - AssetUploader(AssetUploaderConfig), -} - -/// Structs to hold config from YAML -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(deny_unknown_fields)] -pub struct NFTMetadataCrawlerConfig { - pub database_url: String, - #[serde(default = "NFTMetadataCrawlerConfig::default_max_num_parse_retries")] - pub max_num_parse_retries: i32, - pub server_port: u16, - pub server_config: ServerConfig, -} - -impl NFTMetadataCrawlerConfig { - pub const fn default_max_file_size_bytes() -> u32 { - DEFAULT_MAX_FILE_SIZE_BYTES - } - - pub const fn default_image_quality() -> u8 { - DEFAULT_IMAGE_QUALITY - } - - pub const fn default_max_image_dimensions() -> u32 { - DEFAULT_MAX_IMAGE_DIMENSIONS - } - - pub const fn default_max_num_parse_retries() -> i32 { - DEFAULT_MAX_NUM_PARSE_RETRIES - } -} - -#[derive(Clone)] -#[enum_dispatch(Server)] -pub enum ServerContext { - Parser(ParserContext), - AssetUploader(AssetUploaderContext), -} - -impl ServerConfig { - pub async fn build_context( - &self, - pool: Pool>, - max_num_retries: i32, - ) -> ServerContext { - match self { - ServerConfig::Parser(parser_config) => 
ServerContext::Parser( - ParserContext::new(parser_config.clone(), pool, max_num_retries).await, - ), - ServerConfig::AssetUploader(asset_uploader_config) => ServerContext::AssetUploader( - AssetUploaderContext::new(asset_uploader_config.clone(), pool), - ), - } - } -} - -#[async_trait::async_trait] -impl RunnableConfig for NFTMetadataCrawlerConfig { - /// Main driver function that establishes a connection to Pubsub and parses the Pubsub entries in parallel - async fn run(&self) -> anyhow::Result<()> { - info!( - "[NFT Metadata Crawler] Starting parser with config: {:?}", - self - ); - - info!("[NFT Metadata Crawler] Connecting to database"); - let pool = establish_connection_pool(&self.database_url); - info!("[NFT Metadata Crawler] Database connection successful"); - - info!("[NFT Metadata Crawler] Running migrations"); - run_migrations(&pool); - info!("[NFT Metadata Crawler] Finished migrations"); - - // Create request context - let context = self - .server_config - .build_context(pool, self.max_num_parse_retries) - .await; - let listener = tokio::net::TcpListener::bind(format!("0.0.0.0:{}", self.server_port)) - .await - .expect("Failed to bind TCP listener"); - axum::serve(listener, context.build_router()).await.unwrap(); - - Ok(()) - } - - fn get_server_name(&self) -> String { - "parser".to_string() - } -} diff --git a/ecosystem/nft-metadata-crawler-parser/.gitignore b/ecosystem/nft-metadata-crawler/.gitignore similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/.gitignore rename to ecosystem/nft-metadata-crawler/.gitignore diff --git a/ecosystem/nft-metadata-crawler-parser/Cargo.toml b/ecosystem/nft-metadata-crawler/Cargo.toml similarity index 92% rename from ecosystem/nft-metadata-crawler-parser/Cargo.toml rename to ecosystem/nft-metadata-crawler/Cargo.toml index 21aa48782c2f8..0301bd8944289 100644 --- a/ecosystem/nft-metadata-crawler-parser/Cargo.toml +++ b/ecosystem/nft-metadata-crawler/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = 
"aptos-nft-metadata-crawler-parser" -description = "NFT Metadata Crawler Parser service." +name = "aptos-nft-metadata-crawler" +description = "NFT Metadata Crawler related services." version = "0.1.0" # Workspace inherited keys diff --git a/ecosystem/nft-metadata-crawler-parser/diesel.toml b/ecosystem/nft-metadata-crawler/diesel.toml similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/diesel.toml rename to ecosystem/nft-metadata-crawler/diesel.toml diff --git a/ecosystem/nft-metadata-crawler-parser/migrations/00000000000000_diesel_initial_setup/down.sql b/ecosystem/nft-metadata-crawler/migrations/00000000000000_diesel_initial_setup/down.sql similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/migrations/00000000000000_diesel_initial_setup/down.sql rename to ecosystem/nft-metadata-crawler/migrations/00000000000000_diesel_initial_setup/down.sql diff --git a/ecosystem/nft-metadata-crawler-parser/migrations/00000000000000_diesel_initial_setup/up.sql b/ecosystem/nft-metadata-crawler/migrations/00000000000000_diesel_initial_setup/up.sql similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/migrations/00000000000000_diesel_initial_setup/up.sql rename to ecosystem/nft-metadata-crawler/migrations/00000000000000_diesel_initial_setup/up.sql diff --git a/ecosystem/nft-metadata-crawler-parser/migrations/2023-09-08-001532_create_tables/down.sql b/ecosystem/nft-metadata-crawler/migrations/2023-09-08-001532_create_tables/down.sql similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/migrations/2023-09-08-001532_create_tables/down.sql rename to ecosystem/nft-metadata-crawler/migrations/2023-09-08-001532_create_tables/down.sql diff --git a/ecosystem/nft-metadata-crawler-parser/migrations/2023-09-08-001532_create_tables/up.sql b/ecosystem/nft-metadata-crawler/migrations/2023-09-08-001532_create_tables/up.sql similarity index 100% rename from 
ecosystem/nft-metadata-crawler-parser/migrations/2023-09-08-001532_create_tables/up.sql rename to ecosystem/nft-metadata-crawler/migrations/2023-09-08-001532_create_tables/up.sql diff --git a/ecosystem/nft-metadata-crawler-parser/migrations/2024-01-31-221845_add_not_parsable_column/down.sql b/ecosystem/nft-metadata-crawler/migrations/2024-01-31-221845_add_not_parsable_column/down.sql similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/migrations/2024-01-31-221845_add_not_parsable_column/down.sql rename to ecosystem/nft-metadata-crawler/migrations/2024-01-31-221845_add_not_parsable_column/down.sql diff --git a/ecosystem/nft-metadata-crawler-parser/migrations/2024-01-31-221845_add_not_parsable_column/up.sql b/ecosystem/nft-metadata-crawler/migrations/2024-01-31-221845_add_not_parsable_column/up.sql similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/migrations/2024-01-31-221845_add_not_parsable_column/up.sql rename to ecosystem/nft-metadata-crawler/migrations/2024-01-31-221845_add_not_parsable_column/up.sql diff --git a/ecosystem/nft-metadata-crawler-parser/migrations/2024-02-08-013147_add_last_transaction_version/down.sql b/ecosystem/nft-metadata-crawler/migrations/2024-02-08-013147_add_last_transaction_version/down.sql similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/migrations/2024-02-08-013147_add_last_transaction_version/down.sql rename to ecosystem/nft-metadata-crawler/migrations/2024-02-08-013147_add_last_transaction_version/down.sql diff --git a/ecosystem/nft-metadata-crawler-parser/migrations/2024-02-08-013147_add_last_transaction_version/up.sql b/ecosystem/nft-metadata-crawler/migrations/2024-02-08-013147_add_last_transaction_version/up.sql similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/migrations/2024-02-08-013147_add_last_transaction_version/up.sql rename to ecosystem/nft-metadata-crawler/migrations/2024-02-08-013147_add_last_transaction_version/up.sql diff --git 
a/ecosystem/nft-metadata-crawler/src/asset_uploader/config.rs b/ecosystem/nft-metadata-crawler/src/asset_uploader/config.rs new file mode 100644 index 0000000000000..3d019f4899f21 --- /dev/null +++ b/ecosystem/nft-metadata-crawler/src/asset_uploader/config.rs @@ -0,0 +1,20 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use serde::{Deserialize, Serialize}; + +/// Required account data and auth keys for Cloudflare +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(deny_unknown_fields)] +pub struct AssetUploaderConfig { + /// Cloudflare API key + pub cloudflare_auth_key: String, + /// Cloudflare Account ID provided at the images home page used to authenticate requests + pub cloudflare_account_id: String, + /// Cloudflare Account Hash provided at the images home page used for generating the CDN image URLs + pub cloudflare_account_hash: String, + /// Cloudflare Image Delivery URL prefix provided at the images home page used for generating the CDN image URLs + pub cloudflare_image_delivery_prefix: String, + /// In addition to on the fly transformations, Cloudflare images can be returned in preset variants. This is the default variant used with the saved CDN image URLs. 
+ pub cloudflare_default_variant: String, +} diff --git a/ecosystem/nft-metadata-crawler-parser/src/asset_uploader.rs b/ecosystem/nft-metadata-crawler/src/asset_uploader/mod.rs similarity index 96% rename from ecosystem/nft-metadata-crawler-parser/src/asset_uploader.rs rename to ecosystem/nft-metadata-crawler/src/asset_uploader/mod.rs index 70bf41b7b6acb..1ec5bf564a1dd 100644 --- a/ecosystem/nft-metadata-crawler-parser/src/asset_uploader.rs +++ b/ecosystem/nft-metadata-crawler/src/asset_uploader/mod.rs @@ -2,8 +2,9 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{ - config::{AssetUploaderConfig, Server}, - models::nft_metadata_crawler_uris::NFTMetadataCrawlerURIs, + asset_uploader::config::AssetUploaderConfig, + config::Server, + models::parsed_asset_uris::ParsedAssetUris, utils::{ constants::{MAX_ASSET_UPLOAD_RETRY_SECONDS, MAX_RETRY_TIME_SECONDS}, database::upsert_uris, @@ -23,6 +24,8 @@ use std::{sync::Arc, time::Duration}; use tracing::{info, warn}; use url::Url; +pub mod config; + #[derive(Clone)] pub struct AssetUploaderContext { pub asset_uploader_config: Arc, @@ -141,7 +144,7 @@ impl AssetUploaderContext { cdn_uri = cdn_url, "[Asset Uploader] Writing to Postgres" ); - let mut model = NFTMetadataCrawlerURIs::new(url.as_ref()); + let mut model = ParsedAssetUris::new(url.as_ref()); model.set_cdn_image_uri(Some(cdn_url.clone())); let mut conn = self_clone.pool.get().context("Failed to get connection")?; diff --git a/ecosystem/nft-metadata-crawler/src/config.rs b/ecosystem/nft-metadata-crawler/src/config.rs new file mode 100644 index 0000000000000..ab294aafff65b --- /dev/null +++ b/ecosystem/nft-metadata-crawler/src/config.rs @@ -0,0 +1,93 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + asset_uploader::{config::AssetUploaderConfig, AssetUploaderContext}, + parser::{config::ParserConfig, ParserContext}, + utils::database::{establish_connection_pool, run_migrations}, +}; +use 
aptos_indexer_grpc_server_framework::RunnableConfig; +use axum::Router; +use diesel::{ + r2d2::{ConnectionManager, Pool}, + PgConnection, +}; +use enum_dispatch::enum_dispatch; +use serde::{Deserialize, Serialize}; +use tokio::net::TcpListener; +use tracing::info; + +/// Trait for building a router for axum +#[enum_dispatch] +pub trait Server: Send + Sync { + fn build_router(&self) -> Router; +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum ServerConfig { + Parser(ParserConfig), + AssetUploader(AssetUploaderConfig), +} + +/// Structs to hold config from YAML +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(deny_unknown_fields)] +pub struct NFTMetadataCrawlerConfig { + pub database_url: String, + pub server_port: u16, + pub server_config: ServerConfig, +} + +#[derive(Clone)] +#[enum_dispatch(Server)] +pub enum ServerContext { + Parser(ParserContext), + AssetUploader(AssetUploaderContext), +} + +impl ServerConfig { + pub async fn build_context( + &self, + pool: Pool>, + ) -> ServerContext { + match self { + ServerConfig::Parser(parser_config) => { + ServerContext::Parser(ParserContext::new(parser_config.clone(), pool).await) + }, + ServerConfig::AssetUploader(asset_uploader_config) => ServerContext::AssetUploader( + AssetUploaderContext::new(asset_uploader_config.clone(), pool), + ), + } + } +} + +#[async_trait::async_trait] +impl RunnableConfig for NFTMetadataCrawlerConfig { + /// Main driver function that establishes a connection to Pubsub and parses the Pubsub entries in parallel + async fn run(&self) -> anyhow::Result<()> { + info!("[NFT Metadata Crawler] Starting with config: {:?}", self); + + info!("[NFT Metadata Crawler] Connecting to database"); + let pool = establish_connection_pool(&self.database_url); + info!("[NFT Metadata Crawler] Database connection successful"); + + info!("[NFT Metadata Crawler] Running migrations"); + run_migrations(&pool); + info!("[NFT Metadata Crawler] Finished migrations"); + + // 
Create request context + let context = self.server_config.build_context(pool).await; + let listener = TcpListener::bind(format!("0.0.0.0:{}", self.server_port)).await?; + axum::serve(listener, context.build_router()).await?; + + Ok(()) + } + + fn get_server_name(&self) -> String { + match &self.server_config { + ServerConfig::Parser(_) => "parser".to_string(), + ServerConfig::AssetUploader(_) => "asset_uploader".to_string(), + } + } +} diff --git a/ecosystem/nft-metadata-crawler-parser/src/lib.rs b/ecosystem/nft-metadata-crawler/src/lib.rs similarity index 98% rename from ecosystem/nft-metadata-crawler-parser/src/lib.rs rename to ecosystem/nft-metadata-crawler/src/lib.rs index c75310b61093e..acabe0aeba90c 100644 --- a/ecosystem/nft-metadata-crawler-parser/src/lib.rs +++ b/ecosystem/nft-metadata-crawler/src/lib.rs @@ -12,7 +12,6 @@ pub mod models; pub mod parser; pub mod schema; pub mod utils; -pub mod worker; /// HEAD request to get MIME type and size of content pub async fn get_uri_metadata(url: &str) -> anyhow::Result<(String, u32)> { diff --git a/ecosystem/nft-metadata-crawler-parser/src/main.rs b/ecosystem/nft-metadata-crawler/src/main.rs similarity index 79% rename from ecosystem/nft-metadata-crawler-parser/src/main.rs rename to ecosystem/nft-metadata-crawler/src/main.rs index 9c050bce8f154..b889dd54638ea 100644 --- a/ecosystem/nft-metadata-crawler-parser/src/main.rs +++ b/ecosystem/nft-metadata-crawler/src/main.rs @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 use aptos_indexer_grpc_server_framework::ServerArgs; -use aptos_nft_metadata_crawler_parser::config::NFTMetadataCrawlerConfig; +use aptos_nft_metadata_crawler::config::NFTMetadataCrawlerConfig; #[tokio::main] async fn main() -> anyhow::Result<()> { diff --git a/ecosystem/nft-metadata-crawler-parser/src/models/ledger_info.rs b/ecosystem/nft-metadata-crawler/src/models/ledger_info.rs similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/src/models/ledger_info.rs rename to 
ecosystem/nft-metadata-crawler/src/models/ledger_info.rs diff --git a/ecosystem/nft-metadata-crawler-parser/src/models/mod.rs b/ecosystem/nft-metadata-crawler/src/models/mod.rs similarity index 55% rename from ecosystem/nft-metadata-crawler-parser/src/models/mod.rs rename to ecosystem/nft-metadata-crawler/src/models/mod.rs index a034b1a23f72d..92f8bc964516f 100644 --- a/ecosystem/nft-metadata-crawler-parser/src/models/mod.rs +++ b/ecosystem/nft-metadata-crawler/src/models/mod.rs @@ -2,5 +2,5 @@ // SPDX-License-Identifier: Apache-2.0 pub mod ledger_info; -pub mod nft_metadata_crawler_uris; -pub mod nft_metadata_crawler_uris_query; +pub mod parsed_asset_uris; +pub mod parsed_asset_uris_query; diff --git a/ecosystem/nft-metadata-crawler-parser/src/models/nft_metadata_crawler_uris.rs b/ecosystem/nft-metadata-crawler/src/models/parsed_asset_uris.rs similarity index 95% rename from ecosystem/nft-metadata-crawler-parser/src/models/nft_metadata_crawler_uris.rs rename to ecosystem/nft-metadata-crawler/src/models/parsed_asset_uris.rs index c991362feea80..e3ce82abcfdc6 100644 --- a/ecosystem/nft-metadata-crawler-parser/src/models/nft_metadata_crawler_uris.rs +++ b/ecosystem/nft-metadata-crawler/src/models/parsed_asset_uris.rs @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{ - models::nft_metadata_crawler_uris_query::NFTMetadataCrawlerURIsQuery, + models::parsed_asset_uris_query::ParsedAssetUrisQuery, schema::nft_metadata_crawler::parsed_asset_uris, }; use diesel::prelude::*; @@ -13,7 +13,7 @@ use tracing::warn; #[derive(Clone, Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)] #[diesel(primary_key(asset_uri))] #[diesel(table_name = parsed_asset_uris)] -pub struct NFTMetadataCrawlerURIs { +pub struct ParsedAssetUris { asset_uri: String, raw_image_uri: Option, raw_animation_uri: Option, @@ -27,7 +27,7 @@ pub struct NFTMetadataCrawlerURIs { last_transaction_version: i64, } -impl NFTMetadataCrawlerURIs { +impl ParsedAssetUris { pub fn 
new(asset_uri: &str) -> Self { Self { asset_uri: asset_uri.to_string(), @@ -172,8 +172,8 @@ impl NFTMetadataCrawlerURIs { } } -impl From for NFTMetadataCrawlerURIs { - fn from(query: NFTMetadataCrawlerURIsQuery) -> Self { +impl From for ParsedAssetUris { + fn from(query: ParsedAssetUrisQuery) -> Self { Self { asset_uri: query.asset_uri, raw_image_uri: query.raw_image_uri, diff --git a/ecosystem/nft-metadata-crawler-parser/src/models/nft_metadata_crawler_uris_query.rs b/ecosystem/nft-metadata-crawler/src/models/parsed_asset_uris_query.rs similarity index 92% rename from ecosystem/nft-metadata-crawler-parser/src/models/nft_metadata_crawler_uris_query.rs rename to ecosystem/nft-metadata-crawler/src/models/parsed_asset_uris_query.rs index 65ee71ebe7ad3..409dac08632fb 100644 --- a/ecosystem/nft-metadata-crawler-parser/src/models/nft_metadata_crawler_uris_query.rs +++ b/ecosystem/nft-metadata-crawler/src/models/parsed_asset_uris_query.rs @@ -16,7 +16,7 @@ use tracing::error; #[derive(Debug, Deserialize, Identifiable, Queryable, Serialize)] #[diesel(primary_key(asset_uri))] #[diesel(table_name = parsed_asset_uris)] -pub struct NFTMetadataCrawlerURIsQuery { +pub struct ParsedAssetUrisQuery { pub asset_uri: String, pub raw_image_uri: Option, pub raw_animation_uri: Option, @@ -31,7 +31,7 @@ pub struct NFTMetadataCrawlerURIsQuery { pub last_transaction_version: i64, } -impl NFTMetadataCrawlerURIsQuery { +impl ParsedAssetUrisQuery { pub fn get_by_asset_uri( conn: &mut PooledConnection>, asset_uri: &str, @@ -39,7 +39,7 @@ impl NFTMetadataCrawlerURIsQuery { let mut op = || { parsed_asset_uris::table .find(asset_uri) - .first::(conn) + .first::(conn) .optional() .map_err(Into::into) }; @@ -65,7 +65,7 @@ impl NFTMetadataCrawlerURIsQuery { .filter(parsed_asset_uris::raw_image_uri.eq(raw_image_uri)) .filter(parsed_asset_uris::asset_uri.ne(asset_uri)) .filter(parsed_asset_uris::cdn_image_uri.is_not_null()) - .first::(conn) + .first::(conn) .optional() .map_err(Into::into) }; @@ -91,7 
+91,7 @@ impl NFTMetadataCrawlerURIsQuery { .filter(parsed_asset_uris::raw_animation_uri.eq(raw_animation_uri)) .filter(parsed_asset_uris::asset_uri.ne(asset_uri)) .filter(parsed_asset_uris::cdn_animation_uri.is_not_null()) - .first::(conn) + .first::(conn) .optional() .map_err(Into::into) }; @@ -108,7 +108,7 @@ impl NFTMetadataCrawlerURIsQuery { } } -impl Default for NFTMetadataCrawlerURIsQuery { +impl Default for ParsedAssetUrisQuery { fn default() -> Self { Self { asset_uri: "".to_string(), diff --git a/ecosystem/nft-metadata-crawler/src/parser/config.rs b/ecosystem/nft-metadata-crawler/src/parser/config.rs new file mode 100644 index 0000000000000..e918755ab33f4 --- /dev/null +++ b/ecosystem/nft-metadata-crawler/src/parser/config.rs @@ -0,0 +1,48 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::utils::constants::{ + DEFAULT_IMAGE_QUALITY, DEFAULT_MAX_FILE_SIZE_BYTES, DEFAULT_MAX_IMAGE_DIMENSIONS, + DEFAULT_MAX_NUM_PARSE_RETRIES, +}; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(deny_unknown_fields)] +pub struct ParserConfig { + pub google_application_credentials: Option, + pub bucket: String, + pub cdn_prefix: String, + pub ipfs_prefix: String, + pub ipfs_auth_key: Option, + #[serde(default = "ParserConfig::default_max_file_size_bytes")] + pub max_file_size_bytes: u32, + #[serde(default = "ParserConfig::default_image_quality")] + pub image_quality: u8, // Quality up to 100 + #[serde(default = "ParserConfig::default_max_image_dimensions")] + pub max_image_dimensions: u32, + #[serde(default = "ParserConfig::default_max_num_parse_retries")] + pub max_num_parse_retries: i32, + #[serde(default)] + pub ack_parsed_uris: bool, + #[serde(default)] + pub uri_blacklist: Vec, +} + +impl ParserConfig { + pub const fn default_max_file_size_bytes() -> u32 { + DEFAULT_MAX_FILE_SIZE_BYTES + } + + pub const fn default_image_quality() -> u8 { + DEFAULT_IMAGE_QUALITY + } + + pub const fn 
default_max_image_dimensions() -> u32 { + DEFAULT_MAX_IMAGE_DIMENSIONS + } + + pub const fn default_max_num_parse_retries() -> i32 { + DEFAULT_MAX_NUM_PARSE_RETRIES + } +} diff --git a/ecosystem/nft-metadata-crawler-parser/src/parser.rs b/ecosystem/nft-metadata-crawler/src/parser/mod.rs similarity index 97% rename from ecosystem/nft-metadata-crawler-parser/src/parser.rs rename to ecosystem/nft-metadata-crawler/src/parser/mod.rs index 8997f4dc019b2..56830330f007e 100644 --- a/ecosystem/nft-metadata-crawler-parser/src/parser.rs +++ b/ecosystem/nft-metadata-crawler/src/parser/mod.rs @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{ - config::{ParserConfig, Server}, + config::Server, utils::{ counters::{ GOT_CONNECTION_COUNT, PARSER_FAIL_COUNT, PARSER_INVOCATIONS_COUNT, @@ -10,10 +10,10 @@ use crate::{ }, database::check_or_update_chain_id, }, - worker::Worker, }; use axum::{http::StatusCode, response::Response, routing::post, Router}; use bytes::Bytes; +use config::ParserConfig; use diesel::{ r2d2::{ConnectionManager, Pool}, PgConnection, @@ -21,6 +21,10 @@ use diesel::{ use google_cloud_storage::client::{Client as GCSClient, ClientConfig as GCSClientConfig}; use std::sync::Arc; use tracing::{error, info, warn}; +use worker::Worker; + +pub mod config; +mod worker; /// Struct to hold context required for parsing #[derive(Clone)] @@ -28,14 +32,12 @@ pub struct ParserContext { pub parser_config: Arc, pub pool: Pool>, pub gcs_client: Arc, - pub max_num_retries: i32, } impl ParserContext { pub async fn new( parser_config: ParserConfig, pool: Pool>, - max_num_retries: i32, ) -> Self { if let Some(google_application_credentials) = &parser_config.google_application_credentials { @@ -64,7 +66,6 @@ impl ParserContext { parser_config: Arc::new(parser_config), pool, gcs_client: Arc::new(GCSClient::new(gcs_config)), - max_num_retries, } } @@ -151,7 +152,7 @@ impl ParserContext { let mut worker = Worker::new( self.parser_config.clone(), conn, - 
self.max_num_retries, + self.parser_config.max_num_parse_retries, self.gcs_client.clone(), &pubsub_message, parts[0], diff --git a/ecosystem/nft-metadata-crawler-parser/src/worker.rs b/ecosystem/nft-metadata-crawler/src/parser/worker.rs similarity index 96% rename from ecosystem/nft-metadata-crawler-parser/src/worker.rs rename to ecosystem/nft-metadata-crawler/src/parser/worker.rs index 85d221fec12f5..dce2f642f48b5 100644 --- a/ecosystem/nft-metadata-crawler-parser/src/worker.rs +++ b/ecosystem/nft-metadata-crawler/src/parser/worker.rs @@ -2,11 +2,8 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{ - config::ParserConfig, - models::{ - nft_metadata_crawler_uris::NFTMetadataCrawlerURIs, - nft_metadata_crawler_uris_query::NFTMetadataCrawlerURIsQuery, - }, + models::{parsed_asset_uris::ParsedAssetUris, parsed_asset_uris_query::ParsedAssetUrisQuery}, + parser::config::ParserConfig, utils::{ counters::{ DUPLICATE_ASSET_URI_COUNT, DUPLICATE_RAW_ANIMATION_URI_COUNT, @@ -38,7 +35,7 @@ pub struct Worker { max_num_retries: i32, gcs_client: Arc, pubsub_message: String, - model: NFTMetadataCrawlerURIs, + model: ParsedAssetUris, asset_data_id: String, asset_uri: String, last_transaction_version: i64, @@ -59,7 +56,7 @@ impl Worker { last_transaction_timestamp: chrono::NaiveDateTime, force: bool, ) -> Self { - let model = NFTMetadataCrawlerURIs::new(asset_uri); + let model = ParsedAssetUris::new(asset_uri); let worker = Self { parser_config, conn, @@ -81,8 +78,7 @@ impl Worker { pub async fn parse(&mut self) -> anyhow::Result<()> { // Deduplicate asset_uri // Exit if not force or if asset_uri has already been parsed - let prev_model = - NFTMetadataCrawlerURIsQuery::get_by_asset_uri(&mut self.conn, &self.asset_uri); + let prev_model = ParsedAssetUrisQuery::get_by_asset_uri(&mut self.conn, &self.asset_uri); if let Some(pm) = prev_model { DUPLICATE_ASSET_URI_COUNT.inc(); self.model = pm.into(); @@ -181,7 +177,7 @@ impl Worker { false } else { 
self.model.get_raw_image_uri().map_or(true, |uri| { - match NFTMetadataCrawlerURIsQuery::get_by_raw_image_uri( + match ParsedAssetUrisQuery::get_by_raw_image_uri( &mut self.conn, &self.asset_uri, &uri, @@ -288,7 +284,7 @@ impl Worker { None } else { self.model.get_raw_animation_uri().and_then(|uri| { - match NFTMetadataCrawlerURIsQuery::get_by_raw_animation_uri( + match ParsedAssetUrisQuery::get_by_raw_animation_uri( &mut self.conn, &self.asset_uri, &uri, diff --git a/ecosystem/nft-metadata-crawler-parser/src/schema.rs b/ecosystem/nft-metadata-crawler/src/schema.rs similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/src/schema.rs rename to ecosystem/nft-metadata-crawler/src/schema.rs diff --git a/ecosystem/nft-metadata-crawler-parser/src/utils/constants.rs b/ecosystem/nft-metadata-crawler/src/utils/constants.rs similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/src/utils/constants.rs rename to ecosystem/nft-metadata-crawler/src/utils/constants.rs diff --git a/ecosystem/nft-metadata-crawler-parser/src/utils/counters.rs b/ecosystem/nft-metadata-crawler/src/utils/counters.rs similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/src/utils/counters.rs rename to ecosystem/nft-metadata-crawler/src/utils/counters.rs diff --git a/ecosystem/nft-metadata-crawler-parser/src/utils/database.rs b/ecosystem/nft-metadata-crawler/src/utils/database.rs similarity index 96% rename from ecosystem/nft-metadata-crawler-parser/src/utils/database.rs rename to ecosystem/nft-metadata-crawler/src/utils/database.rs index 89202f8ffd783..6763cf3cd3f6d 100644 --- a/ecosystem/nft-metadata-crawler-parser/src/utils/database.rs +++ b/ecosystem/nft-metadata-crawler/src/utils/database.rs @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{ - models::{ledger_info::LedgerInfo, nft_metadata_crawler_uris::NFTMetadataCrawlerURIs}, + models::{ledger_info::LedgerInfo, parsed_asset_uris::ParsedAssetUris}, schema, }; use anyhow::Context; 
@@ -35,7 +35,7 @@ pub fn run_migrations(pool: &Pool>) { /// Upserts URIs into database pub fn upsert_uris( conn: &mut PooledConnection>, - entry: &NFTMetadataCrawlerURIs, + entry: &ParsedAssetUris, ltv: i64, ) -> anyhow::Result { use schema::nft_metadata_crawler::parsed_asset_uris::dsl::*; diff --git a/ecosystem/nft-metadata-crawler-parser/src/utils/gcs.rs b/ecosystem/nft-metadata-crawler/src/utils/gcs.rs similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/src/utils/gcs.rs rename to ecosystem/nft-metadata-crawler/src/utils/gcs.rs diff --git a/ecosystem/nft-metadata-crawler-parser/src/utils/image_optimizer.rs b/ecosystem/nft-metadata-crawler/src/utils/image_optimizer.rs similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/src/utils/image_optimizer.rs rename to ecosystem/nft-metadata-crawler/src/utils/image_optimizer.rs diff --git a/ecosystem/nft-metadata-crawler-parser/src/utils/json_parser.rs b/ecosystem/nft-metadata-crawler/src/utils/json_parser.rs similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/src/utils/json_parser.rs rename to ecosystem/nft-metadata-crawler/src/utils/json_parser.rs diff --git a/ecosystem/nft-metadata-crawler-parser/src/utils/mod.rs b/ecosystem/nft-metadata-crawler/src/utils/mod.rs similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/src/utils/mod.rs rename to ecosystem/nft-metadata-crawler/src/utils/mod.rs diff --git a/ecosystem/nft-metadata-crawler-parser/src/utils/uri_parser.rs b/ecosystem/nft-metadata-crawler/src/utils/uri_parser.rs similarity index 100% rename from ecosystem/nft-metadata-crawler-parser/src/utils/uri_parser.rs rename to ecosystem/nft-metadata-crawler/src/utils/uri_parser.rs diff --git a/execution/executor-benchmark/src/lib.rs b/execution/executor-benchmark/src/lib.rs index 641b278eef34c..d34ef9a7c7041 100644 --- a/execution/executor-benchmark/src/lib.rs +++ b/execution/executor-benchmark/src/lib.rs @@ -26,9 +26,8 @@ use aptos_db::AptosDB; use 
aptos_executor::{ block_executor::{BlockExecutor, TransactionBlockExecutor}, metrics::{ - APTOS_EXECUTOR_COMMIT_BLOCKS_SECONDS, APTOS_EXECUTOR_EXECUTE_BLOCK_SECONDS, - APTOS_EXECUTOR_LEDGER_UPDATE_SECONDS, APTOS_EXECUTOR_OTHER_TIMERS_SECONDS, - APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS, APTOS_PROCESSED_TXNS_OUTPUT_SIZE, + COMMIT_BLOCKS, EXECUTE_BLOCK, OTHER_TIMERS, PROCESSED_TXNS_OUTPUT_SIZE, UPDATE_LEDGER, + VM_EXECUTE_BLOCK, }, }; use aptos_jellyfish_merkle::metrics::{ @@ -536,29 +535,29 @@ struct ExecutionTimeMeasurement { impl ExecutionTimeMeasurement { pub fn now() -> Self { - let output_size = APTOS_PROCESSED_TXNS_OUTPUT_SIZE + let output_size = PROCESSED_TXNS_OUTPUT_SIZE .with_label_values(&["execution"]) .get_sample_sum(); let partitioning_total = BLOCK_PARTITIONING_SECONDS.get_sample_sum(); - let execution_total = APTOS_EXECUTOR_EXECUTE_BLOCK_SECONDS.get_sample_sum(); - let vm_only = APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS.get_sample_sum(); + let execution_total = EXECUTE_BLOCK.get_sample_sum(); + let vm_only = VM_EXECUTE_BLOCK.get_sample_sum(); let by_other = OTHER_LABELS .iter() .map(|(_prefix, _top_level, other_label)| { ( *other_label, - APTOS_EXECUTOR_OTHER_TIMERS_SECONDS + OTHER_TIMERS .with_label_values(&[other_label]) .get_sample_sum(), ) }) .collect::>(); - let ledger_update_total = APTOS_EXECUTOR_LEDGER_UPDATE_SECONDS.get_sample_sum(); - let commit_total = APTOS_EXECUTOR_COMMIT_BLOCKS_SECONDS.get_sample_sum(); + let ledger_update_total = UPDATE_LEDGER.get_sample_sum(); + let commit_total = COMMIT_BLOCKS.get_sample_sum(); - let vm_time = APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS.get_sample_sum(); + let vm_time = VM_EXECUTE_BLOCK.get_sample_sum(); Self { output_size, diff --git a/execution/executor-benchmark/src/transaction_committer.rs b/execution/executor-benchmark/src/transaction_committer.rs index 39a35957d2936..64f5f689433fa 100644 --- a/execution/executor-benchmark/src/transaction_committer.rs +++ 
b/execution/executor-benchmark/src/transaction_committer.rs @@ -7,10 +7,7 @@ use aptos_crypto::hash::HashValue; use aptos_db::metrics::API_LATENCY_SECONDS; use aptos_executor::{ block_executor::{BlockExecutor, TransactionBlockExecutor}, - metrics::{ - APTOS_EXECUTOR_COMMIT_BLOCKS_SECONDS, APTOS_EXECUTOR_EXECUTE_BLOCK_SECONDS, - APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS, - }, + metrics::{COMMIT_BLOCKS, EXECUTE_BLOCK, VM_EXECUTE_BLOCK}, }; use aptos_executor_types::BlockExecutorTrait; use aptos_logger::prelude::*; @@ -136,19 +133,19 @@ fn report_block( ); info!( "Accumulative total: VM time: {:.0} secs, executor time: {:.0} secs, commit time: {:.0} secs, DB commit time: {:.0} secs", - APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS.get_sample_sum(), - APTOS_EXECUTOR_EXECUTE_BLOCK_SECONDS.get_sample_sum() - APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS.get_sample_sum(), - APTOS_EXECUTOR_COMMIT_BLOCKS_SECONDS.get_sample_sum(), + VM_EXECUTE_BLOCK.get_sample_sum(), + EXECUTE_BLOCK.get_sample_sum() - VM_EXECUTE_BLOCK.get_sample_sum(), + COMMIT_BLOCKS.get_sample_sum(), API_LATENCY_SECONDS.get_metric_with_label_values(&["save_transactions", "Ok"]).expect("must exist.").get_sample_sum(), ); const NANOS_PER_SEC: f64 = 1_000_000_000.0; info!( "Accumulative per transaction: VM time: {:.0} ns, executor time: {:.0} ns, commit time: {:.0} ns, DB commit time: {:.0} ns", - APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS.get_sample_sum() * NANOS_PER_SEC + VM_EXECUTE_BLOCK.get_sample_sum() * NANOS_PER_SEC / total_versions, - (APTOS_EXECUTOR_EXECUTE_BLOCK_SECONDS.get_sample_sum() - APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS.get_sample_sum()) * NANOS_PER_SEC + (EXECUTE_BLOCK.get_sample_sum() - VM_EXECUTE_BLOCK.get_sample_sum()) * NANOS_PER_SEC / total_versions, - APTOS_EXECUTOR_COMMIT_BLOCKS_SECONDS.get_sample_sum() * NANOS_PER_SEC + COMMIT_BLOCKS.get_sample_sum() * NANOS_PER_SEC / total_versions, API_LATENCY_SECONDS.get_metric_with_label_values(&["save_transactions", "Ok"]).expect("must exist.").get_sample_sum() 
* NANOS_PER_SEC / total_versions, diff --git a/execution/executor-types/Cargo.toml b/execution/executor-types/Cargo.toml index 822bc4df17b42..a3df75cd01ccf 100644 --- a/execution/executor-types/Cargo.toml +++ b/execution/executor-types/Cargo.toml @@ -15,7 +15,6 @@ rust-version = { workspace = true } [dependencies] anyhow = { workspace = true } aptos-crypto = { workspace = true } -aptos-drop-helper = { workspace = true } aptos-scratchpad = { workspace = true } aptos-secure-net = { workspace = true } aptos-storage-interface = { workspace = true } diff --git a/execution/executor-types/src/ledger_update_output.rs b/execution/executor-types/src/ledger_update_output.rs index b2226a2321a45..beb86a5dd2959 100644 --- a/execution/executor-types/src/ledger_update_output.rs +++ b/execution/executor-types/src/ledger_update_output.rs @@ -10,9 +10,8 @@ use aptos_storage_interface::cached_state_view::ShardedStateCache; use aptos_types::{ contract_event::ContractEvent, epoch_state::EpochState, - ledger_info::LedgerInfoWithSignatures, proof::accumulator::InMemoryTransactionAccumulator, - state_store::{combine_or_add_sharded_state_updates, ShardedStateUpdates}, + state_store::ShardedStateUpdates, transaction::{ block_epilogue::BlockEndInfo, TransactionInfo, TransactionStatus, TransactionToCommit, Version, @@ -72,66 +71,6 @@ impl LedgerUpdateOutput { Ok(()) } - pub fn maybe_select_chunk_ending_ledger_info( - &self, - verified_target_li: &LedgerInfoWithSignatures, - epoch_change_li: Option<&LedgerInfoWithSignatures>, - next_epoch_state: Option<&EpochState>, - ) -> Result> { - if verified_target_li.ledger_info().version() + 1 - == self.transaction_accumulator.num_leaves() - { - // If the chunk corresponds to the target LI, the target LI can be added to storage. - ensure!( - verified_target_li - .ledger_info() - .transaction_accumulator_hash() - == self.transaction_accumulator.root_hash(), - "Root hash in target ledger info does not match local computation. 
{:?} != {:?}", - verified_target_li, - self.transaction_accumulator, - ); - Ok(Some(verified_target_li.clone())) - } else if let Some(epoch_change_li) = epoch_change_li { - // If the epoch change LI is present, it must match the version of the chunk: - - // Verify that the given ledger info corresponds to the new accumulator. - ensure!( - epoch_change_li.ledger_info().transaction_accumulator_hash() - == self.transaction_accumulator.root_hash(), - "Root hash of a given epoch LI does not match local computation. {:?} vs {:?}", - epoch_change_li, - self.transaction_accumulator, - ); - ensure!( - epoch_change_li.ledger_info().version() + 1 - == self.transaction_accumulator.num_leaves(), - "Version of a given epoch LI does not match local computation. {:?} vs {:?}", - epoch_change_li, - self.transaction_accumulator, - ); - ensure!( - epoch_change_li.ledger_info().ends_epoch(), - "Epoch change LI does not carry validator set. version:{}", - epoch_change_li.ledger_info().version(), - ); - ensure!( - epoch_change_li.ledger_info().next_epoch_state() == next_epoch_state, - "New validator set of a given epoch LI does not match local computation. {:?} vs {:?}", - epoch_change_li.ledger_info().next_epoch_state(), - next_epoch_state, - ); - Ok(Some(epoch_change_li.clone())) - } else { - ensure!( - next_epoch_state.is_none(), - "End of epoch chunk based on local computation but no EoE LedgerInfo provided. 
version: {:?}", - self.transaction_accumulator.num_leaves().checked_sub(1), - ); - Ok(None) - } - } - pub fn ensure_transaction_infos_match( &self, transaction_infos: &[TransactionInfo], @@ -180,35 +119,6 @@ impl LedgerUpdateOutput { ) } - pub fn combine(&mut self, rhs: Self) { - assert!(self.block_end_info.is_none()); - assert!(rhs.block_end_info.is_none()); - let Self { - statuses_for_input_txns, - to_commit, - subscribable_events, - transaction_info_hashes, - state_updates_until_last_checkpoint: state_updates_before_last_checkpoint, - sharded_state_cache, - transaction_accumulator, - block_end_info: _block_end_info, - } = rhs; - - if let Some(updates) = state_updates_before_last_checkpoint { - combine_or_add_sharded_state_updates( - &mut self.state_updates_until_last_checkpoint, - updates, - ); - } - - self.statuses_for_input_txns.extend(statuses_for_input_txns); - self.to_commit.extend(to_commit); - self.subscribable_events.extend(subscribable_events); - self.transaction_info_hashes.extend(transaction_info_hashes); - self.sharded_state_cache.combine(sharded_state_cache); - self.transaction_accumulator = transaction_accumulator; - } - pub fn next_version(&self) -> Version { self.transaction_accumulator.num_leaves() as Version } diff --git a/execution/executor-types/src/lib.rs b/execution/executor-types/src/lib.rs index 2c34a9cf23a9d..8ae1701243ca2 100644 --- a/execution/executor-types/src/lib.rs +++ b/execution/executor-types/src/lib.rs @@ -28,7 +28,6 @@ use aptos_types::{ write_set::WriteSet, }; pub use error::{ExecutorError, ExecutorResult}; -pub use executed_chunk::ExecutedChunk; pub use ledger_update_output::LedgerUpdateOutput; pub use parsed_transaction_output::ParsedTransactionOutput; use serde::{Deserialize, Serialize}; @@ -44,8 +43,6 @@ use std::{ }; mod error; -mod executed_chunk; -pub mod execution_output; mod ledger_update_output; pub mod parsed_transaction_output; pub mod state_checkpoint_output; @@ -262,16 +259,16 @@ impl VerifyExecutionMode { } pub 
trait TransactionReplayer: Send { - fn replay( + fn enqueue_chunks( &self, transactions: Vec, transaction_infos: Vec, write_sets: Vec, event_vecs: Vec>, verify_execution_mode: &VerifyExecutionMode, - ) -> Result<()>; + ) -> Result; - fn commit(&self) -> Result; + fn commit(&self) -> Result; } /// A structure that holds relevant information about a chunk that was committed. @@ -434,6 +431,10 @@ impl StateComputeResult { input_txns: Vec, block_id: HashValue, ) -> Vec { + if self.is_reconfiguration_suffix() { + return vec![]; + } + assert_eq!( input_txns.len(), self.compute_status_for_input_txns().len(), @@ -518,6 +519,10 @@ impl StateComputeResult { pub fn subscribable_events(&self) -> &[ContractEvent] { &self.subscribable_events } + + pub fn is_reconfiguration_suffix(&self) -> bool { + self.has_reconfiguration() && self.compute_status_for_input_txns().is_empty() + } } pub struct ProofReader { diff --git a/execution/executor/src/block_executor.rs b/execution/executor/src/block_executor.rs index d7c2b92412631..ae8ff5ee0035b 100644 --- a/execution/executor/src/block_executor.rs +++ b/execution/executor/src/block_executor.rs @@ -6,22 +6,21 @@ use crate::{ components::{ - apply_chunk_output::ApplyChunkOutput, block_tree::BlockTree, chunk_output::ChunkOutput, + apply_chunk_output::ApplyChunkOutput, + block_tree::{block_output::BlockOutput, BlockTree}, + chunk_output::ChunkOutput, }, logging::{LogEntry, LogSchema}, metrics::{ - APTOS_CHUNK_EXECUTOR_OTHER_SECONDS, APTOS_EXECUTOR_COMMIT_BLOCKS_SECONDS, - APTOS_EXECUTOR_EXECUTE_BLOCK_SECONDS, APTOS_EXECUTOR_LEDGER_UPDATE_SECONDS, - APTOS_EXECUTOR_OTHER_TIMERS_SECONDS, APTOS_EXECUTOR_SAVE_TRANSACTIONS_SECONDS, - APTOS_EXECUTOR_TRANSACTIONS_SAVED, APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS, - CONCURRENCY_GAUGE, + COMMIT_BLOCKS, CONCURRENCY_GAUGE, EXECUTE_BLOCK, OTHER_TIMERS, SAVE_TRANSACTIONS, + TRANSACTIONS_SAVED, UPDATE_LEDGER, VM_EXECUTE_BLOCK, }, }; use anyhow::Result; use aptos_crypto::HashValue; use aptos_executor_types::{ - 
execution_output::ExecutionOutput, state_checkpoint_output::StateCheckpointOutput, - BlockExecutorTrait, ExecutorError, ExecutorResult, StateComputeResult, + state_checkpoint_output::StateCheckpointOutput, BlockExecutorTrait, ExecutorError, + ExecutorResult, StateComputeResult, }; use aptos_experimental_runtimes::thread_manager::THREAD_MANAGER; use aptos_infallible::RwLock; @@ -216,7 +215,7 @@ where parent_block_id: HashValue, onchain_config: BlockExecutorConfigFromOnchain, ) -> ExecutorResult { - let _timer = APTOS_EXECUTOR_EXECUTE_BLOCK_SECONDS.start_timer(); + let _timer = EXECUTE_BLOCK.start_timer(); let ExecutableBlock { block_id, transactions, @@ -248,9 +247,8 @@ where ) } else { let state_view = { - let _timer = APTOS_EXECUTOR_OTHER_TIMERS_SECONDS - .with_label_values(&["verified_state_view"]) - .start_timer(); + let _timer = OTHER_TIMERS.timer_with(&["verified_state_view"]); + info!("next_version: {}", parent_output.next_version()); CachedStateView::new( StateViewId::BlockExecution { block_id }, @@ -262,7 +260,7 @@ where }; let chunk_output = { - let _timer = APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS.start_timer(); + let _timer = VM_EXECUTE_BLOCK.start_timer(); fail_point!("executor::vm_execute_block", |_| { Err(ExecutorError::from(anyhow::anyhow!( "Injected error in vm_execute_block" @@ -271,9 +269,7 @@ where V::execute_transaction_block(transactions, state_view, onchain_config.clone())? 
}; - let _timer = APTOS_EXECUTOR_OTHER_TIMERS_SECONDS - .with_label_values(&["state_checkpoint"]) - .start_timer(); + let _timer = OTHER_TIMERS.timer_with(&["state_checkpoint"]); THREAD_MANAGER.get_exe_cpu_pool().install(|| { chunk_output.into_state_checkpoint_output(parent_output.state(), block_id) @@ -283,7 +279,7 @@ where let _ = self.block_tree.add_block( parent_block_id, block_id, - ExecutionOutput::new(state, epoch_state), + BlockOutput::new(state, epoch_state), )?; Ok(state_checkpoint_output) } @@ -294,7 +290,7 @@ where parent_block_id: HashValue, state_checkpoint_output: StateCheckpointOutput, ) -> ExecutorResult { - let _timer = APTOS_EXECUTOR_LEDGER_UPDATE_SECONDS.start_timer(); + let _timer = UPDATE_LEDGER.start_timer(); info!( LogSchema::new(LogEntry::BlockExecutor).block_id(block_id), "ledger_update" @@ -358,7 +354,7 @@ where block_id: HashValue, parent_block_id: HashValue, ) -> ExecutorResult<()> { - let _timer = APTOS_EXECUTOR_COMMIT_BLOCKS_SECONDS.start_timer(); + let _timer = COMMIT_BLOCKS.start_timer(); info!( LogSchema::new(LogEntry::BlockExecutor).block_id(block_id), "pre_commit_block", @@ -376,7 +372,7 @@ where let ledger_update = block.output.get_ledger_update(); if !ledger_update.transactions_to_commit().is_empty() { - let _timer = APTOS_EXECUTOR_SAVE_TRANSACTIONS_SECONDS.start_timer(); + let _timer = SAVE_TRANSACTIONS.start_timer(); self.db.writer.pre_commit_ledger( ledger_update.transactions_to_commit(), ledger_update.first_version(), @@ -387,14 +383,14 @@ where ledger_update.state_updates_until_last_checkpoint.clone(), Some(&ledger_update.sharded_state_cache), )?; - APTOS_EXECUTOR_TRANSACTIONS_SAVED.observe(ledger_update.num_txns() as f64); + TRANSACTIONS_SAVED.observe(ledger_update.num_txns() as f64); } Ok(()) } fn commit_ledger(&self, ledger_info_with_sigs: LedgerInfoWithSignatures) -> ExecutorResult<()> { - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS.timer_with(&["commit_ledger"]); + let _timer = 
OTHER_TIMERS.timer_with(&["commit_ledger"]); let block_id = ledger_info_with_sigs.ledger_info().consensus_block_id(); info!( diff --git a/execution/executor/src/chunk_executor.rs b/execution/executor/src/chunk_executor.rs index 22289d087c4a5..d54497c000ca9 100644 --- a/execution/executor/src/chunk_executor.rs +++ b/execution/executor/src/chunk_executor.rs @@ -6,37 +6,34 @@ use crate::{ components::{ - apply_chunk_output::{ensure_no_discard, ensure_no_retry, ApplyChunkOutput}, + apply_chunk_output::ApplyChunkOutput, chunk_commit_queue::{ChunkCommitQueue, ChunkToUpdateLedger}, chunk_output::ChunkOutput, + chunk_result_verifier::{ChunkResultVerifier, ReplayChunkVerifier, StateSyncChunkVerifier}, + executed_chunk::ExecutedChunk, + transaction_chunk::{ChunkToApply, ChunkToExecute, TransactionChunk}, }, logging::{LogEntry, LogSchema}, - metrics::{ - APTOS_CHUNK_EXECUTOR_OTHER_SECONDS, APTOS_EXECUTOR_APPLY_CHUNK_SECONDS, - APTOS_EXECUTOR_COMMIT_CHUNK_SECONDS, APTOS_EXECUTOR_EXECUTE_CHUNK_SECONDS, - APTOS_EXECUTOR_VM_EXECUTE_CHUNK_SECONDS, CONCURRENCY_GAUGE, - }, + metrics::{APPLY_CHUNK, CHUNK_OTHER_TIMERS, COMMIT_CHUNK, CONCURRENCY_GAUGE, EXECUTE_CHUNK}, }; use anyhow::{anyhow, ensure, Result}; -use aptos_crypto::HashValue; use aptos_drop_helper::DEFAULT_DROPPER; use aptos_executor_types::{ - ChunkCommitNotification, ChunkExecutorTrait, ExecutedChunk, ParsedTransactionOutput, - TransactionReplayer, VerifyExecutionMode, + ChunkCommitNotification, ChunkExecutorTrait, ParsedTransactionOutput, TransactionReplayer, + VerifyExecutionMode, }; -use aptos_experimental_runtimes::thread_manager::{optimal_min_len, THREAD_MANAGER}; +use aptos_experimental_runtimes::thread_manager::THREAD_MANAGER; use aptos_infallible::{Mutex, RwLock}; use aptos_logger::prelude::*; use aptos_metrics_core::{IntGaugeHelper, TimerHelper}; use aptos_storage_interface::{ async_proof_fetcher::AsyncProofFetcher, cached_state_view::CachedStateView, - state_delta::StateDelta, DbReaderWriter, ExecutedTrees, + 
state_delta::StateDelta, DbReaderWriter, }; use aptos_types::{ block_executor::config::BlockExecutorConfigFromOnchain, contract_event::ContractEvent, ledger_info::LedgerInfoWithSignatures, - proof::TransactionInfoListWithProof, state_store::StateViewId, transaction::{ signature_verified_transaction::SignatureVerifiedTransaction, Transaction, @@ -48,8 +45,6 @@ use aptos_types::{ use aptos_vm::VMExecutor; use fail::fail_point; use itertools::multizip; -use once_cell::sync::Lazy; -use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; use std::{ iter::once, marker::PhantomData, @@ -60,16 +55,6 @@ use std::{ time::Instant, }; -pub static SIG_VERIFY_POOL: Lazy> = Lazy::new(|| { - Arc::new( - rayon::ThreadPoolBuilder::new() - .num_threads(8) // More than 8 threads doesn't seem to help much - .thread_name(|index| format!("signature-checker-{}", index)) - .build() - .unwrap(), - ) -}); - pub struct ChunkExecutor { db: DbReaderWriter, inner: RwLock>>, @@ -108,6 +93,10 @@ impl ChunkExecutor { error }) } + + pub fn is_empty(&self) -> bool { + self.with_inner(|inner| Ok(inner.is_empty())).unwrap() + } } impl ChunkExecutorTrait for ChunkExecutor { @@ -118,15 +107,39 @@ impl ChunkExecutorTrait for ChunkExecutor { epoch_change_li: Option<&LedgerInfoWithSignatures>, ) -> Result<()> { let _guard = CONCURRENCY_GAUGE.concurrency_with(&["chunk", "enqueue_by_execution"]); + let _timer = EXECUTE_CHUNK.start_timer(); self.maybe_initialize()?; - self.with_inner(|inner| { - inner.enqueue_chunk_by_execution( - txn_list_with_proof, - verified_target_li, - epoch_change_li, - ) - }) + + // Verify input data. + // In consensus-only mode, txn_list_with_proof is fake. + if !cfg!(feature = "consensus-only-perf-test") { + txn_list_with_proof.verify( + verified_target_li.ledger_info(), + txn_list_with_proof.first_transaction_version, + )?; + } + + // Compose enqueue_chunk parameters. 
+ let TransactionListWithProof { + transactions, + events: _, + first_transaction_version: v, + proof: txn_infos_with_proof, + } = txn_list_with_proof; + + let chunk = ChunkToExecute { + transactions, + first_version: v.ok_or_else(|| anyhow!("first version is None"))?, + }; + let chunk_verifier = Arc::new(StateSyncChunkVerifier { + txn_infos_with_proof, + verified_target_li: verified_target_li.clone(), + epoch_change_li: epoch_change_li.cloned(), + }); + + // Call the shared implementation. + self.with_inner(|inner| inner.enqueue_chunk(chunk, chunk_verifier, "execute")) } fn enqueue_chunk_by_transaction_outputs( @@ -136,14 +149,38 @@ impl ChunkExecutorTrait for ChunkExecutor { epoch_change_li: Option<&LedgerInfoWithSignatures>, ) -> Result<()> { let _guard = CONCURRENCY_GAUGE.concurrency_with(&["chunk", "enqueue_by_outputs"]); - - self.with_inner(|inner| { - inner.enqueue_chunk_by_transaction_outputs( - txn_output_list_with_proof, - verified_target_li, - epoch_change_li, + let _timer = APPLY_CHUNK.start_timer(); + + // Verify input data. + THREAD_MANAGER.get_exe_cpu_pool().install(|| { + let _timer = CHUNK_OTHER_TIMERS.timer_with(&["apply_chunk__verify"]); + txn_output_list_with_proof.verify( + verified_target_li.ledger_info(), + txn_output_list_with_proof.first_transaction_output_version, ) - }) + })?; + + // Compose enqueue_chunk parameters. + let TransactionOutputListWithProof { + transactions_and_outputs, + first_transaction_output_version: v, + proof: txn_infos_with_proof, + } = txn_output_list_with_proof; + let (transactions, transaction_outputs) = transactions_and_outputs.into_iter().unzip(); + + let chunk = ChunkToApply { + transactions, + transaction_outputs, + first_version: v.ok_or_else(|| anyhow!("first version is None"))?, + }; + let chunk_verifier = Arc::new(StateSyncChunkVerifier { + txn_infos_with_proof, + verified_target_li: verified_target_li.clone(), + epoch_change_li: epoch_change_li.cloned(), + }); + + // Call the shared implementation. 
+ self.with_inner(|inner| inner.enqueue_chunk(chunk, chunk_verifier, "apply")) } fn update_ledger(&self) -> Result<()> { @@ -207,36 +244,16 @@ impl ChunkExecutorInner { )?) } - fn verify_extends_ledger( - &self, - proof: &TransactionInfoListWithProof, - first_version: Version, - my_root_hash: HashValue, - ) -> Result<()> { - // In consensus-only mode, we cannot verify the proof against the executed output, - // because the proof returned by the remote peer is an empty one. - if cfg!(feature = "consensus-only-perf-test") { - return Ok(()); - } - - let num_overlap = - proof.verify_extends_ledger(first_version, my_root_hash, Some(first_version))?; - assert_eq!(num_overlap, 0, "overlapped chunks"); - - Ok(()) - } - fn commit_chunk_impl(&self) -> Result { - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS.timer_with(&["commit_chunk_impl__total"]); + let _timer = CHUNK_OTHER_TIMERS.timer_with(&["commit_chunk_impl__total"]); let (persisted_state, chunk) = { - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS - .timer_with(&["commit_chunk_impl__next_chunk_to_commit"]); + let _timer = + CHUNK_OTHER_TIMERS.timer_with(&["commit_chunk_impl__next_chunk_to_commit"]); self.commit_queue.lock().next_chunk_to_commit()? 
}; if chunk.ledger_info.is_some() || !chunk.transactions_to_commit().is_empty() { - let _timer = - APTOS_CHUNK_EXECUTOR_OTHER_SECONDS.timer_with(&["commit_chunk_impl__save_txns"]); + let _timer = CHUNK_OTHER_TIMERS.timer_with(&["commit_chunk_impl__save_txns"]); fail_point!("executor::commit_chunk", |_| { Err(anyhow::anyhow!("Injected error in commit_chunk")) }); @@ -258,8 +275,7 @@ impl ChunkExecutorInner { DEFAULT_DROPPER.schedule_drop(persisted_state); - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS - .timer_with(&["commit_chunk_impl__dequeue_and_return"]); + let _timer = CHUNK_OTHER_TIMERS.timer_with(&["commit_chunk_impl__dequeue_and_return"]); self.commit_queue .lock() .dequeue_committed(chunk.result_state.clone())?; @@ -267,177 +283,42 @@ impl ChunkExecutorInner { Ok(chunk) } - // ************************* Chunk Executor Implementation ************************* - fn enqueue_chunk_by_execution( - &self, - txn_list_with_proof: TransactionListWithProof, - verified_target_li: &LedgerInfoWithSignatures, - epoch_change_li: Option<&LedgerInfoWithSignatures>, - ) -> Result<()> { - let _timer = APTOS_EXECUTOR_EXECUTE_CHUNK_SECONDS.start_timer(); - - let num_txns = txn_list_with_proof.transactions.len(); - ensure!(num_txns != 0, "Empty transaction list!"); - let first_version_in_request = txn_list_with_proof - .first_transaction_version - .ok_or_else(|| anyhow!("Non-empty chunk with first_version == None."))?; - let parent_state = self.commit_queue.lock().latest_state(); - ensure!( - first_version_in_request == parent_state.next_version(), - "Unexpected chunk. 
version in request: {}, current_version: {:?}", - first_version_in_request, - parent_state.current_version, - ); - - { - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS - .timer_with(&["enqueue_chunk_by_execution__verify_chunk"]); - THREAD_MANAGER - .get_exe_cpu_pool() - .install(|| -> Result<()> { - verify_chunk( - &txn_list_with_proof, - verified_target_li, - Some(first_version_in_request), - ) - })?; - } - - let TransactionListWithProof { - transactions, - events: _, - first_transaction_version: _, - proof: txn_infos_with_proof, - } = txn_list_with_proof; - let verified_target_li = verified_target_li.clone(); - let epoch_change_li = epoch_change_li.cloned(); - let known_state_checkpoints: Vec<_> = txn_infos_with_proof - .transaction_infos - .iter() - .map(|t| t.state_checkpoint_hash()) - .collect(); - - // TODO(skedia) In the chunk executor path, we ideally don't need to verify the signature - // as only transactions with verified signatures are committed to the storage. - let num_txns = transactions.len(); - let sig_verified_txns = SIG_VERIFY_POOL.install(|| { - transactions - .into_par_iter() - .with_min_len(optimal_min_len(num_txns, 32)) - .map(|t| t.into()) - .collect::>() - }); - - // Execute transactions. - let state_view = self.latest_state_view(&parent_state)?; - let chunk_output = { - let _timer = APTOS_EXECUTOR_VM_EXECUTE_CHUNK_SECONDS.start_timer(); - // State sync executor shouldn't have block gas limit. - ChunkOutput::by_transaction_execution::( - sig_verified_txns.into(), - state_view, - BlockExecutorConfigFromOnchain::new_no_block_limit(), - )? - }; - - // Calcualte state snapshot - let (result_state, next_epoch_state, state_checkpoint_output) = - ApplyChunkOutput::calculate_state_checkpoint( - chunk_output, - &self.commit_queue.lock().latest_state(), - None, // append_state_checkpoint_to_block - Some(known_state_checkpoints), - false, // is_block - )?; - - // Enqueue for next stage. 
- self.commit_queue - .lock() - .enqueue_for_ledger_update(ChunkToUpdateLedger { - result_state, - state_checkpoint_output, - next_epoch_state, - verified_target_li, - epoch_change_li, - txn_infos_with_proof, - })?; - - info!( - LogSchema::new(LogEntry::ChunkExecutor) - .first_version_in_request(Some(first_version_in_request)) - .num_txns_in_request(num_txns), - "Executed transaction chunk!", - ); - - Ok(()) + fn is_empty(&self) -> bool { + self.commit_queue.lock().is_empty() } - fn enqueue_chunk_by_transaction_outputs( + // ************************* Chunk Executor Implementation ************************* + fn enqueue_chunk( &self, - txn_output_list_with_proof: TransactionOutputListWithProof, - verified_target_li: &LedgerInfoWithSignatures, - epoch_change_li: Option<&LedgerInfoWithSignatures>, + chunk: Chunk, + chunk_verifier: Arc, + mode_for_log: &'static str, ) -> Result<()> { - let _timer = APTOS_EXECUTOR_APPLY_CHUNK_SECONDS.start_timer(); - - let num_txns = txn_output_list_with_proof.transactions_and_outputs.len(); - ensure!(num_txns != 0, "Empty transaction list!"); - let first_version_in_request = txn_output_list_with_proof - .first_transaction_output_version - .ok_or_else(|| anyhow!("Non-empty chunk with first_version == None."))?; let parent_state = self.commit_queue.lock().latest_state(); + + let first_version = parent_state.next_version(); ensure!( - first_version_in_request == parent_state.next_version(), - "Unexpected chunk. version in request: {}, current_version: {:?}", - first_version_in_request, - parent_state.current_version, + chunk.first_version() == parent_state.next_version(), + "Chunk carries unexpected first version. Expected: {}, got: {}", + parent_state.next_version(), + chunk.first_version(), ); - { - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS.timer_with(&["apply_chunk__verify"]); - // Verify input transaction list. 
- THREAD_MANAGER - .get_exe_cpu_pool() - .install(|| -> Result<()> { - txn_output_list_with_proof.verify( - verified_target_li.ledger_info(), - Some(first_version_in_request), - ) - })?; - } - let TransactionOutputListWithProof { - transactions_and_outputs, - first_transaction_output_version: _, - proof: txn_infos_with_proof, - } = txn_output_list_with_proof; - let verified_target_li = verified_target_li.clone(); - let epoch_change_li = epoch_change_li.cloned(); - let known_state_checkpoints: Vec<_> = txn_infos_with_proof - .transaction_infos - .iter() - .map(|t| t.state_checkpoint_hash()) - .collect(); + let num_txns = chunk.len(); - // Apply transaction outputs. let state_view = self.latest_state_view(&parent_state)?; - let chunk_output = - ChunkOutput::by_transaction_output(transactions_and_outputs, state_view)?; + let chunk_output = chunk.into_output::(state_view)?; // Calculate state snapshot - let (result_state, next_epoch_state, state_checkpoint_output) = { - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS - .timer_with(&["apply_chunk__calculate_state_checkpoint"]); + let (result_state, next_epoch_state, state_checkpoint_output) = ApplyChunkOutput::calculate_state_checkpoint( chunk_output, &self.commit_queue.lock().latest_state(), None, // append_state_checkpoint_to_block - Some(known_state_checkpoints), + Some(chunk_verifier.state_checkpoint_hashes()), false, // is_block - )? - }; + )?; - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS - .timer_with(&["apply_chunk__enqueue_for_ledger_update"]); // Enqueue for next stage. 
self.commit_queue .lock() @@ -445,57 +326,49 @@ impl ChunkExecutorInner { result_state, state_checkpoint_output, next_epoch_state, - verified_target_li, - epoch_change_li, - txn_infos_with_proof, + chunk_verifier, })?; info!( LogSchema::new(LogEntry::ChunkExecutor) - .first_version_in_request(Some(first_version_in_request)) + .first_version_in_request(Some(first_version)) .num_txns_in_request(num_txns), - "Applied transaction output chunk!", + mode = mode_for_log, + "Enqueued transaction chunk!", ); Ok(()) } pub fn update_ledger(&self) -> Result<()> { - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS.timer_with(&["chunk_update_ledger_total"]); + let _timer = CHUNK_OTHER_TIMERS.timer_with(&["chunk_update_ledger_total"]); let (parent_accumulator, chunk) = { - let _timer = - APTOS_CHUNK_EXECUTOR_OTHER_SECONDS.timer_with(&["chunk_update_ledger__next_chunk"]); + let _timer = CHUNK_OTHER_TIMERS.timer_with(&["chunk_update_ledger__next_chunk"]); self.commit_queue.lock().next_chunk_to_update_ledger()? }; let ChunkToUpdateLedger { result_state, state_checkpoint_output, next_epoch_state, - verified_target_li, - epoch_change_li, - txn_infos_with_proof, + chunk_verifier, } = chunk; let first_version = parent_accumulator.num_leaves(); - self.verify_extends_ledger( - &txn_infos_with_proof, - first_version, - parent_accumulator.root_hash(), - )?; - let (ledger_update_output, to_discard, to_retry) = { - let _timer = - APTOS_CHUNK_EXECUTOR_OTHER_SECONDS.timer_with(&["chunk_update_ledger__calculate"]); - ApplyChunkOutput::calculate_ledger_update(state_checkpoint_output, parent_accumulator)? + let _timer = CHUNK_OTHER_TIMERS.timer_with(&["chunk_update_ledger__calculate"]); + ApplyChunkOutput::calculate_ledger_update( + state_checkpoint_output, + parent_accumulator.clone(), + )? 
}; + ensure!(to_discard.is_empty(), "Unexpected discard."); ensure!(to_retry.is_empty(), "Unexpected retry."); - ledger_update_output - .ensure_transaction_infos_match(&txn_infos_with_proof.transaction_infos)?; - let ledger_info_opt = ledger_update_output.maybe_select_chunk_ending_ledger_info( - &verified_target_li, - epoch_change_li.as_ref(), + chunk_verifier.verify_chunk_result(&parent_accumulator, &ledger_update_output)?; + + let ledger_info_opt = chunk_verifier.maybe_select_chunk_ending_ledger_info( + &ledger_update_output, next_epoch_state.as_ref(), )?; @@ -507,7 +380,7 @@ impl ChunkExecutorInner { }; let num_txns = executed_chunk.transactions_to_commit().len(); - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS.timer_with(&["chunk_update_ledger__save"]); + let _timer = CHUNK_OTHER_TIMERS.timer_with(&["chunk_update_ledger__save"]); self.commit_queue .lock() .save_ledger_update_output(executed_chunk)?; @@ -521,13 +394,13 @@ impl ChunkExecutorInner { } fn commit_chunk(&self) -> Result { - let _timer = APTOS_EXECUTOR_COMMIT_CHUNK_SECONDS.start_timer(); + let _timer = COMMIT_CHUNK.start_timer(); let executed_chunk = self.commit_chunk_impl()?; self.has_pending_pre_commit.store(false, Ordering::Release); let commit_notification = { - let _timer = APTOS_CHUNK_EXECUTOR_OTHER_SECONDS - .timer_with(&["commit_chunk__into_chunk_commit_notification"]); + let _timer = + CHUNK_OTHER_TIMERS.timer_with(&["commit_chunk__into_chunk_commit_notification"]); executed_chunk.into_chunk_commit_notification() }; @@ -535,74 +408,50 @@ impl ChunkExecutorInner { } } -/// Verifies the transaction list proof against the ledger info and returns transactions -/// that are not already applied in the ledger. 
-#[cfg(not(feature = "consensus-only-perf-test"))] -fn verify_chunk( - txn_list_with_proof: &TransactionListWithProof, - verified_target_li: &LedgerInfoWithSignatures, - first_version_in_request: Option, -) -> Result<()> { - txn_list_with_proof.verify(verified_target_li.ledger_info(), first_version_in_request) -} - -/// In consensus-only mode, the [TransactionListWithProof](transaction list) is *not* -/// verified against the proof and the [LedgerInfoWithSignatures](ledger info). -/// This is because the [FakeAptosDB] from where these transactions come from -/// returns an empty proof and not an actual proof, so proof verification will -/// fail regardless. This function does not skip any transactions that may be -/// already in the ledger, because it is not necessary as execution is disabled. -#[cfg(feature = "consensus-only-perf-test")] -fn verify_chunk( - _txn_list_with_proof: &TransactionListWithProof, - _verified_target_li: &LedgerInfoWithSignatures, - _first_version_in_request: Option, -) -> Result<()> { - // no-op: we do not verify the proof in consensus-only mode - Ok(()) -} - impl TransactionReplayer for ChunkExecutor { - fn replay( + fn enqueue_chunks( &self, transactions: Vec, transaction_infos: Vec, write_sets: Vec, event_vecs: Vec>, verify_execution_mode: &VerifyExecutionMode, - ) -> Result<()> { + ) -> Result { let _guard = CONCURRENCY_GAUGE.concurrency_with(&["replayer", "replay"]); self.maybe_initialize()?; - self.inner.read().as_ref().expect("not reset").replay( - transactions, - transaction_infos, - write_sets, - event_vecs, - verify_execution_mode, - ) + self.inner + .read() + .as_ref() + .expect("not reset") + .enqueue_chunks( + transactions, + transaction_infos, + write_sets, + event_vecs, + verify_execution_mode, + ) } - fn commit(&self) -> Result { + fn commit(&self) -> Result { let _guard = CONCURRENCY_GAUGE.concurrency_with(&["replayer", "commit"]); self.inner.read().as_ref().expect("not reset").commit() } } -impl TransactionReplayer for 
ChunkExecutorInner { - fn replay( +impl ChunkExecutorInner { + fn enqueue_chunks( &self, mut transactions: Vec, mut transaction_infos: Vec, mut write_sets: Vec, mut event_vecs: Vec>, verify_execution_mode: &VerifyExecutionMode, - ) -> Result<()> { + ) -> Result { let started = Instant::now(); let num_txns = transactions.len(); - let mut latest_view = self.commit_queue.lock().expect_latest_view()?; - let chunk_begin = latest_view.num_transactions() as Version; + let chunk_begin = self.commit_queue.lock().expecting_version(); let chunk_end = chunk_begin + num_txns as Version; // right-exclusive // Find epoch boundaries. @@ -621,12 +470,10 @@ impl TransactionReplayer for ChunkExecutorInner { epochs.push((epoch_begin, chunk_end)); } - let mut executed_chunk = None; + let mut chunks_enqueued = 0; // Replay epoch by epoch. for (begin, end) in epochs { - self.remove_and_replay_epoch( - &mut executed_chunk, - &mut latest_view, + chunks_enqueued += self.remove_and_replay_epoch( &mut transactions, &mut transaction_infos, &mut write_sets, @@ -637,19 +484,16 @@ impl TransactionReplayer for ChunkExecutorInner { )?; } - self.commit_queue - .lock() - .enqueue_chunk_to_commit_directly(executed_chunk.expect("Nothing to commit."))?; info!( num_txns = num_txns, tps = (num_txns as f64 / started.elapsed().as_secs_f64()), "TransactionReplayer::replay() OK" ); - Ok(()) + Ok(chunks_enqueued) } - fn commit(&self) -> Result { + fn commit(&self) -> Result { let started = Instant::now(); let chunk = self.commit_chunk_impl()?; @@ -660,18 +504,18 @@ impl TransactionReplayer for ChunkExecutorInner { tps = num_committed as f64 / started.elapsed().as_secs_f64(), "TransactionReplayer::commit() OK" ); - Ok(chunk) + + Ok(chunk + .result_state + .current_version + .expect("Version must exist after commit.")) } -} -impl ChunkExecutorInner { /// Remove `end_version - begin_version` transactions from the mutable input arguments and replay. 
/// The input range indicated by `[begin_version, end_version]` is guaranteed not to cross epoch boundaries. /// Notice there can be known broken versions inside the range. fn remove_and_replay_epoch( &self, - executed_chunk: &mut Option, - latest_view: &mut ExecutedTrees, transactions: &mut Vec, transaction_infos: &mut Vec, write_sets: &mut Vec, @@ -679,21 +523,21 @@ impl ChunkExecutorInner { begin_version: Version, end_version: Version, verify_execution_mode: &VerifyExecutionMode, - ) -> Result<()> { + ) -> Result { // we try to apply the txns in sub-batches split by known txns to skip and the end of the batch let txns_to_skip = verify_execution_mode.txns_to_skip(); let mut batch_ends = txns_to_skip .range(begin_version..end_version) .chain(once(&end_version)); + let mut chunks_enqueued = 0; + let mut batch_begin = begin_version; let mut batch_end = *batch_ends.next().unwrap(); while batch_begin < end_version { if batch_begin == batch_end { // batch_end is a known broken version that won't pass execution verification self.remove_and_apply( - executed_chunk, - latest_view, transactions, transaction_infos, write_sets, @@ -701,6 +545,7 @@ impl ChunkExecutorInner { batch_begin, batch_begin + 1, )?; + chunks_enqueued += 1; info!( version_skipped = batch_begin, "Skipped known broken transaction, applied transaction output directly." 
@@ -713,7 +558,6 @@ impl ChunkExecutorInner { // Try to run the transactions with the VM let next_begin = if verify_execution_mode.should_verify() { self.verify_execution( - latest_view, transactions, transaction_infos, write_sets, @@ -726,8 +570,6 @@ impl ChunkExecutorInner { batch_end }; self.remove_and_apply( - executed_chunk, - latest_view, transactions, transaction_infos, write_sets, @@ -735,15 +577,15 @@ impl ChunkExecutorInner { batch_begin, next_begin, )?; + chunks_enqueued += 1; batch_begin = next_begin; } - Ok(()) + Ok(chunks_enqueued) } fn verify_execution( &self, - latest_view: &mut ExecutedTrees, transactions: &[Transaction], transaction_infos: &[TransactionInfo], write_sets: &[WriteSet], @@ -753,7 +595,7 @@ impl ChunkExecutorInner { verify_execution_mode: &VerifyExecutionMode, ) -> Result { // Execute transactions. - let state_view = self.latest_state_view(latest_view.state())?; + let state_view = self.latest_state_view(&self.commit_queue.lock().latest_state())?; let txns = transactions .iter() .take((end_version - begin_version) as usize) @@ -797,8 +639,6 @@ impl ChunkExecutorInner { /// It's guaranteed that there's no known broken versions or epoch endings in the range. 
fn remove_and_apply( &self, - executed_chunk: &mut Option, - latest_view: &mut ExecutedTrees, transactions: &mut Vec, transaction_infos: &mut Vec, write_sets: &mut Vec, @@ -808,7 +648,7 @@ impl ChunkExecutorInner { ) -> Result<()> { let num_txns = (end_version - begin_version) as usize; let txn_infos: Vec<_> = transaction_infos.drain(..num_txns).collect(); - let txns_and_outputs = multizip(( + let (transactions, transaction_outputs) = multizip(( transactions.drain(..num_txns), txn_infos.iter(), write_sets.drain(..num_txns), @@ -826,30 +666,18 @@ impl ChunkExecutorInner { ), ) }) - .collect(); - - let state_view = self.latest_state_view(latest_view.state())?; - let chunk_output = ChunkOutput::by_transaction_output(txns_and_outputs, state_view)?; - let (executed_batch, to_discard, to_retry) = chunk_output.apply_to_ledger( - latest_view, - Some( - txn_infos - .iter() - .map(|txn_info| txn_info.state_checkpoint_hash()) - .collect(), - ), - )?; - ensure_no_discard(to_discard)?; - ensure_no_retry(to_retry)?; - executed_batch - .ledger_update_output - .ensure_transaction_infos_match(&txn_infos)?; - - match executed_chunk { - Some(chunk) => chunk.combine(executed_batch), - None => *executed_chunk = Some(executed_batch), - } - *latest_view = executed_chunk.as_ref().unwrap().result_view(); + .unzip(); + + let chunk = ChunkToApply { + transactions, + transaction_outputs, + first_version: begin_version, + }; + let chunk_verifier = Arc::new(ReplayChunkVerifier { + transaction_infos: txn_infos, + }); + self.enqueue_chunk(chunk, chunk_verifier, "replay")?; + Ok(()) } } diff --git a/execution/executor/src/components/apply_chunk_output.rs b/execution/executor/src/components/apply_chunk_output.rs index dd887ac23bb52..cf018e8ea9044 100644 --- a/execution/executor/src/components/apply_chunk_output.rs +++ b/execution/executor/src/components/apply_chunk_output.rs @@ -7,9 +7,10 @@ use crate::{ components::{ chunk_output::{update_counters_for_processed_chunk, ChunkOutput}, + 
executed_chunk::ExecutedChunk, in_memory_state_calculator_v2::InMemoryStateCalculatorV2, }, - metrics::{APTOS_EXECUTOR_ERRORS, APTOS_EXECUTOR_OTHER_TIMERS_SECONDS}, + metrics::{EXECUTOR_ERRORS, OTHER_TIMERS}, }; use anyhow::{ensure, Result}; use aptos_crypto::{hash::CryptoHash, HashValue}; @@ -17,10 +18,11 @@ use aptos_executor_types::{ parsed_transaction_output::TransactionsWithParsedOutput, should_forward_to_subscription_service, state_checkpoint_output::{StateCheckpointOutput, TransactionsByStatus}, - ExecutedChunk, LedgerUpdateOutput, ParsedTransactionOutput, + LedgerUpdateOutput, ParsedTransactionOutput, }; use aptos_experimental_runtimes::thread_manager::optimal_min_len; use aptos_logger::error; +use aptos_metrics_core::TimerHelper; use aptos_storage_interface::{state_delta::StateDelta, ExecutedTrees}; use aptos_types::{ contract_event::ContractEvent, @@ -54,9 +56,8 @@ impl ApplyChunkOutput { block_end_info, } = chunk_output; let (new_epoch, statuses_for_input_txns, to_commit, to_discard, to_retry) = { - let _timer = APTOS_EXECUTOR_OTHER_TIMERS_SECONDS - .with_label_values(&["sort_transactions"]) - .start_timer(); + let _timer = OTHER_TIMERS.timer_with(&["sort_transactions"]); + // Separate transactions with different VM statuses, i.e., Keep, Discard and Retry. // Will return transactions with Retry txns sorted after Keep/Discard txns. Self::sort_transactions_with_state_checkpoint( @@ -76,7 +77,7 @@ impl ApplyChunkOutput { state_updates_before_last_checkpoint, sharded_state_cache, ) = { - let _timer = APTOS_EXECUTOR_OTHER_TIMERS_SECONDS + let _timer = OTHER_TIMERS .with_label_values(&["calculate_for_transactions"]) .start_timer(); InMemoryStateCalculatorV2::calculate_for_transactions( @@ -141,9 +142,7 @@ impl ApplyChunkOutput { ); // Calculate TransactionData and TransactionInfo, i.e. the ledger history diff. 
- let _timer = APTOS_EXECUTOR_OTHER_TIMERS_SECONDS - .with_label_values(&["assemble_ledger_diff_for_block"]) - .start_timer(); + let _timer = OTHER_TIMERS.timer_with(&["assemble_ledger_diff_for_block"]); let (txns_to_commit, transaction_info_hashes, subscribable_events) = Self::assemble_ledger_diff(to_commit, state_updates_vec, state_checkpoint_hashes); @@ -301,7 +300,7 @@ impl ApplyChunkOutput { t, o.status(), ); - APTOS_EXECUTOR_ERRORS.inc(); + EXECUTOR_ERRORS.inc(); } }); @@ -331,9 +330,8 @@ impl ApplyChunkOutput { let mut txn_info_hashes = Vec::with_capacity(num_txns); let hashes_vec = Self::calculate_events_and_writeset_hashes(to_commit_from_execution.parsed_outputs()); - let _timer = APTOS_EXECUTOR_OTHER_TIMERS_SECONDS - .with_label_values(&["process_events_and_writeset_hashes"]) - .start_timer(); + + let _timer = OTHER_TIMERS.timer_with(&["process_events_and_writeset_hashes"]); let hashes_vec: Vec<(HashValue, HashValue)> = hashes_vec .into_par_iter() .map(|(event_hashes, write_set_hash)| { @@ -398,9 +396,8 @@ impl ApplyChunkOutput { fn calculate_events_and_writeset_hashes( to_commit_from_execution: &[ParsedTransactionOutput], ) -> Vec<(Vec, HashValue)> { - let _timer = APTOS_EXECUTOR_OTHER_TIMERS_SECONDS - .with_label_values(&["calculate_events_and_writeset_hashes"]) - .start_timer(); + let _timer = OTHER_TIMERS.timer_with(&["calculate_events_and_writeset_hashes"]); + let num_txns = to_commit_from_execution.len(); to_commit_from_execution .par_iter() diff --git a/execution/executor-types/src/execution_output.rs b/execution/executor/src/components/block_tree/block_output.rs similarity index 95% rename from execution/executor-types/src/execution_output.rs rename to execution/executor/src/components/block_tree/block_output.rs index ee3fd4aed5c23..3604604eaa948 100644 --- a/execution/executor-types/src/execution_output.rs +++ b/execution/executor/src/components/block_tree/block_output.rs @@ -3,19 +3,19 @@ #![forbid(unsafe_code)] -use crate::LedgerUpdateOutput; 
+use aptos_executor_types::LedgerUpdateOutput; use aptos_storage_interface::state_delta::StateDelta; use aptos_types::epoch_state::EpochState; use once_cell::sync::OnceCell; -pub struct ExecutionOutput { +pub struct BlockOutput { state: StateDelta, /// If set, this is the new epoch info that should be changed to if this is committed. next_epoch_state: Option, ledger_update_output: OnceCell, } -impl ExecutionOutput { +impl BlockOutput { pub fn new(state: StateDelta, next_epoch_state: Option) -> Self { Self { state, diff --git a/execution/executor/src/components/block_tree/mod.rs b/execution/executor/src/components/block_tree/mod.rs index 2618313832064..57a417014158f 100644 --- a/execution/executor/src/components/block_tree/mod.rs +++ b/execution/executor/src/components/block_tree/mod.rs @@ -4,6 +4,7 @@ #![forbid(unsafe_code)] +pub mod block_output; #[cfg(test)] mod test; @@ -12,11 +13,12 @@ use anyhow::{anyhow, ensure, Result}; use aptos_consensus_types::block::Block as ConsensusBlock; use aptos_crypto::HashValue; use aptos_drop_helper::DEFAULT_DROPPER; -use aptos_executor_types::{execution_output::ExecutionOutput, ExecutorError, LedgerUpdateOutput}; +use aptos_executor_types::{ExecutorError, LedgerUpdateOutput}; use aptos_infallible::Mutex; use aptos_logger::{debug, info}; use aptos_storage_interface::DbReader; use aptos_types::{ledger_info::LedgerInfo, proof::definition::LeafCount}; +use block_output::BlockOutput; use std::{ collections::{hash_map::Entry, HashMap}, sync::{mpsc::Receiver, Arc, Weak}, @@ -24,7 +26,7 @@ use std::{ pub struct Block { pub id: HashValue, - pub output: ExecutionOutput, + pub output: BlockOutput, children: Mutex>>, block_lookup: Arc, } @@ -92,7 +94,7 @@ impl BlockLookupInner { fn fetch_or_add_block( &mut self, id: HashValue, - output: ExecutionOutput, + output: BlockOutput, parent_id: Option, block_lookup: &Arc, ) -> Result<(Arc, bool, Option>)> { @@ -148,7 +150,7 @@ impl BlockLookup { fn fetch_or_add_block( self: &Arc, id: HashValue, - 
output: ExecutionOutput, + output: BlockOutput, parent_id: Option, ) -> Result> { let (block, existing, parent_block) = self @@ -224,7 +226,7 @@ impl BlockTree { ledger_info.consensus_block_id() }; - let output = ExecutionOutput::new_with_ledger_update( + let output = BlockOutput::new_with_ledger_update( ledger_view.state().clone(), None, LedgerUpdateOutput::new_empty(ledger_view.txn_accumulator().clone()), @@ -250,7 +252,7 @@ impl BlockTree { .original_reconfiguration_block_id(committed_block_id), "Updated with a new root block as a virtual block of reconfiguration block" ); - let output = ExecutionOutput::new_with_ledger_update( + let output = BlockOutput::new_with_ledger_update( last_committed_block.output.state().clone(), None, LedgerUpdateOutput::new_empty( @@ -289,7 +291,7 @@ impl BlockTree { &self, parent_block_id: HashValue, id: HashValue, - output: ExecutionOutput, + output: BlockOutput, ) -> Result> { self.block_lookup .fetch_or_add_block(id, output, Some(parent_block_id)) diff --git a/execution/executor/src/components/block_tree/test.rs b/execution/executor/src/components/block_tree/test.rs index a5ca96bd69167..5318fcb78dd1c 100644 --- a/execution/executor/src/components/block_tree/test.rs +++ b/execution/executor/src/components/block_tree/test.rs @@ -2,9 +2,11 @@ // Parts of the project are originally copyright © Meta Platforms, Inc. 
// SPDX-License-Identifier: Apache-2.0 -use crate::components::block_tree::{epoch_genesis_block_id, BlockLookup, BlockTree}; +use crate::components::block_tree::{ + block_output::BlockOutput, epoch_genesis_block_id, BlockLookup, BlockTree, +}; use aptos_crypto::{hash::PRE_GENESIS_BLOCK_ID, HashValue}; -use aptos_executor_types::{execution_output::ExecutionOutput, LedgerUpdateOutput}; +use aptos_executor_types::LedgerUpdateOutput; use aptos_infallible::Mutex; use aptos_storage_interface::ExecutedTrees; use aptos_types::{block_info::BlockInfo, epoch_state::EpochState, ledger_info::LedgerInfo}; @@ -36,9 +38,9 @@ fn id(index: u64) -> HashValue { HashValue::new(buf) } -fn empty_block() -> ExecutionOutput { +fn empty_block() -> BlockOutput { let result_view = ExecutedTrees::new_empty(); - ExecutionOutput::new_with_ledger_update( + BlockOutput::new_with_ledger_update( result_view.state().clone(), None, LedgerUpdateOutput::new_empty(ExecutedTrees::new_empty().txn_accumulator().clone()), diff --git a/execution/executor/src/components/chunk_commit_queue.rs b/execution/executor/src/components/chunk_commit_queue.rs index 0776af114b032..d796079bf812e 100644 --- a/execution/executor/src/components/chunk_commit_queue.rs +++ b/execution/executor/src/components/chunk_commit_queue.rs @@ -4,13 +4,14 @@ #![forbid(unsafe_code)] +use crate::components::{ + chunk_result_verifier::ChunkResultVerifier, executed_chunk::ExecutedChunk, +}; use anyhow::{anyhow, ensure, Result}; -use aptos_executor_types::{state_checkpoint_output::StateCheckpointOutput, ExecutedChunk}; +use aptos_executor_types::state_checkpoint_output::StateCheckpointOutput; use aptos_storage_interface::{state_delta::StateDelta, DbReader, ExecutedTrees}; use aptos_types::{ - epoch_state::EpochState, - ledger_info::LedgerInfoWithSignatures, - proof::{accumulator::InMemoryTransactionAccumulator, TransactionInfoListWithProof}, + epoch_state::EpochState, proof::accumulator::InMemoryTransactionAccumulator, transaction::Version, }; 
use std::{collections::VecDeque, sync::Arc}; @@ -21,11 +22,10 @@ pub(crate) struct ChunkToUpdateLedger { pub state_checkpoint_output: StateCheckpointOutput, /// If set, this is the new epoch info that should be changed to if this is committed. pub next_epoch_state: Option, - /// the below are from the input -- can be checked / used only after the transaction accumulator + + /// from the input -- can be checked / used only after the transaction accumulator /// is updated. - pub verified_target_li: LedgerInfoWithSignatures, - pub epoch_change_li: Option, - pub txn_infos_with_proof: TransactionInfoListWithProof, + pub chunk_verifier: Arc, } /// It's a two stage pipeline: @@ -66,18 +66,7 @@ impl ChunkCommitQueue { } pub(crate) fn expecting_version(&self) -> Version { - self.latest_txn_accumulator.num_leaves() - } - - pub(crate) fn expect_latest_view(&self) -> Result { - ensure!( - self.to_update_ledger.is_empty(), - "Pending chunk to update_ledger, can't construct latest ExecutedTrees." - ); - Ok(ExecutedTrees::new( - self.latest_state.clone(), - self.latest_txn_accumulator.clone(), - )) + self.latest_state.next_version() } pub(crate) fn enqueue_for_ledger_update( @@ -130,17 +119,6 @@ impl ChunkCommitQueue { Ok((self.persisted_state.clone(), chunk)) } - pub(crate) fn enqueue_chunk_to_commit_directly(&mut self, chunk: ExecutedChunk) -> Result<()> { - ensure!( - self.to_update_ledger.is_empty(), - "Mixed usage of different modes." 
- ); - self.latest_state = chunk.result_state.clone(); - self.latest_txn_accumulator = chunk.ledger_update_output.transaction_accumulator.clone(); - self.to_commit.push_back(Some(chunk)); - Ok(()) - } - pub(crate) fn dequeue_committed(&mut self, latest_state: StateDelta) -> Result<()> { ensure!(!self.to_commit.is_empty(), "to_commit is empty."); ensure!( @@ -154,4 +132,8 @@ impl ChunkCommitQueue { .log_generation("commit_queue_base"); Ok(()) } + + pub(crate) fn is_empty(&self) -> bool { + self.to_commit.is_empty() && self.to_update_ledger.is_empty() + } } diff --git a/execution/executor/src/components/chunk_output.rs b/execution/executor/src/components/chunk_output.rs index 3e471f5dcf714..9c5019dd8db64 100644 --- a/execution/executor/src/components/chunk_output.rs +++ b/execution/executor/src/components/chunk_output.rs @@ -4,14 +4,17 @@ #![forbid(unsafe_code)] -use crate::{components::apply_chunk_output::ApplyChunkOutput, metrics}; +use crate::{ + components::{apply_chunk_output::ApplyChunkOutput, executed_chunk::ExecutedChunk}, + metrics, +}; use anyhow::Result; use aptos_crypto::HashValue; use aptos_executor_service::{ local_executor_helper::SHARDED_BLOCK_EXECUTOR, remote_executor_client::{get_remote_addresses, REMOTE_SHARDED_BLOCK_EXECUTOR}, }; -use aptos_executor_types::{state_checkpoint_output::StateCheckpointOutput, ExecutedChunk}; +use aptos_executor_types::state_checkpoint_output::StateCheckpointOutput; use aptos_logger::{sample, sample::SampleRate, warn}; use aptos_storage_interface::{ cached_state_view::{CachedStateView, StateCache}, @@ -113,12 +116,10 @@ impl ChunkOutput { } pub fn by_transaction_output( - transactions_and_outputs: Vec<(Transaction, TransactionOutput)>, + transactions: Vec, + transaction_outputs: Vec, state_view: CachedStateView, ) -> Result { - let (transactions, transaction_outputs): (Vec<_>, Vec<_>) = - transactions_and_outputs.into_iter().unzip(); - update_counters_for_processed_chunk(&transactions, &transaction_outputs, "output"); 
// collect all accounts touched and dedup @@ -265,7 +266,7 @@ pub fn update_counters_for_processed_chunk( for (txn, output) in transactions.iter().zip(transaction_outputs.iter()) { if detailed_counters { if let Ok(size) = bcs::serialized_size(output.get_transaction_output()) { - metrics::APTOS_PROCESSED_TXNS_OUTPUT_SIZE + metrics::PROCESSED_TXNS_OUTPUT_SIZE .with_label_values(&[process_type]) .observe(size as f64); } @@ -342,12 +343,12 @@ pub fn update_counters_for_processed_chunk( None => "unknown", }; - metrics::APTOS_PROCESSED_TXNS_COUNT + metrics::PROCESSED_TXNS_COUNT .with_label_values(&[process_type, kind, state]) .inc(); if !error_code.is_empty() { - metrics::APTOS_PROCESSED_FAILED_TXNS_REASON_COUNT + metrics::PROCESSED_FAILED_TXNS_REASON_COUNT .with_label_values(&[ detailed_counters_label, process_type, @@ -366,20 +367,20 @@ pub fn update_counters_for_processed_chunk( match account_authenticator { AccountAuthenticator::Ed25519 { .. } => { signature_count += 1; - metrics::APTOS_PROCESSED_TXNS_AUTHENTICATOR + metrics::PROCESSED_TXNS_AUTHENTICATOR .with_label_values(&[process_type, "Ed25519"]) .inc(); }, AccountAuthenticator::MultiEd25519 { signature, .. 
} => { let count = signature.signatures().len(); signature_count += count; - metrics::APTOS_PROCESSED_TXNS_AUTHENTICATOR + metrics::PROCESSED_TXNS_AUTHENTICATOR .with_label_values(&[process_type, "Ed25519_in_MultiEd25519"]) .inc_by(count as u64); }, AccountAuthenticator::SingleKey { authenticator } => { signature_count += 1; - metrics::APTOS_PROCESSED_TXNS_AUTHENTICATOR + metrics::PROCESSED_TXNS_AUTHENTICATOR .with_label_values(&[ process_type, &format!("{}_in_SingleKey", authenticator.signature().name()), @@ -389,7 +390,7 @@ pub fn update_counters_for_processed_chunk( AccountAuthenticator::MultiKey { authenticator } => { for (_, signature) in authenticator.signatures() { signature_count += 1; - metrics::APTOS_PROCESSED_TXNS_AUTHENTICATOR + metrics::PROCESSED_TXNS_AUTHENTICATOR .with_label_values(&[ process_type, &format!("{}_in_MultiKey", signature.name()), @@ -398,31 +399,31 @@ pub fn update_counters_for_processed_chunk( } }, AccountAuthenticator::NoAccountAuthenticator => { - metrics::APTOS_PROCESSED_TXNS_AUTHENTICATOR + metrics::PROCESSED_TXNS_AUTHENTICATOR .with_label_values(&[process_type, "NoAccountAuthenticator"]) .inc(); }, }; } - metrics::APTOS_PROCESSED_TXNS_NUM_AUTHENTICATORS + metrics::PROCESSED_TXNS_NUM_AUTHENTICATORS .with_label_values(&[process_type]) .observe(signature_count as f64); } match user_txn.payload() { aptos_types::transaction::TransactionPayload::Script(_script) => { - metrics::APTOS_PROCESSED_USER_TRANSACTIONS_PAYLOAD_TYPE + metrics::PROCESSED_USER_TXNS_BY_PAYLOAD .with_label_values(&[process_type, "script", state]) .inc(); }, aptos_types::transaction::TransactionPayload::EntryFunction(function) => { - metrics::APTOS_PROCESSED_USER_TRANSACTIONS_PAYLOAD_TYPE + metrics::PROCESSED_USER_TXNS_BY_PAYLOAD .with_label_values(&[process_type, "function", state]) .inc(); let is_core = function.module().address() == &CORE_CODE_ADDRESS; - metrics::APTOS_PROCESSED_USER_TRANSACTIONS_ENTRY_FUNCTION_MODULE + 
metrics::PROCESSED_USER_TXNS_ENTRY_FUNCTION_BY_MODULE .with_label_values(&[ detailed_counters_label, process_type, @@ -438,7 +439,7 @@ pub fn update_counters_for_processed_chunk( ]) .inc(); if is_core && detailed_counters { - metrics::APTOS_PROCESSED_USER_TRANSACTIONS_ENTRY_FUNCTION_CORE_METHOD + metrics::PROCESSED_USER_TXNS_ENTRY_FUNCTION_BY_CORE_METHOD .with_label_values(&[ process_type, function.module().name().as_str(), @@ -449,14 +450,14 @@ pub fn update_counters_for_processed_chunk( } }, aptos_types::transaction::TransactionPayload::Multisig(_) => { - metrics::APTOS_PROCESSED_USER_TRANSACTIONS_PAYLOAD_TYPE + metrics::PROCESSED_USER_TXNS_BY_PAYLOAD .with_label_values(&[process_type, "multisig", state]) .inc(); }, // Deprecated. aptos_types::transaction::TransactionPayload::ModuleBundle(_) => { - metrics::APTOS_PROCESSED_USER_TRANSACTIONS_PAYLOAD_TYPE + metrics::PROCESSED_USER_TXNS_BY_PAYLOAD .with_label_values(&[process_type, "deprecated_module_bundle", state]) .inc(); }, @@ -475,7 +476,7 @@ pub fn update_counters_for_processed_chunk( ), ContractEvent::V2(_v2) => (false, "event".to_string()), }; - metrics::APTOS_PROCESSED_USER_TRANSACTIONS_CORE_EVENTS + metrics::PROCESSED_USER_TXNS_CORE_EVENTS .with_label_values(&[ detailed_counters_label, process_type, diff --git a/execution/executor/src/components/chunk_result_verifier.rs b/execution/executor/src/components/chunk_result_verifier.rs new file mode 100644 index 0000000000000..5cc222163c99f --- /dev/null +++ b/execution/executor/src/components/chunk_result_verifier.rs @@ -0,0 +1,161 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::{ensure, Result}; +use aptos_crypto::HashValue; +use aptos_executor_types::LedgerUpdateOutput; +use aptos_experimental_runtimes::thread_manager::THREAD_MANAGER; +use aptos_types::{ + epoch_state::EpochState, + ledger_info::LedgerInfoWithSignatures, + proof::{accumulator::InMemoryTransactionAccumulator, TransactionInfoListWithProof}, + 
transaction::TransactionInfo, +}; + +pub trait ChunkResultVerifier { + fn verify_chunk_result( + &self, + parent_accumulator: &InMemoryTransactionAccumulator, + ledger_update_output: &LedgerUpdateOutput, + ) -> Result<()>; + + fn transaction_infos(&self) -> &[TransactionInfo]; + + fn state_checkpoint_hashes(&self) -> Vec> { + self.transaction_infos() + .iter() + .map(|t| t.state_checkpoint_hash()) + .collect() + } + + fn maybe_select_chunk_ending_ledger_info( + &self, + ledger_update_output: &LedgerUpdateOutput, + next_epoch_state: Option<&EpochState>, + ) -> Result>; +} + +pub struct StateSyncChunkVerifier { + pub txn_infos_with_proof: TransactionInfoListWithProof, + pub verified_target_li: LedgerInfoWithSignatures, + pub epoch_change_li: Option, +} + +impl ChunkResultVerifier for StateSyncChunkVerifier { + fn verify_chunk_result( + &self, + parent_accumulator: &InMemoryTransactionAccumulator, + ledger_update_output: &LedgerUpdateOutput, + ) -> Result<()> { + // In consensus-only mode, we cannot verify the proof against the executed output, + // because the proof returned by the remote peer is an empty one. + if cfg!(feature = "consensus-only-perf-test") { + return Ok(()); + } + + THREAD_MANAGER.get_exe_cpu_pool().install(|| { + let first_version = parent_accumulator.num_leaves(); + + // Verify the chunk extends the parent accumulator. 
+ let parent_root_hash = parent_accumulator.root_hash(); + let num_overlap = self.txn_infos_with_proof.verify_extends_ledger( + first_version, + parent_root_hash, + Some(first_version), + )?; + assert_eq!(num_overlap, 0, "overlapped chunks"); + + // Verify transaction infos match + ledger_update_output + .ensure_transaction_infos_match(&self.txn_infos_with_proof.transaction_infos)?; + + Ok(()) + }) + } + + fn transaction_infos(&self) -> &[TransactionInfo] { + &self.txn_infos_with_proof.transaction_infos + } + + fn maybe_select_chunk_ending_ledger_info( + &self, + ledger_update_output: &LedgerUpdateOutput, + next_epoch_state: Option<&EpochState>, + ) -> Result> { + let li = self.verified_target_li.ledger_info(); + let txn_accumulator = &ledger_update_output.transaction_accumulator; + + if li.version() + 1 == txn_accumulator.num_leaves() { + // If the chunk corresponds to the target LI, the target LI can be added to storage. + ensure!( + li.transaction_accumulator_hash() == txn_accumulator.root_hash(), + "Root hash in target ledger info does not match local computation. {:?} != {:?}", + li, + txn_accumulator, + ); + Ok(Some(self.verified_target_li.clone())) + } else if let Some(epoch_change_li) = &self.epoch_change_li { + // If the epoch change LI is present, it must match the version of the chunk: + let li = epoch_change_li.ledger_info(); + + // Verify that the given ledger info corresponds to the new accumulator. + ensure!( + li.transaction_accumulator_hash() == txn_accumulator.root_hash(), + "Root hash of a given epoch LI does not match local computation. {:?} vs {:?}", + li, + txn_accumulator, + ); + ensure!( + li.version() + 1 == txn_accumulator.num_leaves(), + "Version of a given epoch LI does not match local computation. {:?} vs {:?}", + li, + txn_accumulator, + ); + ensure!( + li.ends_epoch(), + "Epoch change LI does not carry validator set. 
version:{}", + li.version(), + ); + ensure!( + li.next_epoch_state() == next_epoch_state, + "New validator set of a given epoch LI does not match local computation. {:?} vs {:?}", + li.next_epoch_state(), + next_epoch_state, + ); + Ok(Some(epoch_change_li.clone())) + } else { + ensure!( + next_epoch_state.is_none(), + "End of epoch chunk based on local computation but no EoE LedgerInfo provided. version: {:?}", + txn_accumulator.num_leaves().checked_sub(1), + ); + Ok(None) + } + } +} + +pub struct ReplayChunkVerifier { + pub transaction_infos: Vec, +} + +impl ChunkResultVerifier for ReplayChunkVerifier { + fn verify_chunk_result( + &self, + _parent_accumulator: &InMemoryTransactionAccumulator, + ledger_update_output: &LedgerUpdateOutput, + ) -> Result<()> { + ledger_update_output.ensure_transaction_infos_match(&self.transaction_infos) + } + + fn transaction_infos(&self) -> &[TransactionInfo] { + &self.transaction_infos + } + + fn maybe_select_chunk_ending_ledger_info( + &self, + _ledger_update_output: &LedgerUpdateOutput, + _next_epoch_state: Option<&EpochState>, + ) -> Result> { + Ok(None) + } +} diff --git a/execution/executor-types/src/executed_chunk.rs b/execution/executor/src/components/executed_chunk.rs similarity index 70% rename from execution/executor-types/src/executed_chunk.rs rename to execution/executor/src/components/executed_chunk.rs index 08770ed3caa97..28e8e68fcf87e 100644 --- a/execution/executor-types/src/executed_chunk.rs +++ b/execution/executor/src/components/executed_chunk.rs @@ -4,8 +4,10 @@ #![forbid(unsafe_code)] -use crate::{should_forward_to_subscription_service, ChunkCommitNotification, LedgerUpdateOutput}; use aptos_drop_helper::DEFAULT_DROPPER; +use aptos_executor_types::{ + should_forward_to_subscription_service, ChunkCommitNotification, LedgerUpdateOutput, +}; use aptos_storage_interface::{state_delta::StateDelta, ExecutedTrees}; #[cfg(test)] use aptos_types::account_config::NewEpochEvent; @@ -13,7 +15,7 @@ use 
aptos_types::account_config::NewEpochEvent; use aptos_types::contract_event::ContractEvent; use aptos_types::{ epoch_state::EpochState, ledger_info::LedgerInfoWithSignatures, - state_store::combine_or_add_sharded_state_updates, transaction::TransactionToCommit, + transaction::TransactionToCommit, }; #[derive(Debug)] @@ -26,16 +28,6 @@ pub struct ExecutedChunk { } impl ExecutedChunk { - pub fn reconfig_suffix(&self) -> Self { - assert!(self.next_epoch_state.is_some()); - Self { - result_state: self.result_state.clone(), - ledger_info: None, - next_epoch_state: self.next_epoch_state.clone(), - ledger_update_output: self.ledger_update_output.reconfig_suffix(), - } - } - pub fn transactions_to_commit(&self) -> &Vec { &self.ledger_update_output.to_commit } @@ -44,37 +36,6 @@ impl ExecutedChunk { self.next_epoch_state.is_some() } - pub fn combine(&mut self, rhs: Self) { - assert_eq!( - self.ledger_update_output.next_version(), - rhs.ledger_update_output.first_version(), - "Chunks to be combined are not consecutive.", - ); - let Self { - result_state, - ledger_info, - next_epoch_state, - ledger_update_output, - } = rhs; - - let old_result_state = self.result_state.replace_with(result_state); - // TODO(aldenhu): This is very unfortunate. Will revisit soon by remodeling the state diff. 
- if self.result_state.base_version > old_result_state.base_version - && old_result_state.base_version != old_result_state.current_version - { - combine_or_add_sharded_state_updates( - &mut self - .ledger_update_output - .state_updates_until_last_checkpoint, - old_result_state.updates_since_base, - ) - } - - self.ledger_info = ledger_info; - self.next_epoch_state = next_epoch_state; - self.ledger_update_output.combine(ledger_update_output) - } - pub fn result_view(&self) -> ExecutedTrees { ExecutedTrees::new( self.result_state.clone(), diff --git a/execution/executor/src/components/in_memory_state_calculator_v2.rs b/execution/executor/src/components/in_memory_state_calculator_v2.rs index 60fc3dcec30a5..055477fcc85ac 100644 --- a/execution/executor/src/components/in_memory_state_calculator_v2.rs +++ b/execution/executor/src/components/in_memory_state_calculator_v2.rs @@ -1,12 +1,13 @@ // Copyright © Aptos Foundation // SPDX-License-Identifier: Apache-2.0 -use crate::metrics::APTOS_EXECUTOR_OTHER_TIMERS_SECONDS; +use crate::metrics::OTHER_TIMERS; use anyhow::{anyhow, ensure, Result}; use aptos_crypto::{hash::CryptoHash, HashValue}; use aptos_drop_helper::DEFAULT_DROPPER; use aptos_executor_types::{parsed_transaction_output::TransactionsWithParsedOutput, ProofReader}; use aptos_logger::info; +use aptos_metrics_core::TimerHelper; use aptos_scratchpad::FrozenSparseMerkleTree; use aptos_storage_interface::{ cached_state_view::{ShardedStateCache, StateCache}, @@ -284,9 +285,8 @@ impl InMemoryStateCalculatorV2 { T: Sync + 'a, F: Fn(&'a T) -> &'a WriteSet + Sync, { - let _timer = APTOS_EXECUTOR_OTHER_TIMERS_SECONDS - .with_label_values(&["get_sharded_state_updates"]) - .start_timer(); + let _timer = OTHER_TIMERS.timer_with(&["get_sharded_state_updates"]); + outputs .par_iter() .map(|output| { @@ -303,9 +303,7 @@ impl InMemoryStateCalculatorV2 { } fn calculate_updates(state_updates_vec: &[ShardedStateUpdates]) -> ShardedStateUpdates { - let _timer = 
APTOS_EXECUTOR_OTHER_TIMERS_SECONDS - .with_label_values(&["calculate_updates"]) - .start_timer(); + let _timer = OTHER_TIMERS.timer_with(&["calculate_updates"]); let mut updates: ShardedStateUpdates = create_empty_sharded_state_updates(); updates .par_iter_mut() @@ -347,7 +345,7 @@ impl InMemoryStateCalculatorV2 { sharded_state_cache: &ShardedStateCache, updates: &[&ShardedStateUpdates; 2], ) -> StateStorageUsage { - let _timer = APTOS_EXECUTOR_OTHER_TIMERS_SECONDS + let _timer = OTHER_TIMERS .with_label_values(&["calculate_usage"]) .start_timer(); if old_usage.is_untracked() { @@ -403,9 +401,7 @@ impl InMemoryStateCalculatorV2 { usage: StateStorageUsage, proof_reader: &ProofReader, ) -> Result> { - let _timer = APTOS_EXECUTOR_OTHER_TIMERS_SECONDS - .with_label_values(&["make_checkpoint"]) - .start_timer(); + let _timer = OTHER_TIMERS.timer_with(&["make_checkpoint"]); // Update SMT. // @@ -429,10 +425,10 @@ impl InMemoryStateCalculatorV2 { let configuration = ConfigurationResource::fetch_config(&state_cache_view) .ok_or_else(|| anyhow!("Configuration resource not touched on epoch change"))?; - Ok(EpochState { - epoch: configuration.epoch(), - verifier: (&validator_set).into(), - }) + Ok(EpochState::new( + configuration.epoch(), + (&validator_set).into(), + )) } fn validate_input_for_block( diff --git a/execution/executor/src/components/mod.rs b/execution/executor/src/components/mod.rs index ed1a8d28ac3da..e4a96049b339c 100644 --- a/execution/executor/src/components/mod.rs +++ b/execution/executor/src/components/mod.rs @@ -9,3 +9,7 @@ pub mod block_tree; pub mod chunk_commit_queue; pub mod chunk_output; pub mod in_memory_state_calculator_v2; + +pub mod chunk_result_verifier; +pub mod executed_chunk; +pub mod transaction_chunk; diff --git a/execution/executor/src/components/transaction_chunk.rs b/execution/executor/src/components/transaction_chunk.rs new file mode 100644 index 0000000000000..41b995cfea428 --- /dev/null +++ 
b/execution/executor/src/components/transaction_chunk.rs @@ -0,0 +1,111 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + components::chunk_output::ChunkOutput, + metrics::{CHUNK_OTHER_TIMERS, VM_EXECUTE_CHUNK}, +}; +use anyhow::Result; +use aptos_experimental_runtimes::thread_manager::optimal_min_len; +use aptos_metrics_core::TimerHelper; +use aptos_storage_interface::cached_state_view::CachedStateView; +use aptos_types::{ + block_executor::config::BlockExecutorConfigFromOnchain, + transaction::{Transaction, TransactionOutput, Version}, +}; +use aptos_vm::VMExecutor; +use once_cell::sync::Lazy; +use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; +use std::sync::Arc; + +pub static SIG_VERIFY_POOL: Lazy> = Lazy::new(|| { + Arc::new( + rayon::ThreadPoolBuilder::new() + .num_threads(8) // More than 8 threads doesn't seem to help much + .thread_name(|index| format!("chunk-sig-check-{}", index)) + .build() + .unwrap(), + ) +}); + +pub trait TransactionChunk { + fn first_version(&self) -> Version; + + fn len(&self) -> usize; + + fn is_empty(&self) -> bool { + self.len() == 0 + } + + fn into_output(self, state_view: CachedStateView) -> Result; +} + +pub struct ChunkToExecute { + pub transactions: Vec, + pub first_version: Version, +} + +impl TransactionChunk for ChunkToExecute { + fn first_version(&self) -> Version { + self.first_version + } + + fn len(&self) -> usize { + self.transactions.len() + } + + fn into_output(self, state_view: CachedStateView) -> Result { + let ChunkToExecute { + transactions, + first_version: _, + } = self; + + // TODO(skedia) In the chunk executor path, we ideally don't need to verify the signature + // as only transactions with verified signatures are committed to the storage. 
+ let sig_verified_txns = { + let _timer = CHUNK_OTHER_TIMERS.timer_with(&["sig_verify"]); + + let num_txns = transactions.len(); + SIG_VERIFY_POOL.install(|| { + transactions + .into_par_iter() + .with_min_len(optimal_min_len(num_txns, 32)) + .map(|t| t.into()) + .collect::>() + }) + }; + + let _timer = VM_EXECUTE_CHUNK.start_timer(); + ChunkOutput::by_transaction_execution::( + sig_verified_txns.into(), + state_view, + BlockExecutorConfigFromOnchain::new_no_block_limit(), + ) + } +} + +pub struct ChunkToApply { + pub transactions: Vec, + pub transaction_outputs: Vec, + pub first_version: Version, +} + +impl TransactionChunk for ChunkToApply { + fn first_version(&self) -> Version { + self.first_version + } + + fn len(&self) -> usize { + self.transactions.len() + } + + fn into_output(self, state_view: CachedStateView) -> Result { + let Self { + transactions, + transaction_outputs, + first_version: _, + } = self; + + ChunkOutput::by_transaction_output(transactions, transaction_outputs, state_view) + } +} diff --git a/execution/executor/src/db_bootstrapper.rs b/execution/executor/src/db_bootstrapper.rs index 1be7ea06bf965..5015a48e84b9c 100644 --- a/execution/executor/src/db_bootstrapper.rs +++ b/execution/executor/src/db_bootstrapper.rs @@ -4,10 +4,9 @@ #![forbid(unsafe_code)] -use crate::components::chunk_output::ChunkOutput; +use crate::components::{chunk_output::ChunkOutput, executed_chunk::ExecutedChunk}; use anyhow::{anyhow, ensure, format_err, Result}; use aptos_crypto::HashValue; -use aptos_executor_types::ExecutedChunk; use aptos_logger::prelude::*; use aptos_storage_interface::{ async_proof_fetcher::AsyncProofFetcher, cached_state_view::CachedStateView, DbReaderWriter, diff --git a/execution/executor/src/metrics.rs b/execution/executor/src/metrics.rs index af9fea11f4067..c3750f557ad7a 100644 --- a/execution/executor/src/metrics.rs +++ b/execution/executor/src/metrics.rs @@ -9,7 +9,7 @@ use aptos_metrics_core::{ }; use once_cell::sync::Lazy; -pub static 
APTOS_EXECUTOR_EXECUTE_CHUNK_SECONDS: Lazy = Lazy::new(|| { +pub static EXECUTE_CHUNK: Lazy = Lazy::new(|| { register_histogram!( // metric name "aptos_executor_execute_chunk_seconds", @@ -20,7 +20,7 @@ pub static APTOS_EXECUTOR_EXECUTE_CHUNK_SECONDS: Lazy = Lazy::new(|| .unwrap() }); -pub static APTOS_EXECUTOR_APPLY_CHUNK_SECONDS: Lazy = Lazy::new(|| { +pub static APPLY_CHUNK: Lazy = Lazy::new(|| { register_histogram!( // metric name "aptos_executor_apply_chunk_seconds", @@ -31,7 +31,7 @@ pub static APTOS_EXECUTOR_APPLY_CHUNK_SECONDS: Lazy = Lazy::new(|| { .unwrap() }); -pub static APTOS_EXECUTOR_COMMIT_CHUNK_SECONDS: Lazy = Lazy::new(|| { +pub static COMMIT_CHUNK: Lazy = Lazy::new(|| { register_histogram!( // metric name "aptos_executor_commit_chunk_seconds", @@ -42,7 +42,7 @@ pub static APTOS_EXECUTOR_COMMIT_CHUNK_SECONDS: Lazy = Lazy::new(|| { .unwrap() }); -pub static APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS: Lazy = Lazy::new(|| { +pub static VM_EXECUTE_BLOCK: Lazy = Lazy::new(|| { register_histogram!( // metric name "aptos_executor_vm_execute_block_seconds", @@ -53,7 +53,7 @@ pub static APTOS_EXECUTOR_VM_EXECUTE_BLOCK_SECONDS: Lazy = Lazy::new( .unwrap() }); -pub static APTOS_EXECUTOR_OTHER_TIMERS_SECONDS: Lazy = Lazy::new(|| { +pub static OTHER_TIMERS: Lazy = Lazy::new(|| { register_histogram_vec!( // metric name "aptos_executor_other_timers_seconds", @@ -65,11 +65,11 @@ pub static APTOS_EXECUTOR_OTHER_TIMERS_SECONDS: Lazy = Lazy::new(| .unwrap() }); -pub static APTOS_EXECUTOR_ERRORS: Lazy = Lazy::new(|| { +pub static EXECUTOR_ERRORS: Lazy = Lazy::new(|| { register_int_counter!("aptos_executor_error_total", "Cumulative number of errors").unwrap() }); -pub static APTOS_EXECUTOR_EXECUTE_BLOCK_SECONDS: Lazy = Lazy::new(|| { +pub static EXECUTE_BLOCK: Lazy = Lazy::new(|| { register_histogram!( // metric name "aptos_executor_execute_block_seconds", @@ -80,7 +80,7 @@ pub static APTOS_EXECUTOR_EXECUTE_BLOCK_SECONDS: Lazy = Lazy::new(|| .unwrap() }); -pub static 
APTOS_EXECUTOR_LEDGER_UPDATE_SECONDS: Lazy = Lazy::new(|| { +pub static UPDATE_LEDGER: Lazy = Lazy::new(|| { register_histogram!( // metric name "aptos_executor_ledger_update_seconds", @@ -91,7 +91,7 @@ pub static APTOS_EXECUTOR_LEDGER_UPDATE_SECONDS: Lazy = Lazy::new(|| .unwrap() }); -pub static APTOS_CHUNK_EXECUTOR_OTHER_SECONDS: Lazy = Lazy::new(|| { +pub static CHUNK_OTHER_TIMERS: Lazy = Lazy::new(|| { register_histogram_vec!( // metric name "aptos_chunk_executor_other_seconds", @@ -103,7 +103,7 @@ pub static APTOS_CHUNK_EXECUTOR_OTHER_SECONDS: Lazy = Lazy::new(|| .unwrap() }); -pub static APTOS_EXECUTOR_VM_EXECUTE_CHUNK_SECONDS: Lazy = Lazy::new(|| { +pub static VM_EXECUTE_CHUNK: Lazy = Lazy::new(|| { register_histogram!( // metric name "aptos_executor_vm_execute_chunk_seconds", @@ -114,7 +114,7 @@ pub static APTOS_EXECUTOR_VM_EXECUTE_CHUNK_SECONDS: Lazy = Lazy::new( .unwrap() }); -pub static APTOS_EXECUTOR_COMMIT_BLOCKS_SECONDS: Lazy = Lazy::new(|| { +pub static COMMIT_BLOCKS: Lazy = Lazy::new(|| { register_histogram!( // metric name "aptos_executor_commit_blocks_seconds", @@ -125,7 +125,7 @@ pub static APTOS_EXECUTOR_COMMIT_BLOCKS_SECONDS: Lazy = Lazy::new(|| .unwrap() }); -pub static APTOS_EXECUTOR_SAVE_TRANSACTIONS_SECONDS: Lazy = Lazy::new(|| { +pub static SAVE_TRANSACTIONS: Lazy = Lazy::new(|| { register_histogram!( // metric name "aptos_executor_save_transactions_seconds", @@ -136,7 +136,7 @@ pub static APTOS_EXECUTOR_SAVE_TRANSACTIONS_SECONDS: Lazy = Lazy::new .unwrap() }); -pub static APTOS_EXECUTOR_TRANSACTIONS_SAVED: Lazy = Lazy::new(|| { +pub static TRANSACTIONS_SAVED: Lazy = Lazy::new(|| { register_histogram!( // metric name "aptos_executor_transactions_saved", @@ -151,7 +151,7 @@ pub static APTOS_EXECUTOR_TRANSACTIONS_SAVED: Lazy = Lazy::new(|| { ////////////////////////////////////// /// Count of the executed transactions since last restart. 
-pub static APTOS_PROCESSED_TXNS_COUNT: Lazy = Lazy::new(|| { +pub static PROCESSED_TXNS_COUNT: Lazy = Lazy::new(|| { register_int_counter_vec!( "aptos_processed_txns_count", "Count of the transactions since last restart. state is success, failed or retry", @@ -161,7 +161,7 @@ pub static APTOS_PROCESSED_TXNS_COUNT: Lazy = Lazy::new(|| { }); /// Count of the executed transactions since last restart. -pub static APTOS_PROCESSED_FAILED_TXNS_REASON_COUNT: Lazy = Lazy::new(|| { +pub static PROCESSED_FAILED_TXNS_REASON_COUNT: Lazy = Lazy::new(|| { register_int_counter_vec!( "aptos_processed_failed_txns_reason_count", "Count of the transactions since last restart. state is success, failed or retry", @@ -171,7 +171,7 @@ pub static APTOS_PROCESSED_FAILED_TXNS_REASON_COUNT: Lazy = Lazy: }); /// Counter of executed user transactions by payload type -pub static APTOS_PROCESSED_USER_TRANSACTIONS_PAYLOAD_TYPE: Lazy = Lazy::new(|| { +pub static PROCESSED_USER_TXNS_BY_PAYLOAD: Lazy = Lazy::new(|| { register_int_counter_vec!( "aptos_processed_user_transactions_by_payload", "Counter of processed user transactions by payload type", @@ -181,18 +181,17 @@ pub static APTOS_PROCESSED_USER_TRANSACTIONS_PAYLOAD_TYPE: Lazy = }); /// Counter of executed EntryFunction user transactions by module -pub static APTOS_PROCESSED_USER_TRANSACTIONS_ENTRY_FUNCTION_MODULE: Lazy = - Lazy::new(|| { - register_int_counter_vec!( - "aptos_processed_user_transactions_entry_function_by_module", - "Counter of processed EntryFunction user transactions by module", - &["is_detailed", "process", "account", "name", "state"] - ) - .unwrap() - }); +pub static PROCESSED_USER_TXNS_ENTRY_FUNCTION_BY_MODULE: Lazy = Lazy::new(|| { + register_int_counter_vec!( + "aptos_processed_user_transactions_entry_function_by_module", + "Counter of processed EntryFunction user transactions by module", + &["is_detailed", "process", "account", "name", "state"] + ) + .unwrap() +}); /// Counter of executed EntryFunction user transaction 
for core address by method -pub static APTOS_PROCESSED_USER_TRANSACTIONS_ENTRY_FUNCTION_CORE_METHOD: Lazy = +pub static PROCESSED_USER_TXNS_ENTRY_FUNCTION_BY_CORE_METHOD: Lazy = Lazy::new(|| { register_int_counter_vec!( "aptos_processed_user_transactions_entry_function_by_core_method", @@ -203,7 +202,7 @@ pub static APTOS_PROCESSED_USER_TRANSACTIONS_ENTRY_FUNCTION_CORE_METHOD: Lazy = Lazy::new(|| { +pub static PROCESSED_USER_TXNS_CORE_EVENTS: Lazy = Lazy::new(|| { register_int_counter_vec!( "aptos_processed_user_transactions_core_events", "Counter of processed EntryFunction user transaction for core address by method", @@ -212,7 +211,7 @@ pub static APTOS_PROCESSED_USER_TRANSACTIONS_CORE_EVENTS: Lazy = .unwrap() }); -pub static APTOS_PROCESSED_TXNS_OUTPUT_SIZE: Lazy = Lazy::new(|| { +pub static PROCESSED_TXNS_OUTPUT_SIZE: Lazy = Lazy::new(|| { register_histogram_vec!( "aptos_processed_txns_output_size", "Histogram of transaction output sizes", @@ -222,7 +221,7 @@ pub static APTOS_PROCESSED_TXNS_OUTPUT_SIZE: Lazy = Lazy::new(|| { .unwrap() }); -pub static APTOS_PROCESSED_TXNS_NUM_AUTHENTICATORS: Lazy = Lazy::new(|| { +pub static PROCESSED_TXNS_NUM_AUTHENTICATORS: Lazy = Lazy::new(|| { register_histogram_vec!( "aptos_processed_txns_num_authenticators", "Histogram of number of authenticators in a transaction", @@ -232,7 +231,7 @@ pub static APTOS_PROCESSED_TXNS_NUM_AUTHENTICATORS: Lazy = Lazy::n .unwrap() }); -pub static APTOS_PROCESSED_TXNS_AUTHENTICATOR: Lazy = Lazy::new(|| { +pub static PROCESSED_TXNS_AUTHENTICATOR: Lazy = Lazy::new(|| { register_int_counter_vec!( "aptos_processed_txns_authenticator", "Counter of authenticators by type, for processed transactions", diff --git a/execution/executor/src/tests/mod.rs b/execution/executor/src/tests/mod.rs index e523dbeb09d7e..c50d2480a1b84 100644 --- a/execution/executor/src/tests/mod.rs +++ b/execution/executor/src/tests/mod.rs @@ -4,7 +4,7 @@ use crate::{ block_executor::BlockExecutor, - 
components::chunk_output::ChunkOutput, + components::{chunk_output::ChunkOutput, executed_chunk::ExecutedChunk}, db_bootstrapper::{generate_waypoint, maybe_bootstrap}, mock_vm::{ encode_mint_transaction, encode_reconfiguration_transaction, encode_transfer_transaction, @@ -14,7 +14,8 @@ use crate::{ use aptos_crypto::{ed25519::Ed25519PrivateKey, HashValue, PrivateKey, SigningKey, Uniform}; use aptos_db::AptosDB; use aptos_executor_types::{ - BlockExecutorTrait, ExecutedChunk, LedgerUpdateOutput, TransactionReplayer, VerifyExecutionMode, + BlockExecutorTrait, ChunkExecutorTrait, LedgerUpdateOutput, TransactionReplayer, + VerifyExecutionMode, }; use aptos_storage_interface::{ async_proof_fetcher::AsyncProofFetcher, DbReaderWriter, ExecutedTrees, Result, @@ -27,7 +28,6 @@ use aptos_types::{ bytes::NumToBytes, chain_id::ChainId, ledger_info::{LedgerInfo, LedgerInfoWithSignatures}, - proof::definition::LeafCount, state_store::{state_key::StateKey, state_value::StateValue, StateViewId}, test_helpers::transaction_test_helpers::{block, TEST_BLOCK_EXECUTOR_ONCHAIN_CONFIG}, transaction::{ @@ -38,6 +38,7 @@ use aptos_types::{ }, write_set::{WriteOp, WriteSet, WriteSetMut}, }; +use itertools::Itertools; use proptest::prelude::*; use std::{iter::once, sync::Arc}; @@ -450,7 +451,7 @@ fn apply_transaction_by_writeset( ) { let ledger_view: ExecutedTrees = db.reader.get_latest_executed_trees().unwrap(); - let transactions_and_outputs = transactions_and_writesets + let (txns, txn_outs) = transactions_and_writesets .iter() .map(|(txn, write_set)| { ( @@ -474,7 +475,7 @@ fn apply_transaction_by_writeset( TransactionAuxiliaryData::default(), ), ))) - .collect(); + .unzip(); let state_view = ledger_view .verified_state_view( @@ -484,8 +485,7 @@ fn apply_transaction_by_writeset( ) .unwrap(); - let chunk_output = - ChunkOutput::by_transaction_output(transactions_and_outputs, state_view).unwrap(); + let chunk_output = ChunkOutput::by_transaction_output(txns, txn_outs, state_view).unwrap(); 
let (executed, _, _) = chunk_output.apply_to_ledger(&ledger_view, None).unwrap(); let ExecutedChunk { @@ -742,23 +742,23 @@ fn run_transactions_naive( } proptest! { -#![proptest_config(ProptestConfig::with_cases(5))] + #![proptest_config(ProptestConfig::with_cases(5))] -#[test] -#[cfg_attr(feature = "consensus-only-perf-test", ignore)] -fn test_reconfiguration_with_retry_transaction_status( - (num_user_txns, reconfig_txn_index) in (2..5u64).prop_flat_map(|num_user_txns| { - ( - Just(num_user_txns), - 0..num_user_txns - 1 // avoid state checkpoint right after reconfig - ) + #[test] + #[cfg_attr(feature = "consensus-only-perf-test", ignore)] + fn test_reconfiguration_with_retry_transaction_status( + (num_user_txns, reconfig_txn_index) in (2..5usize).prop_flat_map(|num_user_txns| { + ( + Just(num_user_txns), + 0..num_user_txns - 1 // avoid state checkpoint right after reconfig + ) }).no_shrink()) { let executor = TestExecutor::new(); let block_id = gen_block_id(1); - let mut block = TestBlock::new(num_user_txns, 10, block_id); - let num_input_txns = block.txns.len() as LeafCount; - block.txns[reconfig_txn_index as usize] = encode_reconfiguration_transaction().into(); + let mut block = TestBlock::new(num_user_txns as u64, 10, block_id); + let num_input_txns = block.txns.len(); + block.txns[reconfig_txn_index] = encode_reconfiguration_transaction().into(); let parent_block_id = executor.committed_block_id(); let output = executor.execute_block( @@ -769,34 +769,52 @@ fn test_reconfiguration_with_retry_transaction_status( let retry_iter = output.compute_status_for_input_txns().iter() .skip_while(|status| matches!(*status, TransactionStatus::Keep(_))); prop_assert_eq!( - retry_iter.take_while(|status| matches!(*status,TransactionStatus::Retry)).count() as u64, + retry_iter.take_while(|status| matches!(*status,TransactionStatus::Retry)).count(), num_input_txns - reconfig_txn_index - 1 ); // commit - let ledger_info = gen_ledger_info(reconfig_txn_index + 1 /* version */, 
output.root_hash(), block_id, 1 /* timestamp */); + let ledger_info = gen_ledger_info( + reconfig_txn_index as Version + 1 /* version */, + output.root_hash(), + block_id, + 1 /* timestamp */ + ); executor.commit_blocks(vec![block_id], ledger_info).unwrap(); let parent_block_id = executor.committed_block_id(); // retry txns after reconfiguration + let retry_txns = block.txns.iter().skip(reconfig_txn_index + 1).cloned().collect_vec(); let retry_block_id = gen_block_id(2); let retry_output = executor.execute_block( - (retry_block_id, block.txns.iter().skip(reconfig_txn_index as usize + 1).cloned().collect::>()).into(), parent_block_id, TEST_BLOCK_EXECUTOR_ONCHAIN_CONFIG + ( retry_block_id, retry_txns ).into(), + parent_block_id, + TEST_BLOCK_EXECUTOR_ONCHAIN_CONFIG ).unwrap(); prop_assert!(retry_output.compute_status_for_input_txns().iter().all(|s| matches!(*s, TransactionStatus::Keep(_)))); // Second block has StateCheckpoint/BlockPrologue transaction added. - let ledger_version = num_input_txns + 1; + let ledger_version = num_input_txns as Version + 1; // commit - let ledger_info = gen_ledger_info(ledger_version, retry_output.root_hash(), retry_block_id, 12345 /* timestamp */); + let ledger_info = gen_ledger_info( + ledger_version, + retry_output.root_hash(), + retry_block_id, + 12345 /* timestamp */ + ); executor.commit_blocks(vec![retry_block_id], ledger_info).unwrap(); // get txn_infos from db let db = executor.db.reader.clone(); prop_assert_eq!(db.expect_synced_version(), ledger_version); - let txn_list = db.get_transactions(1 /* start version */, ledger_version, ledger_version /* ledger version */, false /* fetch events */).unwrap(); - prop_assert_eq!(&block.inner_txns(), &txn_list.transactions[..num_input_txns as usize]); + let txn_list = db.get_transactions( + 1, /* start version */ + ledger_version, /* version */ + ledger_version, /* ledger version */ + false /* fetch events */ + ).unwrap(); + prop_assert_eq!(&block.inner_txns(), 
&txn_list.transactions[..num_input_txns]); let txn_infos = txn_list.proof.transaction_infos; let write_sets = db.get_write_set_iterator(1, ledger_version).unwrap().collect::>().unwrap(); let event_vecs = db.get_events_iterator(1, ledger_version).unwrap().collect::>().unwrap(); @@ -804,8 +822,20 @@ fn test_reconfiguration_with_retry_transaction_status( // replay txns in one batch across epoch boundary, // and the replayer should deal with `Retry`s automatically let replayer = chunk_executor_tests::TestExecutor::new(); - replayer.executor.replay(txn_list.transactions, txn_infos, write_sets, event_vecs, &VerifyExecutionMode::verify_all()).unwrap(); + let chunks_enqueued = replayer.executor.enqueue_chunks( + txn_list.transactions, + txn_infos, + write_sets, + event_vecs, + &VerifyExecutionMode::verify_all() + ).unwrap(); + assert_eq!(chunks_enqueued, 2); + replayer.executor.update_ledger().unwrap(); + replayer.executor.update_ledger().unwrap(); + + replayer.executor.commit().unwrap(); replayer.executor.commit().unwrap(); + prop_assert!(replayer.executor.is_empty()); let replayed_db = replayer.db.reader.clone(); prop_assert_eq!( replayed_db.get_accumulator_root_hash(ledger_version).unwrap(), diff --git a/keyless/common/src/input_processing/witness_gen.rs b/keyless/common/src/input_processing/witness_gen.rs index baaf2e783e6ff..6df60c3822113 100644 --- a/keyless/common/src/input_processing/witness_gen.rs +++ b/keyless/common/src/input_processing/witness_gen.rs @@ -12,7 +12,9 @@ pub trait PathStr { impl PathStr for NamedTempFile { fn path_str(&self) -> Result<&str> { - self.path().to_str().ok_or(anyhow!("tempfile path error")) + self.path() + .to_str() + .ok_or_else(|| anyhow!("tempfile path error")) } } diff --git a/keyless/pepper/service/src/lib.rs b/keyless/pepper/service/src/lib.rs index 0c52182fd1945..f231a7fd4d8f7 100644 --- a/keyless/pepper/service/src/lib.rs +++ b/keyless/pepper/service/src/lib.rs @@ -320,7 +320,14 @@ async fn process_common( return 
Err(BadRequest("epk expired".to_string())); } - if exp_date_secs >= claims.claims.iat + config.max_exp_horizon_secs { + let (max_exp_data_secs, overflowed) = claims + .claims + .iat + .overflowing_add(config.max_exp_horizon_secs); + if overflowed { + return Err(BadRequest("max_exp_data_secs overflowed".to_string())); + } + if exp_date_secs >= max_exp_data_secs { return Err(BadRequest("epk expiry date too far".to_string())); } @@ -538,3 +545,6 @@ async fn update_account_recovery_db(input: &PepperInput) -> Result<(), Processin }, } } + +#[cfg(test)] +mod tests; diff --git a/keyless/pepper/service/src/tests.rs b/keyless/pepper/service/src/tests.rs new file mode 100644 index 0000000000000..58b09c0bb79a6 --- /dev/null +++ b/keyless/pepper/service/src/tests.rs @@ -0,0 +1,50 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{process_common, ProcessingFailure}; +use aptos_crypto::ed25519::Ed25519PublicKey; +use aptos_types::{ + keyless::{ + circuit_testcases::{ + sample_jwt_payload_json_overrides, SAMPLE_EXP_DATE, SAMPLE_JWT_EXTRA_FIELD, + SAMPLE_NONCE, SAMPLE_TEST_ISS_VALUE, SAMPLE_UID_VAL, + }, + test_utils::{get_sample_epk_blinder, get_sample_esk, get_sample_jwt_token_from_payload}, + }, + transaction::authenticator::EphemeralPublicKey, +}; +use uuid::Uuid; + +#[tokio::test] +async fn process_common_should_fail_if_max_exp_data_secs_overflowed() { + let session_id = Uuid::new_v4(); + let sk = get_sample_esk(); + let pk = Ed25519PublicKey::from(&sk); + + let jwt_payload = sample_jwt_payload_json_overrides( + SAMPLE_TEST_ISS_VALUE, + SAMPLE_UID_VAL, + SAMPLE_JWT_EXTRA_FIELD.as_str(), + u64::MAX - 1, // unusual iat + SAMPLE_NONCE.as_str(), + ); + + let jwt = get_sample_jwt_token_from_payload(&jwt_payload); + + let process_result = process_common( + &session_id, + jwt, + EphemeralPublicKey::ed25519(pk), + SAMPLE_EXP_DATE, + get_sample_epk_blinder(), + None, + None, + false, + None, + false, + ) + .await; + assert!( + 
matches!(process_result, Err(ProcessingFailure::BadRequest(e)) if e.as_str() == "max_exp_data_secs overflowed") + ); +} diff --git a/mempool/src/core_mempool/index.rs b/mempool/src/core_mempool/index.rs index 959f3ca091120..3194a29ae86ba 100644 --- a/mempool/src/core_mempool/index.rs +++ b/mempool/src/core_mempool/index.rs @@ -513,6 +513,13 @@ impl ParkingLotIndex { pub(crate) fn size(&self) -> usize { self.size } + + pub(crate) fn get_addresses(&self) -> Vec<(AccountAddress, u64)> { + self.data + .iter() + .map(|(addr, txns)| (*addr, txns.len() as u64)) + .collect::>() + } } /// Logical pointer to `MempoolTransaction`. diff --git a/mempool/src/core_mempool/mempool.rs b/mempool/src/core_mempool/mempool.rs index bf6d52a462aa0..b0a0ff613e96a 100644 --- a/mempool/src/core_mempool/mempool.rs +++ b/mempool/src/core_mempool/mempool.rs @@ -597,4 +597,8 @@ impl Mempool { pub fn get_transaction_store(&self) -> &TransactionStore { &self.transactions } + + pub fn get_parking_lot_addresses(&self) -> Vec<(AccountAddress, u64)> { + self.transactions.get_parking_lot_addresses() + } } diff --git a/mempool/src/core_mempool/transaction_store.rs b/mempool/src/core_mempool/transaction_store.rs index c225a52725707..6bf63be986a64 100644 --- a/mempool/src/core_mempool/transaction_store.rs +++ b/mempool/src/core_mempool/transaction_store.rs @@ -916,4 +916,8 @@ impl TransactionStore { pub(crate) fn get_transactions(&self) -> &HashMap { &self.transactions } + + pub(crate) fn get_parking_lot_addresses(&self) -> Vec<(AccountAddress, u64)> { + self.parking_lot_index.get_addresses() + } } diff --git a/mempool/src/counters.rs b/mempool/src/counters.rs index 464523419e886..838506efd7fc2 100644 --- a/mempool/src/counters.rs +++ b/mempool/src/counters.rs @@ -67,6 +67,7 @@ pub const SUCCESS_LABEL: &str = "success"; // Bounded executor task labels pub const CLIENT_EVENT_LABEL: &str = "client_event"; pub const CLIENT_EVENT_GET_TXN_LABEL: &str = "client_event_get_txn"; +pub const 
CLIENT_EVENT_GET_PARKING_LOT_ADDRESSES: &str = "client_event_get_parking_lot_addresses"; pub const RECONFIG_EVENT_LABEL: &str = "reconfig"; pub const PEER_BROADCAST_EVENT_LABEL: &str = "peer_broadcast"; @@ -284,7 +285,7 @@ pub static CORE_MEMPOOL_GC_EVENT_COUNT: Lazy = Lazy::new(|| { "aptos_core_mempool_gc_event_count", "Number of times the periodic garbage-collection event occurs, regardless of how many txns were actually removed", &["type"]) - .unwrap() + .unwrap() }); /// Counter for number of periodic client garbage-collection (=GC) events that happen with eager @@ -362,7 +363,7 @@ static MEMPOOL_SERVICE_TXNS: Lazy = Lazy::new(|| { &["type"], TXN_COUNT_BUCKETS.clone() ) - .unwrap() + .unwrap() }); pub fn mempool_service_transactions(label: &'static str, num: usize) { diff --git a/mempool/src/shared_mempool/coordinator.rs b/mempool/src/shared_mempool/coordinator.rs index 76a36e699cd79..2ddfd088e1917 100644 --- a/mempool/src/shared_mempool/coordinator.rs +++ b/mempool/src/shared_mempool/coordinator.rs @@ -217,6 +217,11 @@ async fn handle_client_request( )) .await; }, + MempoolClientRequest::GetAddressesFromParkingLot(callback) => { + bounded_executor + .spawn(tasks::process_parking_lot_addresses(smp.clone(), callback)) + .await; + }, } } diff --git a/mempool/src/shared_mempool/network.rs b/mempool/src/shared_mempool/network.rs index 36a672721093d..3f5bee5f9b520 100644 --- a/mempool/src/shared_mempool/network.rs +++ b/mempool/src/shared_mempool/network.rs @@ -370,7 +370,7 @@ impl> MempoolNetworkInterf // If we don't have any info about the node, we shouldn't broadcast to it let state = sync_states .get_mut(&peer) - .ok_or(BroadcastError::PeerNotFound(peer))?; + .ok_or_else(|| BroadcastError::PeerNotFound(peer))?; // If backoff mode is on for this peer, only execute broadcasts that were scheduled as a backoff broadcast. 
// This is to ensure the backoff mode is actually honored (there is a chance a broadcast was scheduled @@ -607,7 +607,7 @@ impl> MempoolNetworkInterf let mut sync_states = self.sync_states.write(); let state = sync_states .get_mut(&peer) - .ok_or(BroadcastError::PeerNotFound(peer))?; + .ok_or_else(|| BroadcastError::PeerNotFound(peer))?; // Update peer sync state with info from above broadcast. state.update(&message_id); diff --git a/mempool/src/shared_mempool/tasks.rs b/mempool/src/shared_mempool/tasks.rs index d1acbe7ce1a2a..71a8c6f392a56 100644 --- a/mempool/src/shared_mempool/tasks.rs +++ b/mempool/src/shared_mempool/tasks.rs @@ -30,6 +30,7 @@ use aptos_metrics_core::HistogramTimer; use aptos_network::application::interface::NetworkClientInterface; use aptos_storage_interface::state_view::LatestDbStateCheckpointView; use aptos_types::{ + account_address::AccountAddress, mempool_status::{MempoolStatus, MempoolStatusCode}, on_chain_config::{OnChainConfigPayload, OnChainConfigProvider, OnChainConsensusConfig}, transaction::SignedTransaction, @@ -151,6 +152,25 @@ pub(crate) async fn process_client_transaction_submission( + smp: SharedMempool, + callback: oneshot::Sender>, +) where + NetworkClient: NetworkClientInterface, + TransactionValidator: TransactionValidation + 'static, +{ + let addresses = smp.mempool.lock().get_parking_lot_addresses(); + + if callback.send(addresses).is_err() { + warn!(LogSchema::event_log( + LogEntry::JsonRpc, + LogEvent::CallbackFail + )); + counters::CLIENT_CALLBACK_FAIL.inc(); + } +} + /// Processes get transaction by hash request by client. 
pub(crate) async fn process_client_get_transaction( smp: SharedMempool, diff --git a/mempool/src/shared_mempool/types.rs b/mempool/src/shared_mempool/types.rs index 8f92858ad1873..b2eb6572b89b5 100644 --- a/mempool/src/shared_mempool/types.rs +++ b/mempool/src/shared_mempool/types.rs @@ -21,7 +21,8 @@ use aptos_infallible::{Mutex, RwLock}; use aptos_network::application::interface::NetworkClientInterface; use aptos_storage_interface::DbReader; use aptos_types::{ - mempool_status::MempoolStatus, transaction::SignedTransaction, vm_status::DiscardedVMStatus, + account_address::AccountAddress, mempool_status::MempoolStatus, transaction::SignedTransaction, + vm_status::DiscardedVMStatus, }; use aptos_vm_validator::vm_validator::TransactionValidation; use futures::{ @@ -235,8 +236,13 @@ pub type SubmissionStatus = (MempoolStatus, Option); pub type SubmissionStatusBundle = (SignedTransaction, SubmissionStatus); pub enum MempoolClientRequest { + /// Submits a transaction to the mempool and returns its submission status SubmitTransaction(SignedTransaction, oneshot::Sender>), + /// Retrieves a signed transaction from the mempool using its hash GetTransactionByHash(HashValue, oneshot::Sender>), + /// Retrieves all addresses with transactions in the mempool's parking lot and + /// the number of transactions for each address + GetAddressesFromParkingLot(oneshot::Sender>), } pub type MempoolClientSender = mpsc::Sender; diff --git a/mempool/src/tests/integration_tests.rs b/mempool/src/tests/integration_tests.rs index 8c1661c23a7b7..c10facf9ae787 100644 --- a/mempool/src/tests/integration_tests.rs +++ b/mempool/src/tests/integration_tests.rs @@ -319,6 +319,29 @@ async fn test_rebroadcast_retry_is_empty() { .await; } +#[tokio::test] +async fn test_get_all_addresses_from_parking_lot() { + let mut node = MempoolTestFrameworkBuilder::single_validator(); + + // Add second txn. 
Using TXN_2 here because sequence number needs to be higher than expected + // to be put in parking lot + node.add_txns_via_client(&TXN_2).await; + + // Check to make sure transaction is in parking lot + let addresses = node.get_parking_lot_txns_via_client().await; + let address = addresses.first().unwrap().0; + let txn_size = addresses.first().unwrap().1; + + // Assert that the address matches + assert_eq!( + address.to_string(), + TXN_2.first().unwrap().address.to_string() + ); + + // Assert there is only one transaction for that address + assert_eq!(txn_size, 1); +} + // -- Multi node tests below here -- /// Tests if the node is a VFN, and it's getting forwarded messages from a PFN. It should forward diff --git a/mempool/src/tests/test_framework.rs b/mempool/src/tests/test_framework.rs index 30260e6b69557..efc1a64a12594 100644 --- a/mempool/src/tests/test_framework.rs +++ b/mempool/src/tests/test_framework.rs @@ -168,6 +168,15 @@ impl MempoolNode { } } + pub async fn get_parking_lot_txns_via_client(&mut self) -> Vec<(AccountAddress, u64)> { + let (sender, receiver) = oneshot::channel(); + self.mempool_client_sender + .send(MempoolClientRequest::GetAddressesFromParkingLot(sender)) + .await + .unwrap(); + receiver.await.unwrap() + } + /// Asynchronously waits for up to 1 second for txns to appear in mempool pub async fn wait_on_txns_in_mempool(&self, txns: &[TestTransaction]) { for _ in 0..10 { diff --git a/scripts/indexer_processor_tests_status_poll.sh b/scripts/indexer_processor_tests_status_poll.sh new file mode 100644 index 0000000000000..be617fe108e1a --- /dev/null +++ b/scripts/indexer_processor_tests_status_poll.sh @@ -0,0 +1,117 @@ +#!/bin/bash + +# Poll for Workflow Run and Wait for Job Completion + +# Unique identifier for the run (commit_hash or UUID passed via event payload) +UUID="${GITHUB_SHA}" + +# If a run_id is already known, use it directly to check the run status +if [ -f ".cached_run_id" ]; then + run_id=$(cat .cached_run_id) + echo "Using 
cached run_id: $run_id" +else + echo "Polling for the workflow run with UUID: $UUID." + + attempts=0 + max_attempts=5 # Number of attempts to find the run_id + sleep_interval=30 # Time to wait between attempts (in seconds) + + while [ $attempts -lt $max_attempts ]; do + echo "Polling for the workflow run. Attempt $((attempts+1)) of $max_attempts..." + + # Get the workflow runs for the repository + response=$(curl -s -H "Authorization: Bearer ${GITHUB_TOKEN}" \ + "https://api.github.com/repos/aptos-labs/aptos-indexer-processors/actions/runs?event=repository_dispatch&branch=main") + + # Check if the workflow_runs array exists + workflow_runs=$(echo "$response" | jq -r '.workflow_runs') + if [ "$workflow_runs" == "null" ] || [ -z "$workflow_runs" ]; then + echo "No workflow runs found. Response from GitHub API:" + echo "$response" # Output the raw response for debugging + echo "Retrying in $sleep_interval seconds..." + attempts=$((attempts + 1)) + sleep $sleep_interval + continue + fi + + # Filter the workflow run by the unique run-name commit hash + run_id=$(echo "$workflow_runs" | jq -r ".[] | select(.name | test(\"$UUID\")) | .id") + + if [ -n "$run_id" ]; then + echo "Found workflow run with ID: $run_id" + echo "$run_id" > .cached_run_id # Save the run_id to cache + break + else + echo "No matching workflow run found yet. Retrying in $sleep_interval seconds..." + attempts=$((attempts + 1)) + sleep $sleep_interval + fi + done +fi + +# If we still don't have a run_id, exit the job +if [ -z "$run_id" ]; then + echo "Workflow run not found after $max_attempts attempts. Exiting." 
+ exit 1 +fi + +# Now that we have the run_id (cached or newly found), proceed to poll job status +jobs_url="https://api.github.com/repos/aptos-labs/aptos-indexer-processors/actions/runs/${run_id}/jobs" + +# Poll the job status until completion +job_completed=false +max_job_attempts=20 # Adjust based on how long you expect the job to run +job_attempts=0 +sleep_interval=60 # Adjust polling interval as needed + +while [ "$job_completed" == false ] && [ $job_attempts -lt $max_job_attempts ]; do + echo "Polling for job status. Attempt $((job_attempts+1)) of $max_job_attempts..." + jobs_response=$(curl -s -H "Authorization: Bearer ${GITHUB_TOKEN}" "$jobs_url") + + # Check if the jobs array exists + jobs=$(echo "$jobs_response" | jq -r '.jobs') + if [ "$jobs" == "null" ] || [ -z "$jobs" ]; then + echo "No jobs found in the workflow run. Response from GitHub API:" + echo "$jobs_response" # Output the raw response for debugging + exit 1 + fi + + # Loop through the jobs and check their status + for job in $(echo "$jobs" | jq -r '.[] | @base64'); do + _jq() { + echo "${job}" | base64 --decode | jq -r "${1}" + } + + job_name=$(_jq '.name') + job_id=$(_jq '.id') + job_status=$(_jq '.status') + job_conclusion=$(_jq '.conclusion') + + echo "Checking job: $job_name (Job ID: $job_id)" + echo "Job status: $job_status" + echo "Job conclusion: $job_conclusion" + + # Check if the job has completed + if [ "$job_status" == "completed" ]; then + job_completed=true + if [ "$job_conclusion" == "success" ]; then + echo "Job completed successfully!" + exit 0 # Exit with success + else + echo "Job failed!" + exit 1 # Exit with failure + fi + fi + done + + # Sleep before the next polling attempt + echo "Job is still in progress. Waiting $sleep_interval seconds before polling again..." 
+ sleep $sleep_interval + job_attempts=$((job_attempts + 1)) +done + +# If the job hasn't completed within the allowed attempts, exit with an error +if [ "$job_completed" == false ]; then + echo "Job did not complete within the expected time. Exiting with failure." + exit 1 +fi diff --git a/scripts/indexer_test_txns_compare_and_diff.sh b/scripts/indexer_test_txns_compare_and_diff.sh new file mode 100644 index 0000000000000..e747a1b853ad9 --- /dev/null +++ b/scripts/indexer_test_txns_compare_and_diff.sh @@ -0,0 +1,116 @@ +#!/bin/bash + +# Function to compare and handle diff logic +compare_and_diff() { + local generated_file=$1 + local original_file=$2 + + echo "Modified file path: $generated_file" + echo "Original file path: $original_file" + + if [ -f "$original_file" ]; then + echo "Original file exists, comparing with modified file." + # Run diff and capture the output + diff_output=$(diff -u "$original_file" "$generated_file" || true) + + if [ -n "$diff_output" ]; then + echo "Differences found in $generated_file" + diff_found=true + modified_files="${modified_files}${generated_file}\n" # Append the full path of the modified file + echo "Diff output:" + echo "$diff_output" + else + echo "No differences found for $generated_file." + fi + else + echo "New file detected: $generated_file (no corresponding original file found)" + new_file_found=true + new_files="${new_files}${generated_file}\n" # Append the full path of the new file + + # Treat as new file, but still run a diff (compare with /dev/null) + diff_output=$(diff -u /dev/null "$generated_file" || true) + if [ -n "$diff_output" ]; then + echo "New file with diff found in $generated_file" + echo "Diff output for new file:" + echo "$diff_output" + fi + fi +} + + +# Initialize the flags +diff_found=false +new_file_found=false +new_files="" +modified_files="" + +cd ecosystem/indexer-grpc/indexer-test-transactions || exit 1 + +echo "Starting comparison between new and original JSON files." 
+ +# C heck if the new_json_transactions folder exists +if [ ! -d "new_json_transactions" ]; then + echo "Directory new_json_transactions does not exist. Exiting." + exit 1 +fi + +# Loop over all subdirectories inside new_json_transactions +for folder in new_json_transactions/*; do + if [ -d "$folder" ]; then # Ensure it's a directory + echo "Processing folder: $folder" + + # Check if the folder is for imported transactions + if [[ "$folder" == *"imported_"* ]]; then + # For imported transactions, process all files without any 'modified_' check + for file in "$folder"/*.json; do + if [ -f "$file" ]; then + echo "Processing imported file: $file" + base_file=$(basename "$file" .json) + original_file="../indexer-test-transactions/json_transactions/$(basename $folder)/${base_file}.json" + compare_and_diff "$file" "$original_file" + fi + done + else + # For scripted transactions, only process files that are prefixed with 'cleaned_' + for file in "$folder"/cleaned_*.json; do + if [ -f "$file" ]; then + echo "Processing scripted file: $file" + base_file=$(basename "$file" .json) + original_file="../indexer-test-transactions/json_transactions/$(basename $folder)/${base_file}.json" + compare_and_diff "$file" "$original_file" + fi + done + fi + else + echo "Folder $folder is not a valid directory." + fi +done + +# Print all new files if found +if [ "$new_file_found" = "true" ] && [ -n "$new_files" ]; then + echo "New files detected:" + echo -e "$new_files" +else + echo "No new files detected." +fi + +# Print all modified files if found +if [ "$diff_found" = "true" ] && [ -n "$modified_files" ]; then + echo "Modified files detected:" + echo -e "$modified_files" +else + echo "No modified files detected." 
+fi + +# Debugging logs before setting outputs +echo "diff_found=$diff_found" +echo "new_file_found=$new_file_found" +echo "new_files=$new_files" +echo "modified_files=$modified_files" + +# Set output flags +echo "diff_found=$diff_found" >> $GITHUB_OUTPUT +echo "new_file_found=$new_file_found" >> $GITHUB_OUTPUT +echo "new_files=$new_files" >> $GITHUB_OUTPUT # Store new files as output +echo "modified_files=$modified_files" >> $GITHUB_OUTPUT # Store modified files as output +echo "Comparison completed." diff --git a/sdk/Cargo.toml b/sdk/Cargo.toml index ee2105cbebd55..ed87ebb322a59 100644 --- a/sdk/Cargo.toml +++ b/sdk/Cargo.toml @@ -26,6 +26,7 @@ ed25519-dalek-bip32 = { workspace = true } hex = { workspace = true } move-core-types = { workspace = true } rand_core = { workspace = true } +serde = { workspace = true } serde_json = { workspace = true } tiny-bip39 = { workspace = true } diff --git a/sdk/src/coin_client.rs b/sdk/src/coin_client.rs index a674789e42de4..6f136522e85e9 100644 --- a/sdk/src/coin_client.rs +++ b/sdk/src/coin_client.rs @@ -83,10 +83,10 @@ impl<'a> CoinClient<'a> { pub async fn get_account_balance(&self, account: &AccountAddress) -> Result { let response = self .api_client - .get_account_balance(*account) + .view_apt_account_balance(*account) .await .context("Failed to get account balance")?; - Ok(response.inner().get()) + Ok(response.into_inner()) } } diff --git a/sdk/src/transaction_builder.rs b/sdk/src/transaction_builder.rs index 9f5e958fef9b8..3ba9c981735d1 100644 --- a/sdk/src/transaction_builder.rs +++ b/sdk/src/transaction_builder.rs @@ -187,6 +187,36 @@ impl TransactionFactory { )) } + pub fn create_multisig_account_with_existing_account( + &self, + owners: Vec, + signatures_required: u64, + ) -> TransactionBuilder { + self.payload( + aptos_stdlib::multisig_account_create_with_existing_account_call( + owners, + signatures_required, + vec![], + vec![], + ), + ) + } + + pub fn 
create_multisig_account_with_existing_account_and_revoke_auth_key( + &self, + owners: Vec, + signatures_required: u64, + ) -> TransactionBuilder { + self.payload( + aptos_stdlib::multisig_account_create_with_existing_account_and_revoke_auth_key_call( + owners, + signatures_required, + vec![], + vec![], + ), + ) + } + pub fn create_multisig_transaction( &self, multisig_account: AccountAddress, diff --git a/sdk/src/types.rs b/sdk/src/types.rs index 9464ac593fff0..8bf98ac47f441 100644 --- a/sdk/src/types.rs +++ b/sdk/src/types.rs @@ -1,12 +1,13 @@ // Copyright © Aptos Foundation // Parts of the project are originally copyright © Meta Platforms, Inc. // SPDX-License-Identifier: Apache-2.0 - use crate::{ crypto::{ ed25519::{Ed25519PrivateKey, Ed25519PublicKey}, + hash::CryptoHash, signing_message, traits::Uniform, + CryptoMaterialError, }, transaction_builder::TransactionBuilder, types::{ @@ -15,7 +16,7 @@ use crate::{ }, }; use anyhow::{Context, Result}; -use aptos_crypto::{ed25519::Ed25519Signature, PrivateKey, SigningKey}; +use aptos_crypto::{ed25519::Ed25519Signature, secp256r1_ecdsa, PrivateKey, SigningKey}; use aptos_ledger::AptosLedgerError; pub use aptos_types::*; use aptos_types::{ @@ -28,6 +29,8 @@ use aptos_types::{ }; use bip39::{Language, Mnemonic, Seed}; use ed25519_dalek_bip32::{DerivationPath, ExtendedSecretKey}; +use keyless::FederatedKeylessPublicKey; +use serde::{Deserialize, Serialize}; use std::{ str::FromStr, sync::atomic::{AtomicU64, Ordering}, @@ -38,7 +41,7 @@ use std::{ enum LocalAccountAuthenticator { PrivateKey(AccountKey), Keyless(KeylessAccount), - // TODO: Add support for keyless authentication + FederatedKeyless(FederatedKeylessAccount), } impl LocalAccountAuthenticator { @@ -49,26 +52,40 @@ impl LocalAccountAuthenticator { .expect("Signing a txn can't fail") .into_inner(), LocalAccountAuthenticator::Keyless(keyless_account) => { - let proof = keyless_account.zk_sig.proof; - let txn_and_zkp = TransactionAndProof { - message: txn.clone(), 
- proof: Some(proof), - }; - - let esk = &keyless_account.ephemeral_key_pair.private_key; - let ephemeral_signature = - EphemeralSignature::ed25519(esk.sign(&txn_and_zkp).unwrap()); - - let sig = KeylessSignature { - cert: EphemeralCertificate::ZeroKnowledgeSig(keyless_account.zk_sig.clone()), - jwt_header_json: keyless_account.jwt_header_json.clone(), - exp_date_secs: keyless_account.ephemeral_key_pair.expiry_date_secs, - ephemeral_pubkey: keyless_account.ephemeral_key_pair.public_key.clone(), - ephemeral_signature, - }; - + let sig = self.build_keyless_signature(txn.clone(), &keyless_account); SignedTransaction::new_keyless(txn, keyless_account.public_key.clone(), sig) }, + LocalAccountAuthenticator::FederatedKeyless(federated_keyless_account) => { + let sig = self.build_keyless_signature(txn.clone(), &federated_keyless_account); + SignedTransaction::new_federated_keyless( + txn, + federated_keyless_account.public_key.clone(), + sig, + ) + }, + } + } + + fn build_keyless_signature( + &self, + txn: RawTransaction, + account: &impl CommonKeylessAccount, + ) -> KeylessSignature { + let proof = account.zk_sig().proof; + let txn_and_zkp = TransactionAndProof { + message: txn, + proof: Some(proof), + }; + + let esk = account.ephem_private_key(); + let ephemeral_signature = esk.sign(&txn_and_zkp).unwrap(); + + KeylessSignature { + cert: EphemeralCertificate::ZeroKnowledgeSig(account.zk_sig().clone()), + jwt_header_json: account.jwt_header_json().clone(), + exp_date_secs: account.expiry_date_secs(), + ephemeral_pubkey: account.ephem_public_key().clone(), + ephemeral_signature, } } } @@ -123,6 +140,18 @@ impl LocalAccount { } } + pub fn new_federated_keyless( + address: AccountAddress, + federated_keyless_account: FederatedKeylessAccount, + sequence_number: u64, + ) -> Self { + Self { + address, + auth: LocalAccountAuthenticator::FederatedKeyless(federated_keyless_account), + sequence_number: AtomicU64::new(sequence_number), + } + } + /// Recover an account from derive 
path (e.g. m/44'/637'/0'/0'/0') and mnemonic phrase, pub fn from_derive_path( derive_path: &str, @@ -242,6 +271,7 @@ impl LocalAccount { match &self.auth { LocalAccountAuthenticator::PrivateKey(key) => key.private_key(), LocalAccountAuthenticator::Keyless(_) => todo!(), + LocalAccountAuthenticator::FederatedKeyless(_) => todo!(), } } @@ -249,6 +279,7 @@ impl LocalAccount { match &self.auth { LocalAccountAuthenticator::PrivateKey(key) => key.public_key(), LocalAccountAuthenticator::Keyless(_) => todo!(), + LocalAccountAuthenticator::FederatedKeyless(_) => todo!(), } } @@ -258,6 +289,9 @@ impl LocalAccount { LocalAccountAuthenticator::Keyless(keyless_account) => { keyless_account.authentication_key() }, + LocalAccountAuthenticator::FederatedKeyless(federated_keyless_account) => { + federated_keyless_account.authentication_key() + }, } } @@ -282,6 +316,7 @@ impl LocalAccount { match &mut self.auth { LocalAccountAuthenticator::PrivateKey(key) => std::mem::replace(key, new_key.into()), LocalAccountAuthenticator::Keyless(_) => todo!(), + LocalAccountAuthenticator::FederatedKeyless(_) => todo!(), } } @@ -468,9 +503,56 @@ impl From for AccountKey { } } +#[derive(Debug, Eq, PartialEq, Deserialize)] +pub enum EphemeralPrivateKey { + Ed25519 { + inner_private_key: Ed25519PrivateKey, + }, + Secp256r1Ecdsa { + inner_private_key: secp256r1_ecdsa::PrivateKey, + }, +} + +impl EphemeralPrivateKey { + pub fn public_key(&self) -> EphemeralPublicKey { + match self { + EphemeralPrivateKey::Ed25519 { inner_private_key } => { + EphemeralPublicKey::ed25519(inner_private_key.public_key()) + }, + EphemeralPrivateKey::Secp256r1Ecdsa { inner_private_key } => { + EphemeralPublicKey::secp256r1_ecdsa(inner_private_key.public_key()) + }, + } + } +} + +impl TryFrom<&[u8]> for EphemeralPrivateKey { + type Error = CryptoMaterialError; + + fn try_from(bytes: &[u8]) -> Result { + bcs::from_bytes::(bytes) + .map_err(|_e| CryptoMaterialError::DeserializationError) + } +} + +impl EphemeralPrivateKey { + 
pub fn sign( + &self, + message: &T, + ) -> Result { + match self { + EphemeralPrivateKey::Ed25519 { inner_private_key } => Ok(EphemeralSignature::ed25519( + inner_private_key.sign(message)?, + )), + EphemeralPrivateKey::Secp256r1Ecdsa { + inner_private_key: _, + } => todo!(), + } + } +} #[derive(Debug)] pub struct EphemeralKeyPair { - private_key: Ed25519PrivateKey, + private_key: EphemeralPrivateKey, public_key: EphemeralPublicKey, #[allow(dead_code)] nonce: String, @@ -481,11 +563,11 @@ pub struct EphemeralKeyPair { impl EphemeralKeyPair { pub fn new( - private_key: Ed25519PrivateKey, + private_key: EphemeralPrivateKey, expiry_date_secs: u64, blinder: Vec, ) -> Result { - let epk = EphemeralPublicKey::ed25519(private_key.public_key()); + let epk = private_key.public_key(); let nonce = OpenIdSig::reconstruct_oauth_nonce( &blinder, expiry_date_secs, @@ -507,14 +589,15 @@ impl EphemeralKeyPair { pub struct KeylessAccount { public_key: KeylessPublicKey, ephemeral_key_pair: EphemeralKeyPair, - #[allow(dead_code)] - uid_key: String, - #[allow(dead_code)] - uid_val: String, - #[allow(dead_code)] - aud: String, - #[allow(dead_code)] - pepper: Pepper, + zk_sig: ZeroKnowledgeSig, + jwt_header_json: String, + jwt: Option, +} + +#[derive(Debug)] +pub struct FederatedKeylessAccount { + public_key: FederatedKeylessPublicKey, + ephemeral_key_pair: EphemeralKeyPair, zk_sig: ZeroKnowledgeSig, jwt_header_json: String, jwt: Option, @@ -531,18 +614,10 @@ impl KeylessAccount { pepper: Pepper, zk_sig: ZeroKnowledgeSig, ) -> Result { - let idc = IdCommitment::new_from_preimage(&pepper, aud, uid_key, uid_val)?; - let public_key = KeylessPublicKey { - iss_val: iss.to_owned(), - idc, - }; + let public_key = create_keyless_public_key(iss, aud, uid_key, uid_val, &pepper)?; Ok(Self { public_key, ephemeral_key_pair, - uid_key: uid_key.to_string(), - uid_val: uid_val.to_string(), - aud: aud.to_string(), - pepper, zk_sig, jwt_header_json: jwt_header_json.to_string(), jwt: None, @@ -556,28 
+631,23 @@ impl KeylessAccount { pepper: Option, zk_sig: Option, ) -> Result { - let parts: Vec<&str> = jwt.split('.').collect(); - let header_bytes = base64::decode(parts.first().context("jwt malformed")?)?; - let jwt_header_json = String::from_utf8(header_bytes)?; - let jwt_payload_json = - base64::decode_config(parts.get(1).context("jwt malformed")?, base64::URL_SAFE)?; - let claims: Claims = serde_json::from_slice(&jwt_payload_json)?; - + let claims = extract_claims_from_jwt(jwt)?; let uid_key = uid_key.unwrap_or("sub").to_string(); let uid_val = claims.get_uid_val(&uid_key)?; let aud = claims.oidc_claims.aud; - let account = Self::new( + let mut account = Self::new( &claims.oidc_claims.iss, &aud, &uid_key, &uid_val, - &jwt_header_json, + &extract_header_json_from_jwt(jwt)?, ephemeral_key_pair, pepper.expect("pepper fetch not implemented"), zk_sig.expect("proof fetch not implemented"), )?; - Ok(account.set_jwt(jwt)) + account.jwt = Some(jwt.to_string()); + Ok(account) } pub fn authentication_key(&self) -> AuthenticationKey { @@ -587,10 +657,164 @@ impl KeylessAccount { pub fn public_key(&self) -> &KeylessPublicKey { &self.public_key } +} + +impl FederatedKeylessAccount { + pub fn new( + iss: &str, + aud: &str, + uid_key: &str, + uid_val: &str, + jwt_header_json: &str, + ephemeral_key_pair: EphemeralKeyPair, + pepper: Pepper, + zk_sig: ZeroKnowledgeSig, + jwk_addr: AccountAddress, + ) -> Result { + let public_key = + create_federated_public_key(iss, aud, uid_key, uid_val, &pepper, jwk_addr)?; + Ok(Self { + public_key, + ephemeral_key_pair, + zk_sig, + jwt_header_json: jwt_header_json.to_string(), + jwt: None, + }) + } + + pub fn new_from_jwt( + jwt: &str, + ephemeral_key_pair: EphemeralKeyPair, + jwk_addr: AccountAddress, + uid_key: Option<&str>, + pepper: Option, + zk_sig: Option, + ) -> Result { + let claims = extract_claims_from_jwt(jwt)?; + let uid_key = uid_key.unwrap_or("sub").to_string(); + let uid_val = claims.get_uid_val(&uid_key)?; + let aud = 
claims.oidc_claims.aud; + + let mut account = Self::new( + &claims.oidc_claims.iss, + &aud, + &uid_key, + &uid_val, + &extract_header_json_from_jwt(jwt)?, + ephemeral_key_pair, + pepper.expect("pepper fetch not implemented"), + zk_sig.expect("proof fetch not implemented"), + jwk_addr, + )?; + account.jwt = Some(jwt.to_string()); + Ok(account) + } + + pub fn authentication_key(&self) -> AuthenticationKey { + AuthenticationKey::any_key(AnyPublicKey::federated_keyless(self.public_key.clone())) + } + + pub fn public_key(&self) -> &FederatedKeylessPublicKey { + &self.public_key + } +} + +fn create_keyless_public_key( + iss: &str, + aud: &str, + uid_key: &str, + uid_val: &str, + pepper: &Pepper, +) -> Result { + let idc = IdCommitment::new_from_preimage(pepper, aud, uid_key, uid_val)?; + Ok(KeylessPublicKey { + iss_val: iss.to_owned(), + idc, + }) +} + +fn create_federated_public_key( + iss: &str, + aud: &str, + uid_key: &str, + uid_val: &str, + pepper: &Pepper, + jwk_addr: AccountAddress, +) -> Result { + let idc = IdCommitment::new_from_preimage(pepper, aud, uid_key, uid_val)?; + Ok(FederatedKeylessPublicKey { + pk: KeylessPublicKey { + iss_val: iss.to_owned(), + idc, + }, + jwk_addr, + }) +} + +pub fn extract_claims_from_jwt(jwt: &str) -> Result { + let parts: Vec<&str> = jwt.split('.').collect(); + let jwt_payload_json = + base64::decode_config(parts.get(1).context("jwt malformed")?, base64::URL_SAFE)?; + let claims: Claims = serde_json::from_slice(&jwt_payload_json)?; + Ok(claims) +} + +pub fn extract_header_json_from_jwt(jwt: &str) -> Result { + let parts: Vec<&str> = jwt.split('.').collect(); + let header_bytes = base64::decode(parts.first().context("jwt malformed")?)?; + + Ok(String::from_utf8(header_bytes)?) 
+} + +trait CommonKeylessAccount { + fn zk_sig(&self) -> &ZeroKnowledgeSig; + fn ephem_private_key(&self) -> &EphemeralPrivateKey; + fn ephem_public_key(&self) -> &EphemeralPublicKey; + fn jwt_header_json(&self) -> &String; + fn expiry_date_secs(&self) -> u64; +} + +impl CommonKeylessAccount for &KeylessAccount { + fn zk_sig(&self) -> &ZeroKnowledgeSig { + &self.zk_sig + } + + fn ephem_private_key(&self) -> &EphemeralPrivateKey { + &self.ephemeral_key_pair.private_key + } + + fn ephem_public_key(&self) -> &EphemeralPublicKey { + &self.ephemeral_key_pair.public_key + } + + fn jwt_header_json(&self) -> &String { + &self.jwt_header_json + } + + fn expiry_date_secs(&self) -> u64 { + self.ephemeral_key_pair.expiry_date_secs + } +} + +impl CommonKeylessAccount for &FederatedKeylessAccount { + fn zk_sig(&self) -> &ZeroKnowledgeSig { + &self.zk_sig + } + + fn ephem_private_key(&self) -> &EphemeralPrivateKey { + &self.ephemeral_key_pair.private_key + } + + fn ephem_public_key(&self) -> &EphemeralPublicKey { + &self.ephemeral_key_pair.public_key + } + + fn jwt_header_json(&self) -> &String { + &self.jwt_header_json + } - pub fn set_jwt(mut self, jwt: &str) -> Self { - self.jwt = Some(jwt.to_string()); - self + fn expiry_date_secs(&self) -> u64 { + self.ephemeral_key_pair.expiry_date_secs } } diff --git a/secure/storage/src/on_disk.rs b/secure/storage/src/on_disk.rs index a896d8b4ef39c..dfbb921a6f07f 100644 --- a/secure/storage/src/on_disk.rs +++ b/secure/storage/src/on_disk.rs @@ -42,7 +42,7 @@ impl OnDiskStorage { // working directory provided by PathBuf::new(). 
let file_dir = file_path .parent() - .map_or(PathBuf::new(), |p| p.to_path_buf()); + .map_or_else(PathBuf::new, |p| p.to_path_buf()); Self { file_path, diff --git a/state-sync/data-streaming-service/src/data_stream.rs b/state-sync/data-streaming-service/src/data_stream.rs index 95bf16a173d80..68e8fbe4abaa7 100644 --- a/state-sync/data-streaming-service/src/data_stream.rs +++ b/state-sync/data-streaming-service/src/data_stream.rs @@ -588,9 +588,12 @@ impl DataStream { .advertised_data .highest_synced_ledger_info() .map(|ledger_info| ledger_info.ledger_info().version()) - .ok_or(aptos_data_client::error::Error::UnexpectedErrorEncountered( - "The highest synced ledger info is missing from the global data summary!".into(), - ))?; + .ok_or_else(|| { + aptos_data_client::error::Error::UnexpectedErrorEncountered( + "The highest synced ledger info is missing from the global data summary!" + .into(), + ) + })?; // If the stream is not lagging behind, reset the lag and return if highest_response_version >= highest_advertised_version { diff --git a/state-sync/state-sync-driver/src/bootstrapper.rs b/state-sync/state-sync-driver/src/bootstrapper.rs index 3299577453085..296b75d6ca78d 100644 --- a/state-sync/state-sync-driver/src/bootstrapper.rs +++ b/state-sync/state-sync-driver/src/bootstrapper.rs @@ -1177,7 +1177,7 @@ impl< BootstrappingMode::ApplyTransactionOutputsFromGenesis => { if let Some(transaction_outputs_with_proof) = transaction_outputs_with_proof { utils::apply_transaction_outputs( - self.storage_synchronizer.clone(), + &mut self.storage_synchronizer, notification_metadata, proof_ledger_info, end_of_epoch_ledger_info, @@ -1198,7 +1198,7 @@ impl< BootstrappingMode::ExecuteTransactionsFromGenesis => { if let Some(transaction_list_with_proof) = transaction_list_with_proof { utils::execute_transactions( - self.storage_synchronizer.clone(), + &mut self.storage_synchronizer, notification_metadata, proof_ledger_info, end_of_epoch_ledger_info, @@ -1219,7 +1219,7 @@ impl< 
BootstrappingMode::ExecuteOrApplyFromGenesis => { if let Some(transaction_list_with_proof) = transaction_list_with_proof { utils::execute_transactions( - self.storage_synchronizer.clone(), + &mut self.storage_synchronizer, notification_metadata, proof_ledger_info, end_of_epoch_ledger_info, @@ -1229,7 +1229,7 @@ impl< } else if let Some(transaction_outputs_with_proof) = transaction_outputs_with_proof { utils::apply_transaction_outputs( - self.storage_synchronizer.clone(), + &mut self.storage_synchronizer, notification_metadata, proof_ledger_info, end_of_epoch_ledger_info, diff --git a/state-sync/state-sync-driver/src/continuous_syncer.rs b/state-sync/state-sync-driver/src/continuous_syncer.rs index c8fc254c976a2..d8da9a4496d52 100644 --- a/state-sync/state-sync-driver/src/continuous_syncer.rs +++ b/state-sync/state-sync-driver/src/continuous_syncer.rs @@ -302,7 +302,7 @@ impl< ContinuousSyncingMode::ApplyTransactionOutputs => { if let Some(transaction_outputs_with_proof) = transaction_outputs_with_proof { utils::apply_transaction_outputs( - self.storage_synchronizer.clone(), + &mut self.storage_synchronizer, notification_metadata, ledger_info_with_signatures.clone(), None, @@ -323,7 +323,7 @@ impl< ContinuousSyncingMode::ExecuteTransactions => { if let Some(transaction_list_with_proof) = transaction_list_with_proof { utils::execute_transactions( - self.storage_synchronizer.clone(), + &mut self.storage_synchronizer, notification_metadata, ledger_info_with_signatures.clone(), None, @@ -344,7 +344,7 @@ impl< ContinuousSyncingMode::ExecuteTransactionsOrApplyOutputs => { if let Some(transaction_list_with_proof) = transaction_list_with_proof { utils::execute_transactions( - self.storage_synchronizer.clone(), + &mut self.storage_synchronizer, notification_metadata, ledger_info_with_signatures.clone(), None, @@ -354,7 +354,7 @@ impl< } else if let Some(transaction_outputs_with_proof) = transaction_outputs_with_proof { utils::apply_transaction_outputs( - 
self.storage_synchronizer.clone(), + &mut self.storage_synchronizer, notification_metadata, ledger_info_with_signatures.clone(), None, diff --git a/state-sync/state-sync-driver/src/utils.rs b/state-sync/state-sync-driver/src/utils.rs index 25104ad0fda7c..24ca021cca373 100644 --- a/state-sync/state-sync-driver/src/utils.rs +++ b/state-sync/state-sync-driver/src/utils.rs @@ -376,8 +376,8 @@ pub fn update_new_epoch_metrics() { /// Executes the given list of transactions and /// returns the number of transactions in the list. -pub async fn execute_transactions( - mut storage_synchronizer: StorageSyncer, +pub async fn execute_transactions( + storage_synchronizer: &mut StorageSyncer, notification_metadata: NotificationMetadata, proof_ledger_info: LedgerInfoWithSignatures, end_of_epoch_ledger_info: Option, @@ -397,8 +397,8 @@ pub async fn execute_transactions( - mut storage_synchronizer: StorageSyncer, +pub async fn apply_transaction_outputs( + storage_synchronizer: &mut StorageSyncer, notification_metadata: NotificationMetadata, proof_ledger_info: LedgerInfoWithSignatures, end_of_epoch_ledger_info: Option, diff --git a/state-sync/storage-service/server/src/tests/utils.rs b/state-sync/storage-service/server/src/tests/utils.rs index ffb4db8f23729..83ee7276f7c7b 100644 --- a/state-sync/storage-service/server/src/tests/utils.rs +++ b/state-sync/storage-service/server/src/tests/utils.rs @@ -102,7 +102,10 @@ pub fn create_data_chunks_with_epoch_boundary( pub fn create_epoch_ending_ledger_info(epoch: u64, version: u64) -> LedgerInfoWithSignatures { // Create a new epoch state let verifier = ValidatorVerifier::from(&ValidatorSet::empty()); - let next_epoch_state = EpochState { epoch, verifier }; + let next_epoch_state = EpochState { + epoch, + verifier: verifier.into(), + }; // Create a mock ledger info with signatures let ledger_info = LedgerInfo::new( diff --git a/storage/aptosdb/src/db/fake_aptosdb.rs b/storage/aptosdb/src/db/fake_aptosdb.rs index c15a1ea742dea..325c9fe4228c9 
100644 --- a/storage/aptosdb/src/db/fake_aptosdb.rs +++ b/storage/aptosdb/src/db/fake_aptosdb.rs @@ -1117,7 +1117,7 @@ mod tests { let signed_transaction = transaction_with_proof .transaction .try_as_signed_user_txn() - .ok_or(anyhow!("not user transaction"))?; + .ok_or_else(|| anyhow!("not user transaction"))?; ensure!( transaction_with_proof.version == version, diff --git a/storage/aptosdb/src/db/include/aptosdb_internal.rs b/storage/aptosdb/src/db/include/aptosdb_internal.rs index 73cc62d2dafd0..d31b38778e1df 100644 --- a/storage/aptosdb/src/db/include/aptosdb_internal.rs +++ b/storage/aptosdb/src/db/include/aptosdb_internal.rs @@ -1,9 +1,9 @@ // Copyright © Aptos Foundation // SPDX-License-Identifier: Apache-2.0 +use crate::metrics::CONCURRENCY_GAUGE; use aptos_metrics_core::IntGaugeHelper; use aptos_storage_interface::block_info::BlockInfo; -use crate::metrics::CONCURRENCY_GAUGE; impl AptosDB { fn new_with_dbs( @@ -44,8 +44,11 @@ impl AptosDB { internal_indexer_db.clone(), )); - let ledger_pruner = - LedgerPrunerManager::new(Arc::clone(&ledger_db), pruner_config.ledger_pruner_config, internal_indexer_db); + let ledger_pruner = LedgerPrunerManager::new( + Arc::clone(&ledger_db), + pruner_config.ledger_pruner_config, + internal_indexer_db, + ); AptosDB { ledger_db: Arc::clone(&ledger_db), @@ -247,9 +250,9 @@ impl AptosDB { .ledger_db .metadata_db() .get_block_info(block_height)? - .ok_or(AptosDbError::NotFound(format!( - "BlockInfo not found at height {block_height}" - )))?) + .ok_or_else(|| { + AptosDbError::NotFound(format!("BlockInfo not found at height {block_height}")) + })?) 
} } diff --git a/storage/aptosdb/src/db/test_helper.rs b/storage/aptosdb/src/db/test_helper.rs index 570bdf8bd4433..68265f72c0e34 100644 --- a/storage/aptosdb/src/db/test_helper.rs +++ b/storage/aptosdb/src/db/test_helper.rs @@ -67,6 +67,7 @@ pub(crate) fn update_store( store: &StateStore, input: impl Iterator)>, first_version: Version, + enable_sharding: bool, ) -> HashValue { use aptos_storage_interface::{jmt_update_refs, jmt_updates}; let mut root_hash = *aptos_crypto::hash::SPARSE_MERKLE_PLACEHOLDER_HASH; @@ -94,7 +95,7 @@ pub(crate) fn update_store( None, &ledger_batch, &sharded_state_kv_batches, - /*put_state_value_indices=*/ false, + /*put_state_value_indices=*/ enable_sharding, /*skip_usage=*/ false, /*last_checkpoint_index=*/ None, ) diff --git a/storage/aptosdb/src/ledger_db/event_db.rs b/storage/aptosdb/src/ledger_db/event_db.rs index 235df9b29a1ca..f34586cfbb42d 100644 --- a/storage/aptosdb/src/ledger_db/event_db.rs +++ b/storage/aptosdb/src/ledger_db/event_db.rs @@ -105,9 +105,9 @@ impl EventDb { Ok(EventsByVersionIter::new( iter, start_version, - start_version.checked_add(num_versions as u64).ok_or({ - AptosDbError::TooManyRequested(num_versions as u64, Version::max_value()) - })?, + start_version.checked_add(num_versions as u64).ok_or( + AptosDbError::TooManyRequested(num_versions as u64, Version::max_value()), + )?, )) } diff --git a/storage/aptosdb/src/ledger_db/ledger_metadata_db.rs b/storage/aptosdb/src/ledger_db/ledger_metadata_db.rs index d8661d7a4c46a..40ace0969239d 100644 --- a/storage/aptosdb/src/ledger_db/ledger_metadata_db.rs +++ b/storage/aptosdb/src/ledger_db/ledger_metadata_db.rs @@ -107,15 +107,13 @@ impl LedgerMetadataDb { } pub(crate) fn get_ledger_commit_progress(&self) -> Result { - get_progress(&self.db, &DbMetadataKey::LedgerCommitProgress)?.ok_or(AptosDbError::NotFound( - "No LedgerCommitProgress in db.".to_string(), - )) + get_progress(&self.db, &DbMetadataKey::LedgerCommitProgress)? 
+ .ok_or_else(|| AptosDbError::NotFound("No LedgerCommitProgress in db.".to_string())) } pub(crate) fn get_pruner_progress(&self) -> Result { - get_progress(&self.db, &DbMetadataKey::LedgerPrunerProgress)?.ok_or(AptosDbError::NotFound( - "No LedgerPrunerProgress in db.".to_string(), - )) + get_progress(&self.db, &DbMetadataKey::LedgerPrunerProgress)? + .ok_or_else(|| AptosDbError::NotFound("No LedgerPrunerProgress in db.".to_string())) } } @@ -137,7 +135,7 @@ impl LedgerMetadataDb { /// Returns the latest ledger info, or NOT_FOUND if it doesn't exist. pub(crate) fn get_latest_ledger_info(&self) -> Result { self.get_latest_ledger_info_option() - .ok_or(AptosDbError::NotFound(String::from("Genesis LedgerInfo"))) + .ok_or_else(|| AptosDbError::NotFound(String::from("Genesis LedgerInfo"))) } /// Returns the latest ledger info for a given epoch. @@ -147,9 +145,7 @@ impl LedgerMetadataDb { ) -> Result { self.db .get::(&epoch)? - .ok_or(AptosDbError::NotFound(format!( - "Last LedgerInfo of epoch {epoch}" - ))) + .ok_or_else(|| AptosDbError::NotFound(format!("Last LedgerInfo of epoch {epoch}"))) } /// Returns an iterator that yields epoch ending ledger infos, starting from `start_epoch`, and @@ -304,9 +300,10 @@ impl LedgerMetadataDb { let mut iter = self.db.iter::()?; iter.seek_for_prev(&version)?; - let (_, block_height) = iter.next().transpose()?.ok_or(anyhow!( - "Block is not found at version {version}, maybe pruned?" - ))?; + let (_, block_height) = iter + .next() + .transpose()? + .ok_or_else(|| anyhow!("Block is not found at version {version}, maybe pruned?"))?; Ok(block_height) } @@ -320,7 +317,7 @@ impl LedgerMetadataDb { let (block_version, block_height) = iter .next() .transpose()? 
- .ok_or(anyhow!("Block is not found at or after version {version}"))?; + .ok_or_else(|| anyhow!("Block is not found at or after version {version}"))?; Ok((block_version, block_height)) } diff --git a/storage/aptosdb/src/ledger_db/write_set_db.rs b/storage/aptosdb/src/ledger_db/write_set_db.rs index 92dbc1e654178..9320e6fb5b19a 100644 --- a/storage/aptosdb/src/ledger_db/write_set_db.rs +++ b/storage/aptosdb/src/ledger_db/write_set_db.rs @@ -54,10 +54,7 @@ impl WriteSetDb { pub(crate) fn get_write_set(&self, version: Version) -> Result { self.db .get::(&version)? - .ok_or(AptosDbError::NotFound(format!( - "WriteSet at version {}", - version - ))) + .ok_or_else(|| AptosDbError::NotFound(format!("WriteSet at version {}", version))) } /// Returns an iterator that yields `num_transactions` write sets starting from `start_version`. diff --git a/storage/aptosdb/src/pruner/state_merkle_pruner/test.rs b/storage/aptosdb/src/pruner/state_merkle_pruner/test.rs index a660a6082975e..edf59efde854f 100644 --- a/storage/aptosdb/src/pruner/state_merkle_pruner/test.rs +++ b/storage/aptosdb/src/pruner/state_merkle_pruner/test.rs @@ -375,7 +375,7 @@ fn verify_state_value_pruner(inputs: Vec)>>) { user_pruning_window_offset: 0, }); for batch in inputs { - update_store(store, batch.clone().into_iter(), version); + update_store(store, batch.clone().into_iter(), version, false); for (k, v) in batch.iter() { if let Some((old_version, old_v_opt)) = current_state_values.insert(k.clone(), (version, v.clone())) diff --git a/storage/aptosdb/src/state_merkle_db.rs b/storage/aptosdb/src/state_merkle_db.rs index 4c42a61042712..b1f5e757d9fb5 100644 --- a/storage/aptosdb/src/state_merkle_db.rs +++ b/storage/aptosdb/src/state_merkle_db.rs @@ -19,8 +19,7 @@ use aptos_config::config::{RocksdbConfig, RocksdbConfigs, StorageDirPaths}; use aptos_crypto::{hash::CryptoHash, HashValue}; use aptos_experimental_runtimes::thread_manager::{optimal_min_len, THREAD_MANAGER}; use aptos_jellyfish_merkle::{ - 
node_type::{NodeKey, NodeType}, - JellyfishMerkleTree, TreeReader, TreeUpdateBatch, TreeWriter, + node_type::NodeKey, JellyfishMerkleTree, TreeReader, TreeUpdateBatch, TreeWriter, }; use aptos_logger::prelude::*; use aptos_rocksdb_options::gen_rocksdb_options; @@ -677,20 +676,6 @@ impl StateMerkleDb { ) -> Result> { let mut ret = None; - if self.enable_sharding { - let mut iter = self.metadata_db().iter::()?; - iter.seek(&(version, 0)).unwrap(); - // early exit if no node is found for the target version - match iter.next().transpose()? { - Some((node_key, node)) => { - if node.node_type() == NodeType::Null || node_key.version() != version { - return Ok(None); - } - }, - None => return Ok(None), - }; - } - // traverse all shards in a naive way let shards = 0..self.hack_num_real_shards(); let start_num_of_nibbles = if self.enable_sharding { 1 } else { 0 }; @@ -822,21 +807,6 @@ impl TreeReader for StateMerkleDb { } fn get_rightmost_leaf(&self, version: Version) -> Result> { - // Since everything has the same version during restore, we seek to the first node and get - // its version. - - let mut iter = self.metadata_db().iter::()?; - // get the root node corresponding to the version - iter.seek(&(version, 0))?; - match iter.next().transpose()? { - Some((node_key, node)) => { - if node.node_type() == NodeType::Null || node_key.version() != version { - return Ok(None); - } - }, - None => return Ok(None), - }; - let ret = None; let shards = 0..self.hack_num_real_shards(); diff --git a/storage/aptosdb/src/state_store/state_store_test.rs b/storage/aptosdb/src/state_store/state_store_test.rs index b76d396b1b630..97f5727a8b431 100644 --- a/storage/aptosdb/src/state_store/state_store_test.rs +++ b/storage/aptosdb/src/state_store/state_store_test.rs @@ -465,6 +465,59 @@ proptest! 
{ ); } + #[test] + fn test_get_rightmost_leaf_with_sharding( + (input, batch1_size) in hash_map(any::(), any::(), 2..1000) + .prop_flat_map(|input| { + let len = input.len(); + (Just(input), 2..len) + }) + ) { + let tmp_dir1 = TempPath::new(); + let db1 = AptosDB::new_for_test_with_sharding(&tmp_dir1, 1000); + let store1 = &db1.state_store; + init_sharded_store(store1, input.clone().into_iter()); + + let version = (input.len() - 1) as Version; + let expected_root_hash = store1.get_root_hash(version).unwrap(); + + let tmp_dir2 = TempPath::new(); + let db2 = AptosDB::new_for_test_with_sharding(&tmp_dir2, 1000); + + + let store2 = &db2.state_store; + let mut restore = + StateSnapshotRestore::new(&store2.state_merkle_db, store2, version, expected_root_hash, true, /* async_commit */ StateSnapshotRestoreMode::Default).unwrap(); + let max_hash = HashValue::new([0xff; HashValue::LENGTH]); + let dummy_state_key = StateKey::raw(&[]); + let (top_levels_batch, sharded_batches, _) = store2.state_merkle_db.merklize_value_set(vec![(max_hash, Some(&(HashValue::random(), dummy_state_key)))], 0, None, None).unwrap(); + store2.state_merkle_db.commit(version, top_levels_batch, sharded_batches).unwrap(); + assert!(store2.state_merkle_db.get_rightmost_leaf(version).unwrap().is_none()); + let mut ordered_input: Vec<_> = input + .into_iter() + .collect(); + ordered_input.sort_unstable_by_key(|(key, _value)| key.hash()); + + let batch1: Vec<_> = ordered_input + .into_iter() + .take(batch1_size) + .collect(); + let rightmost_of_batch1 = batch1.last().map(|(key, _value)| key.hash()).unwrap(); + let proof_of_batch1 = store1 + .get_value_range_proof(rightmost_of_batch1, version) + .unwrap(); + + restore.add_chunk(batch1, proof_of_batch1).unwrap(); + restore.wait_for_async_commit().unwrap(); + + let expected = store2.state_merkle_db.get_rightmost_leaf_naive(version).unwrap(); + // When re-initializing the store, the rightmost leaf should exist indicating the progress + let actual = 
store2.state_merkle_db.get_rightmost_leaf(version).unwrap(); + // ensure the rightmost leaf is not None + prop_assert!(actual.is_some()); + prop_assert_eq!(actual, expected); + } + #[test] fn test_get_rightmost_leaf( (input, batch1_size) in hash_map(any::(), any::(), 2..1000) @@ -484,15 +537,13 @@ proptest! { let tmp_dir2 = TempPath::new(); let db2 = AptosDB::new_for_test(&tmp_dir2); let store2 = &db2.state_store; - let max_hash = HashValue::new([0xff; HashValue::LENGTH]); let mut restore = StateSnapshotRestore::new(&store2.state_merkle_db, store2, version, expected_root_hash, true, /* async_commit */ StateSnapshotRestoreMode::Default).unwrap(); - + let max_hash = HashValue::new([0xff; HashValue::LENGTH]); let dummy_state_key = StateKey::raw(&[]); let (top_levels_batch, sharded_batches, _) = store2.state_merkle_db.merklize_value_set(vec![(max_hash, Some(&(HashValue::random(), dummy_state_key)))], 0, None, None).unwrap(); store2.state_merkle_db.commit(version, top_levels_batch, sharded_batches).unwrap(); assert!(store2.state_merkle_db.get_rightmost_leaf(version).unwrap().is_none()); - let mut ordered_input: Vec<_> = input .into_iter() .collect(); @@ -512,6 +563,7 @@ proptest! { let expected = store2.state_merkle_db.get_rightmost_leaf_naive(version).unwrap(); let actual = store2.state_merkle_db.get_rightmost_leaf(version).unwrap(); + prop_assert_eq!(actual, expected); } @@ -526,7 +578,7 @@ proptest! { let mut version = 0; for batch in input { let next_version = version + batch.len() as Version; - let root_hash = update_store(store, batch.into_iter(), version); + let root_hash = update_store(store, batch.into_iter(), version, false); let last_version = next_version - 1; let snapshot = db @@ -574,5 +626,14 @@ proptest! { // Initializes the state store by inserting one key at each version. 
fn init_store(store: &StateStore, input: impl Iterator) { - update_store(store, input.into_iter().map(|(k, v)| (k, Some(v))), 0); + update_store( + store, + input.into_iter().map(|(k, v)| (k, Some(v))), + 0, + false, + ); +} + +fn init_sharded_store(store: &StateStore, input: impl Iterator) { + update_store(store, input.into_iter().map(|(k, v)| (k, Some(v))), 0, true); } diff --git a/storage/backup/backup-cli/src/backup_types/transaction/restore.rs b/storage/backup/backup-cli/src/backup_types/transaction/restore.rs index 0b5c0ceb4dfd2..22a7ab7ff8b8b 100644 --- a/storage/backup/backup-cli/src/backup_types/transaction/restore.rs +++ b/storage/backup/backup-cli/src/backup_types/transaction/restore.rs @@ -27,8 +27,9 @@ use crate::{ use anyhow::{anyhow, ensure, Result}; use aptos_db::backup::restore_handler::RestoreHandler; use aptos_executor::chunk_executor::ChunkExecutor; -use aptos_executor_types::{TransactionReplayer, VerifyExecutionMode}; +use aptos_executor_types::{ChunkExecutorTrait, TransactionReplayer, VerifyExecutionMode}; use aptos_logger::prelude::*; +use aptos_metrics_core::TimerHelper; use aptos_storage_interface::DbReaderWriter; use aptos_types::{ contract_event::ContractEvent, @@ -592,7 +593,7 @@ impl TransactionRestoreBatchController { let replay_start = Instant::now(); let db = DbReaderWriter::from_arc(Arc::clone(&restore_handler.aptosdb)); let chunk_replayer = Arc::new(ChunkExecutor::::new(db)); - let db_commit_stream = txns_to_execute_stream + let ledger_update_stream = txns_to_execute_stream .try_chunks(BATCH_SIZE) .err_into::() .map_ok(|chunk| { @@ -602,11 +603,10 @@ impl TransactionRestoreBatchController { let verify_execution_mode = self.verify_execution_mode.clone(); async move { - let _timer = OTHER_TIMERS_SECONDS - .with_label_values(&["replay_txn_chunk"]) - .start_timer(); + let _timer = OTHER_TIMERS_SECONDS.timer_with(&["enqueue_chunks"]); + tokio::task::spawn_blocking(move || { - chunk_replayer.replay( + chunk_replayer.enqueue_chunks( txns, 
txn_infos, write_sets, @@ -614,23 +614,38 @@ impl TransactionRestoreBatchController { &verify_execution_mode, ) }) - .err_into::() .await + .expect("spawn_blocking failed") } }) - .try_buffered_x(self.global_opt.concurrent_downloads, 1) - .and_then(future::ready); + .try_buffered_x(3, 1) + .map_ok(|chunks_enqueued| { + futures::stream::repeat_with(|| Result::Ok(())).take(chunks_enqueued) + }) + .try_flatten(); + + let db_commit_stream = ledger_update_stream + .map_ok(|()| { + let chunk_replayer = chunk_replayer.clone(); + async move { + let _timer = OTHER_TIMERS_SECONDS.timer_with(&["ledger_update"]); + + tokio::task::spawn_blocking(move || chunk_replayer.update_ledger()) + .await + .expect("spawn_blocking failed") + } + }) + .try_buffered_x(3, 1); let total_replayed = db_commit_stream .and_then(|()| { let chunk_replayer = chunk_replayer.clone(); async move { - let _timer = OTHER_TIMERS_SECONDS - .with_label_values(&["commit_txn_chunk"]) - .start_timer(); + let _timer = OTHER_TIMERS_SECONDS.timer_with(&["commit"]); + tokio::task::spawn_blocking(move || { - let committed_chunk = chunk_replayer.commit()?; - let v = committed_chunk.result_state.current_version.unwrap_or(0); + let v = chunk_replayer.commit()?; + let total_replayed = v - first_version + 1; TRANSACTION_REPLAY_VERSION.set(v as i64); info!( @@ -640,13 +655,17 @@ impl TransactionRestoreBatchController { as u64, "Transactions replayed." ); - Ok(v) + Ok(total_replayed) }) - .await? + .await + .expect("spawn_blocking failed") } }) - .try_fold(0, |_total, total| future::ok(total)) + .try_fold(0, |_prev_total, total| future::ok(total)) .await?; + // assert all chunks are fully processed and in DB. 
+ assert!(chunk_replayer.is_empty()); + info!( total_replayed = total_replayed, accumulative_tps = diff --git a/storage/backup/backup-cli/src/metadata/view.rs b/storage/backup/backup-cli/src/metadata/view.rs index 3b857651208de..70439943c726e 100644 --- a/storage/backup/backup-cli/src/metadata/view.rs +++ b/storage/backup/backup-cli/src/metadata/view.rs @@ -259,10 +259,10 @@ impl fmt::Display for BackupStorageState { write!( f, "latest_epoch_ending_epoch: {}, latest_state_snapshot_epoch: {}, latest_state_snapshot_version: {}, latest_transaction_version: {}", - self.latest_epoch_ending_epoch.as_ref().map_or("none".to_string(), u64::to_string), - self.latest_state_snapshot_epoch.as_ref().map_or("none".to_string(), u64::to_string), - self.latest_state_snapshot_version.as_ref().map_or("none".to_string(), Version::to_string), - self.latest_transaction_version.as_ref().map_or("none".to_string(), Version::to_string), + self.latest_epoch_ending_epoch.as_ref().map_or_else(|| "none".to_string(), u64::to_string), + self.latest_state_snapshot_epoch.as_ref().map_or_else(|| "none".to_string(), u64::to_string), + self.latest_state_snapshot_version.as_ref().map_or_else(|| "none".to_string(), Version::to_string), + self.latest_transaction_version.as_ref().map_or_else(|| "none".to_string(), Version::to_string), ) } } diff --git a/storage/indexer/Cargo.toml b/storage/indexer/Cargo.toml index 963c1a61864b0..5967a4c20c90e 100644 --- a/storage/indexer/Cargo.toml +++ b/storage/indexer/Cargo.toml @@ -17,6 +17,7 @@ anyhow = { workspace = true } aptos-config = { workspace = true } aptos-db-indexer-schemas = { workspace = true } aptos-logger = { workspace = true } +aptos-metrics-core = { workspace = true } aptos-resource-viewer = { workspace = true } aptos-rocksdb-options = { workspace = true } aptos-schemadb = { workspace = true } @@ -26,6 +27,7 @@ bcs = { workspace = true } bytes = { workspace = true } dashmap = { workspace = true } move-core-types = { workspace = true } +once_cell = { 
workspace = true } [dev-dependencies] aptos-proptest-helpers = { workspace = true } diff --git a/storage/indexer/src/db_indexer.rs b/storage/indexer/src/db_indexer.rs index 63351a633a15d..ac3d18709a068 100644 --- a/storage/indexer/src/db_indexer.rs +++ b/storage/indexer/src/db_indexer.rs @@ -1,7 +1,7 @@ // Copyright (c) Aptos Foundation // SPDX-License-Identifier: Apache-2.0 -use crate::utils::PrefixedStateValueIterator; +use crate::{metrics::TIMER, utils::PrefixedStateValueIterator}; use aptos_config::config::internal_indexer_db_config::InternalIndexerDBConfig; use aptos_db_indexer_schemas::{ metadata::{MetadataKey, MetadataValue, StateSnapshotProgress}, @@ -361,6 +361,7 @@ impl DBIndexer { } pub fn process_a_batch(&self, start_version: Version) -> Result { + let _timer = TIMER.with_label_values(&["process_a_batch"]).start_timer(); let mut version = start_version; let num_transactions = self.get_num_of_transactions(version)?; // This promises num_transactions should be readable from main db @@ -398,7 +399,7 @@ impl DBIndexer { if self.indexer_db.statekeys_enabled() { writeset.iter().for_each(|(state_key, write_op)| { - if write_op.is_creation() { + if write_op.is_creation() || write_op.is_modification() { batch .put::(state_key, &()) .expect("Failed to put state keys to a batch"); @@ -408,7 +409,8 @@ impl DBIndexer { version += 1; Ok::<(), AptosDbError>(()) })?; - // Assert we have processes all the readable transaction. 
+ assert!(version > 0, "batch number should be greater than 0"); + assert_eq!(num_transactions, version - start_version); if self.indexer_db.transaction_enabled() { diff --git a/storage/indexer/src/lib.rs b/storage/indexer/src/lib.rs index 6a96f37f16cf0..7a041c5dbbaca 100644 --- a/storage/indexer/src/lib.rs +++ b/storage/indexer/src/lib.rs @@ -7,6 +7,7 @@ pub mod db_indexer; pub mod db_ops; pub mod db_v2; pub mod indexer_reader; +mod metrics; mod utils; use crate::db::INDEX_DB_NAME; diff --git a/storage/indexer/src/metrics.rs b/storage/indexer/src/metrics.rs new file mode 100644 index 0000000000000..1ed7179580a22 --- /dev/null +++ b/storage/indexer/src/metrics.rs @@ -0,0 +1,15 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use aptos_metrics_core::{exponential_buckets, register_histogram_vec, HistogramVec}; +use once_cell::sync::Lazy; + +pub static TIMER: Lazy = Lazy::new(|| { + register_histogram_vec!( + "aptos_internal_indexer_timer_seconds", + "Various timers for performance analysis.", + &["name"], + exponential_buckets(/*start=*/ 1e-9, /*factor=*/ 2.0, /*count=*/ 32).unwrap(), + ) + .unwrap() +}); diff --git a/terraform/aptos-node-testnet/aws/variables.tf b/terraform/aptos-node-testnet/aws/variables.tf index 9ce02d893d623..045e570358680 100644 --- a/terraform/aptos-node-testnet/aws/variables.tf +++ b/terraform/aptos-node-testnet/aws/variables.tf @@ -175,7 +175,7 @@ variable "utility_instance_type" { variable "validator_instance_type" { description = "Instance type used for validator and fullnodes" type = string - default = "c6i.8xlarge" + default = "c6i.16xlarge" } ### Forge diff --git a/terraform/aptos-node-testnet/gcp/main.tf b/terraform/aptos-node-testnet/gcp/main.tf index c15119e09b236..26bd6d5523e99 100644 --- a/terraform/aptos-node-testnet/gcp/main.tf +++ b/terraform/aptos-node-testnet/gcp/main.tf @@ -55,6 +55,7 @@ module "validator" { gke_node_autoprovisioning_max_memory = var.gke_node_autoprovisioning_max_memory 
gke_autoscaling_profile = var.gke_autoscaling_profile gke_autoscaling_max_node_count = var.gke_autoscaling_max_node_count + enable_vertical_pod_autoscaling = var.enable_vertical_pod_autoscaling # Testnet config workspace_name_override = var.workspace_name_override diff --git a/terraform/aptos-node-testnet/gcp/variables.tf b/terraform/aptos-node-testnet/gcp/variables.tf index 880be3978563d..16173bec6c754 100644 --- a/terraform/aptos-node-testnet/gcp/variables.tf +++ b/terraform/aptos-node-testnet/gcp/variables.tf @@ -218,7 +218,7 @@ variable "utility_instance_type" { variable "validator_instance_type" { description = "Instance type used for validator and fullnodes" type = string - default = "t2d-standard-16" + default = "t2d-standard-60" } variable "utility_instance_enable_taint" { @@ -263,6 +263,12 @@ variable "gke_autoscaling_max_node_count" { default = 250 } +variable "enable_vertical_pod_autoscaling" { + description = "Enable vertical pod autoscaling" + type = bool + default = false +} + ### GKE cluster config variable "cluster_ipv4_cidr_block" { diff --git a/terraform/aptos-node/aws/variables.tf b/terraform/aptos-node/aws/variables.tf index be2e004521b4f..61acb59671786 100644 --- a/terraform/aptos-node/aws/variables.tf +++ b/terraform/aptos-node/aws/variables.tf @@ -202,7 +202,7 @@ variable "utility_instance_enable_taint" { variable "validator_instance_type" { description = "Instance type used for validator and fullnodes" type = string - default = "c6i.8xlarge" + default = "c6i.16xlarge" } variable "validator_instance_num" { diff --git a/terraform/aptos-node/gcp/cluster.tf b/terraform/aptos-node/gcp/cluster.tf index 2508bef0643b7..9d9d208f8358d 100644 --- a/terraform/aptos-node/gcp/cluster.tf +++ b/terraform/aptos-node/gcp/cluster.tf @@ -90,6 +90,10 @@ resource "google_container_cluster" "aptos" { ] } + vertical_pod_autoscaling { + enabled = var.enable_vertical_pod_autoscaling + } + dynamic "cluster_autoscaling" { for_each = var.gke_enable_node_autoprovisioning 
? [1] : [] content { diff --git a/terraform/aptos-node/gcp/variables.tf b/terraform/aptos-node/gcp/variables.tf index fbea82b870eb8..66e2ddba2ce7a 100644 --- a/terraform/aptos-node/gcp/variables.tf +++ b/terraform/aptos-node/gcp/variables.tf @@ -196,7 +196,7 @@ variable "utility_instance_type" { variable "validator_instance_type" { description = "Instance type used for validator and fullnodes" type = string - default = "t2d-standard-16" + default = "t2d-standard-60" } variable "utility_instance_enable_taint" { @@ -208,7 +208,7 @@ variable "utility_instance_enable_taint" { variable "validator_instance_enable_taint" { description = "Whether to taint instances in the validator nodegroup" type = bool - default = false + default = true } variable "gke_enable_node_autoprovisioning" { @@ -241,6 +241,12 @@ variable "gke_autoscaling_max_node_count" { default = 250 } +variable "enable_vertical_pod_autoscaling" { + description = "Enable vertical pod autoscaling" + type = bool + default = false +} + ### Naming overrides variable "helm_release_name_override" { diff --git a/terraform/fullnode/aws/addons.tf b/terraform/fullnode/aws/addons.tf index 52bb764c77565..da987d7694912 100644 --- a/terraform/fullnode/aws/addons.tf +++ b/terraform/fullnode/aws/addons.tf @@ -78,11 +78,6 @@ resource "helm_release" "pfn-addons" { acm_certificate = var.zone_id != "" ? 
aws_acm_certificate.ingress[0].arn : null loadBalancerSourceRanges = var.client_sources_ipv4 } - load_test = { - config = { - numFullnodeGroups = var.num_fullnodes - } - } }), jsonencode(var.pfn_helm_values), ] diff --git a/terraform/fullnode/aws/backup.tf b/terraform/fullnode/aws/backup.tf index f2b990e8b0b12..f8f8c5cc8b4cb 100644 --- a/terraform/fullnode/aws/backup.tf +++ b/terraform/fullnode/aws/backup.tf @@ -50,6 +50,7 @@ data "aws_iam_policy_document" "backup" { statement { actions = [ "s3:ListBucket", + "s3:PutBucketAcl", "s3:PutObject", "s3:GetObject", "s3:GetObjectTagging", diff --git a/terraform/fullnode/aws/variables.tf b/terraform/fullnode/aws/variables.tf index f687206cbafc6..e8eadb9a84cd0 100644 --- a/terraform/fullnode/aws/variables.tf +++ b/terraform/fullnode/aws/variables.tf @@ -134,7 +134,7 @@ variable "utility_instance_type" { variable "fullnode_instance_type" { description = "Instance type used for validator and fullnodes" type = string - default = "c6i.8xlarge" + default = "c6i.16xlarge" } variable "num_extra_instance" { diff --git a/terraform/fullnode/gcp/addons.tf b/terraform/fullnode/gcp/addons.tf index de354a72a76be..7adf760a64504 100644 --- a/terraform/fullnode/gcp/addons.tf +++ b/terraform/fullnode/gcp/addons.tf @@ -85,11 +85,6 @@ resource "helm_release" "pfn-addons" { gce_managed_certificate_domains = var.create_google_managed_ssl_certificate ? 
join(",", distinct(concat([local.domain], var.tls_sans))) : "" # loadBalancerSourceRanges = var.client_sources_ipv4 # not supported yet } - load_test = { - config = { - numFullnodeGroups = var.num_fullnodes - } - } }), jsonencode(var.pfn_helm_values), ] diff --git a/terraform/fullnode/gcp/variables.tf b/terraform/fullnode/gcp/variables.tf index 5954c15a7bd7c..ce9feedbf3941 100644 --- a/terraform/fullnode/gcp/variables.tf +++ b/terraform/fullnode/gcp/variables.tf @@ -119,7 +119,7 @@ variable "utility_instance_type" { variable "fullnode_instance_type" { description = "Instance type used for validator and fullnodes" type = string - default = "t2d-standard-16" + default = "t2d-standard-60" } variable "utility_instance_enable_taint" { diff --git a/terraform/helm/aptos-node/README.md b/terraform/helm/aptos-node/README.md index 36fdfe0a8078f..915bf2b77a2cc 100644 --- a/terraform/helm/aptos-node/README.md +++ b/terraform/helm/aptos-node/README.md @@ -24,6 +24,7 @@ Aptos blockchain node deployment | fullnode.force_enable_telemetry | bool | `false` | Flag to force enable telemetry service (useful for forge tests) | | fullnode.groups | list | `[{"dns_name":"vfn","name":"fullnode","replicas":1}]` | Specify fullnode groups by `name` and number of `replicas` | | fullnode.nodeSelector | object | `{}` | | +| fullnode.podAnnotations | string | `nil` | | | fullnode.resources.limits.cpu | int | `14` | | | fullnode.resources.limits.memory | string | `"56Gi"` | | | fullnode.resources.requests.cpu | int | `14` | | @@ -79,13 +80,14 @@ Aptos blockchain node deployment | serviceAccount.name | string | `nil` | The name of the service account to use. If not set and create is true, a name is generated using the fullname template | | validator.affinity | object | `{}` | | | validator.config | object | `{}` | Validator configuration. 
See NodeConfig https://github.com/aptos-labs/aptos-core/blob/main/config/src/config/mod.rs | -| validator.enableNetworkPolicy | bool | `true` | Lock down network ingress and egress with Kubernetes NetworkPolicy | +| validator.enableNetworkPolicy | bool | `false` | Lock down network ingress and egress with Kubernetes NetworkPolicy | | validator.force_enable_telemetry | bool | `false` | Flag to force enable telemetry service (useful for forge tests) | | validator.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy to use for validator images | | validator.image.repo | string | `"aptoslabs/validator"` | Image repo to use for validator images | | validator.image.tag | string | `nil` | Image tag to use for validator images. If set, overrides `imageTag` | | validator.name | string | `nil` | Internal: name of your validator for use in labels | | validator.nodeSelector | object | `{}` | | +| validator.podAnnotations | string | `nil` | | | validator.resources.limits.cpu | int | `14` | | | validator.resources.limits.memory | string | `"56Gi"` | | | validator.resources.requests.cpu | int | `14` | | @@ -180,9 +182,8 @@ You may also deploy multiple fullnodes into the cluster via `.Values.numFullnode ### Era -The `.Values.chain.era` is a number that is incremented every time the validator's storage is wiped. This is ueful for testnets when the network is wiped. +The `.Values.chain.era` is a number that is incremented every time the validator's storage is wiped. This is useful for testnets when the network is wiped. ### Privileged Mode For debugging purposes, it's sometimes useful to run the validator as root (privileged mode). This is enabled by `.Values.enablePrivilegedMode`. 
- diff --git a/terraform/helm/aptos-node/README.md.gotmpl b/terraform/helm/aptos-node/README.md.gotmpl index 9283966591c8d..7f8b5d039656e 100644 --- a/terraform/helm/aptos-node/README.md.gotmpl +++ b/terraform/helm/aptos-node/README.md.gotmpl @@ -100,9 +100,8 @@ You may also deploy multiple fullnodes into the cluster via `.Values.numFullnode ### Era -The `.Values.chain.era` is a number that is incremented every time the validator's storage is wiped. This is ueful for testnets when the network is wiped. +The `.Values.chain.era` is a number that is incremented every time the validator's storage is wiped. This is useful for testnets when the network is wiped. ### Privileged Mode For debugging purposes, it's sometimes useful to run the validator as root (privileged mode). This is enabled by `.Values.enablePrivilegedMode`. - diff --git a/terraform/helm/aptos-node/files/haproxy.cfg b/terraform/helm/aptos-node/files/haproxy.cfg index 06433385fedda..6b4a0f2642889 100644 --- a/terraform/helm/aptos-node/files/haproxy.cfg +++ b/terraform/helm/aptos-node/files/haproxy.cfg @@ -22,9 +22,10 @@ defaults # Set the default mode to TCP mode tcp + log-format "%ci:%cp [%t] %ft %b/%s %Tw/%Tc/%Tt %B %ts %ac/%fc/%bc/%sc/%rc %sq/%bq" # Don't log normal events - option dontlog-normal + # option dontlog-normal # Set timeouts for connections timeout client 60s diff --git a/terraform/helm/aptos-node/values.yaml b/terraform/helm/aptos-node/values.yaml index 1b5887dddb78b..30b5b7cf7c698 100644 --- a/terraform/helm/aptos-node/values.yaml +++ b/terraform/helm/aptos-node/values.yaml @@ -68,11 +68,11 @@ validator: pullPolicy: IfNotPresent resources: limits: - cpu: 14 - memory: 56Gi + cpu: 30 + memory: 60Gi requests: - cpu: 14 - memory: 56Gi + cpu: 30 + memory: 60Gi storage: # -- Kubernetes storage class to use for validator persistent storage class: @@ -87,9 +87,13 @@ validator: affinity: {} # -- Validator configuration. 
See NodeConfig https://github.com/aptos-labs/aptos-core/blob/main/config/src/config/mod.rs config: {} - + # -- DEPRECATED: it's broken with Cilium a.k.a. GKE DataplaneV2. + # -- templates/networkpolicy.yaml kept around for reference in case we want to resurrect it. # -- Lock down network ingress and egress with Kubernetes NetworkPolicy - enableNetworkPolicy: true + enableNetworkPolicy: false + podAnnotations: + # Determines which log levels are retained by the Vector pipeline + # aptos.dev/min-log-level-to-retain: warn fullnode: # -- Specify fullnode groups by `name` and number of `replicas` @@ -99,11 +103,11 @@ replicas: 1 resources: limits: - cpu: 14 - memory: 56Gi + cpu: 30 + memory: 60Gi requests: - cpu: 14 - memory: 56Gi + cpu: 30 + memory: 60Gi storage: # -- Kubernetes storage class to use for fullnode persistent storage class: @@ -124,7 +128,9 @@ full_node_networks: # The first item in the array `full_node_networks` must always refer to the public fullnode network - network_id: "public" - seeds: {} + podAnnotations: + # Determines which log levels are retained by the Vector pipeline + # aptos.dev/min-log-level-to-retain: warn service: # -- If set, the base domain name to use for External DNS diff --git a/terraform/helm/fullnode/README.md b/terraform/helm/fullnode/README.md index 665b36b936feb..1470da36a8555 100644 --- a/terraform/helm/fullnode/README.md +++ b/terraform/helm/fullnode/README.md @@ -7,7 +7,7 @@ | Key | Type | Default | Description | |-----|------|---------|-------------| | affinity | object | `{}` | | -| aptos_chains | object | 
`{"devnet":{"genesis_blob_url":"https://devnet.aptoslabs.com/genesis.blob","waypoint_txt_url":"https://devnet.aptoslabs.com/waypoint.txt"},"mainnet":{"genesis_blob_url":"https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/mainnet/genesis.blob","waypoint_txt_url":"https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/mainnet/waypoint.txt"},"testnet":{"genesis_blob_url":"https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/testnet/genesis.blob","waypoint_txt_url":"https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/testnet/waypoint.txt"}}` | For each supported chain, specify the URLs from which to download the genesis.blob and waypoint.txt | +| aptos_chains | object | `{"devnet":{"genesis_blob_url":"https://devnet.aptoslabs.com/genesis.blob","waypoint_txt_url":"https://devnet.aptoslabs.com/waypoint.txt"},"mainnet":{"genesis_blob_url":"https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/mainnet/genesis.blob","waypoint_txt_url":"https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/mainnet/waypoint.txt"},"testnet":{"genesis_blob_url":"https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/testnet/genesis.blob","waypoint_txt_url":"https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/testnet/genesis_waypoint.txt"}}` | For each supported chain, specify the URLs from which to download the genesis.blob and waypoint.txt | | backup.affinity | object | `{}` | | | backup.config.azure.account | string | `nil` | | | backup.config.azure.container | string | `nil` | | @@ -15,7 +15,7 @@ | backup.config.gcs.bucket | string | `nil` | | | backup.config.location | string | `nil` | Which of the below backup configurations to use | | backup.config.s3.bucket | string | `nil` | | -| backup.config.state_snapshot_interval_epochs | int | `1` | State snapshot interval epochs | +| backup.config.state_snapshot_interval_epochs | int | `2` | State snapshot interval epochs | | 
backup.config.transaction_batch_size | int | `1000000` | Transaction batch size | | backup.enable | bool | `false` | Whether to enable backup | | backup.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy to use for backup images | @@ -46,8 +46,9 @@ | chain.era | int | `1` | Bump this number to wipe the underlying storage | | chain.genesisConfigmap | string | `nil` | Kubernetes Configmap from which to load the genesis.blob and waypoint.txt | | chain.genesisSecret | string | `nil` | Kubernetes Secret from which to load the genesis.blob and waypoint.txt | +| chain.label | string | `nil` | The value of the `chain_name` label. If empty, defaults to `.Values.chain.name` | | chain.name | string | `"devnet"` | Name of the testnet to connect to. There must be a corresponding entry in .Values.aptos_chains | -| fullnode.config | object | `{"full_node_networks":[{"identity":{},"inbound_rate_limit_config":null,"network_id":"public","outbound_rate_limit_config":null,"seeds":{}}]}` | Fullnode configuration. See NodeConfig https://github.com/aptos-labs/aptos-core/blob/main/config/src/config/mod.rs | +| fullnode.config | object | `{"full_node_networks":[{"identity":{},"inbound_rate_limit_config":null,"network_id":"public","outbound_rate_limit_config":null}]}` | Fullnode configuration. See NodeConfig https://github.com/aptos-labs/aptos-core/blob/main/config/src/config/mod.rs | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy to use for fullnode images | | image.repo | string | `"aptoslabs/validator"` | Image repo to use for fullnode images. Fullnodes and validators use the same image | | image.tag | string | `nil` | Image tag to use for fullnode images. If set, overrides `imageTag` | @@ -58,6 +59,7 @@ | ingress.ingressClassName | string | `nil` | The ingress class for fullnode ingress. 
Leaving class empty will result in an ingress that implicity uses the default ingress class | | logging.address | string | `nil` | Address for remote logging | | manageImages | bool | `true` | If true, helm will always override the deployed image with what is configured in the helm values. If not, helm will take the latest image from the currently running workloads, which is useful if you have a separate procedure to update images (e.g. rollout) | +| metrics.destination | string | `"dev"` | The upstream sink for metrics. Supported values are "dev" and "prod" | | nodeSelector | object | `{}` | | | resources.limits.cpu | int | `14` | | | resources.limits.memory | string | `"56Gi"` | | @@ -99,6 +101,7 @@ | serviceAccount.name | string | `nil` | The name of the service account to use. If not set and create is true, a name is generated using the fullname template | | storage.class | string | `nil` | Kubernetes storage class to use for fullnode persistent storage | | storage.size | string | `"1000Gi"` | Size of fullnode persistent storage | +| storage.snapshotRefForRestore | string | `nil` | The name of a VolumeSnapshot to restore from. If unset, the fullnode will start from scratch. 
| | tolerations | list | `[]` | | Configuration diff --git a/terraform/helm/fullnode/templates/backup-compaction.yaml b/terraform/helm/fullnode/templates/backup-compaction.yaml index 466191d66681c..574095d6629ac 100644 --- a/terraform/helm/fullnode/templates/backup-compaction.yaml +++ b/terraform/helm/fullnode/templates/backup-compaction.yaml @@ -61,7 +61,7 @@ spec: fieldRef: fieldPath: metadata.name - name: PUSH_METRICS_ENDPOINT - value: "{{- include "backup.pushMetricsEndpoint" $ }}/api/v1/import/prometheus?extra_label=role={{- .jobName | default "db_backup_compaction" }}&extra_label=kubernetes_pod_name=$(KUBERNETES_POD_NAME)" + value: "{{- include "backup.pushMetricsEndpoint" $ }}/api/v1/import/prometheus?extra_label=role={{- .jobName | default "db_backup_compaction" }}&extra_label=kubernetes_pod_name=$(KUBERNETES_POD_NAME)&extra_label=chain_name={{ $.Values.chain.label | default $.Values.chain.name }}" {{- end }} {{- include "backup.backupEnvironment" (dict "config" $.Values.backup.config "era" $.Values.chain.era) | nindent 12 }} {{- with .Values.backup_compaction }} diff --git a/terraform/helm/fullnode/templates/backup-verify.yaml b/terraform/helm/fullnode/templates/backup-verify.yaml index deaf64b7650db..28f25d00385c8 100644 --- a/terraform/helm/fullnode/templates/backup-verify.yaml +++ b/terraform/helm/fullnode/templates/backup-verify.yaml @@ -59,7 +59,7 @@ spec: fieldRef: fieldPath: metadata.name - name: PUSH_METRICS_ENDPOINT - value: "{{- include "backup.pushMetricsEndpoint" $ }}/api/v1/import/prometheus?extra_label=role={{- .jobName | default "db_backup_verify" }}&extra_label=kubernetes_pod_name=$(KUBERNETES_POD_NAME)" + value: "{{- include "backup.pushMetricsEndpoint" $ }}/api/v1/import/prometheus?extra_label=role={{- .jobName | default "db_backup_verify" }}&extra_label=kubernetes_pod_name=$(KUBERNETES_POD_NAME)&extra_label=chain_name={{ $.Values.chain.label | default $.Values.chain.name }}" {{- end }} {{- include "backup.backupEnvironment" (dict 
"config" $.Values.backup.config "era" $.Values.chain.era) | nindent 12 }} {{- with .Values.backup_verify }} diff --git a/terraform/helm/fullnode/templates/backup.yaml b/terraform/helm/fullnode/templates/backup.yaml index 4911c4fa18922..9a80cbd4020ed 100644 --- a/terraform/helm/fullnode/templates/backup.yaml +++ b/terraform/helm/fullnode/templates/backup.yaml @@ -75,7 +75,7 @@ spec: fieldRef: fieldPath: metadata.name - name: PUSH_METRICS_ENDPOINT - value: "{{- include "backup.pushMetricsEndpoint" $ }}/api/v1/import/prometheus?extra_label=role={{- .jobName | default "db_backup" }}&extra_label=kubernetes_pod_name=$(KUBERNETES_POD_NAME)" + value: "{{- include "backup.pushMetricsEndpoint" $ }}/api/v1/import/prometheus?extra_label=role={{- .jobName | default "db_backup" }}&extra_label=kubernetes_pod_name=$(KUBERNETES_POD_NAME)&extra_label=chain_name={{ $.Values.chain.label | default $.Values.chain.name }}" {{- end }} {{- include "backup.backupEnvironment" (dict "config" .config "era" $.Values.chain.era) | nindent 8 }} volumeMounts: diff --git a/terraform/helm/fullnode/templates/storage.yaml b/terraform/helm/fullnode/templates/storage.yaml index 1b247bcb847f4..084ef1ea828eb 100644 --- a/terraform/helm/fullnode/templates/storage.yaml +++ b/terraform/helm/fullnode/templates/storage.yaml @@ -5,9 +5,17 @@ metadata: labels: {{- include "aptos-fullnode.labels" . 
| nindent 4 }} spec: +{{- with .Values.storage }} accessModes: - ReadWriteOnce - storageClassName: {{ .Values.storage.class }} + storageClassName: {{ .class }} resources: requests: - storage: {{ .Values.storage.size }} + storage: {{ .size }} + {{- if .snapshotRefForRestore }} + dataSourceRef: + apiGroup: snapshot.storage.k8s.io + kind: VolumeSnapshot + name: {{ .snapshotRefForRestore }} + {{- end }} +{{- end }} diff --git a/terraform/helm/fullnode/values.yaml b/terraform/helm/fullnode/values.yaml index c50b6425b9fd7..001abbfecc79d 100644 --- a/terraform/helm/fullnode/values.yaml +++ b/terraform/helm/fullnode/values.yaml @@ -34,7 +34,6 @@ fullnode: full_node_networks: # The first item in the array `full_node_networks` must always refer to the public fullnode network - network_id: "public" - seeds: {} identity: {} inbound_rate_limit_config: outbound_rate_limit_config: @@ -52,11 +51,11 @@ image: resources: limits: - cpu: 14 - memory: 56Gi + cpu: 30 + memory: 60Gi requests: - cpu: 14 - memory: 56Gi + cpu: 30 + memory: 60Gi nodeSelector: {} tolerations: [] @@ -67,6 +66,8 @@ storage: class: # -- Size of fullnode persistent storage size: 1000Gi + # -- The name of a VolumeSnapshot to restore from. If unset, the fullnode will start from scratch. + snapshotRefForRestore: service: # -- The Kubernetes ServiceType to use for the fullnode. 
Change this to LoadBalancer expose the REST API, aptosnet endpoint externally @@ -117,7 +118,7 @@ backup: pullPolicy: IfNotPresent resources: limits: - cpu: 4 + cpu: 8 memory: 8Gi requests: cpu: 4 diff --git a/terraform/helm/pfn-addons/README.md b/terraform/helm/pfn-addons/README.md index c669ca4b7aa63..8f51c163dce31 100644 --- a/terraform/helm/pfn-addons/README.md +++ b/terraform/helm/pfn-addons/README.md @@ -18,31 +18,8 @@ Additional components for a public fullnode fleet deployment | ingress.gce_security_policy | string | `nil` | Security policy to apply to the backend services behind the ingress | | ingress.health_check_duration_secs | string | `nil` | The maximum number of seconds that a PFN is allowed to be behind to be considered healthy and be allowed to serve traffic | | ingress.loadBalancerSourceRanges | string | `nil` | | +| ingress.logging.enabled | bool | `false` | | | ingress.wafAclArn | string | `nil` | | -| load_test.affinity | object | `{}` | | -| load_test.config.duration | int | `300` | How long to emit transactions for | -| load_test.config.expected_max_txns | int | `6000000` | Default 20k * $duration | -| load_test.config.max_transactions_per_account | int | `5` | | -| load_test.config.mempool_backlog | int | `5000` | Number of transactions outstanding in mempool | -| load_test.config.mint_key | string | `nil` | The private key used to mint to fund load test | -| load_test.config.numFullnodeGroups | string | `nil` | The number of fullnode groups to run traffic against | -| load_test.config.target_tps | int | `0` | Whether to target a constant TPS, or 0 if not used. Cannot be used with mempool_backlog. 
| -| load_test.config.transaction_type | string | `"coin-transfer"` | | -| load_test.config.txn_expiration_time_secs | int | `30` | How long to wait for transactions to be expired | -| load_test.config.use_pfns | bool | `true` | If true, run $numFullnodeGroups parallel load tests | -| load_test.config.use_validators | bool | `false` | Whether to submit transactions through validator REST API | -| load_test.enabled | bool | `false` | Whether to enable the load test CronJob | -| load_test.fullnode | object | `{"groups":[{"name":"fullnode"}]}` | The fullnode groups to target | -| load_test.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy to use for tools image | -| load_test.image.repo | string | `"aptoslabs/tools"` | Image repo to use for tools image for running load tests | -| load_test.image.tag | string | `nil` | Image tag to use for tools image | -| load_test.intervalMins | int | `15` | How many minutes between load test runs | -| load_test.nodeSelector | object | `{}` | | -| load_test.resources.limits.cpu | int | `4` | | -| load_test.resources.limits.memory | string | `"4Gi"` | | -| load_test.resources.requests.cpu | int | `4` | | -| load_test.resources.requests.memory | string | `"4Gi"` | | -| load_test.tolerations | list | `[]` | | | service.aws_tags | string | `nil` | | | service.domain | string | `nil` | | | service.enableOnchainDiscovery | bool | `false` | | @@ -53,4 +30,4 @@ Additional components for a public fullnode fleet deployment | serviceAccount.name | string | `nil` | The name of the service account to use. 
If not set and create is true, a name is generated using the fullname template | ---------------------------------------------- -Autogenerated from chart metadata using [helm-docs v1.13.1](https://github.com/norwoodj/helm-docs/releases/v1.13.1) +Autogenerated from chart metadata using [helm-docs v1.14.2](https://github.com/norwoodj/helm-docs/releases/v1.14.2) diff --git a/terraform/helm/pfn-addons/templates/loadtest.yaml b/terraform/helm/pfn-addons/templates/loadtest.yaml deleted file mode 100644 index e5a6048aa197a..0000000000000 --- a/terraform/helm/pfn-addons/templates/loadtest.yaml +++ /dev/null @@ -1,123 +0,0 @@ -{{- if .Values.load_test.enabled }} -apiVersion: batch/v1 -kind: CronJob -metadata: - name: {{ include "pfn-addons.fullname" . }}-load-test - labels: - {{- include "pfn-addons.labels" . | nindent 4 }} - app.kubernetes.io/name: load-test -spec: - concurrencyPolicy: Replace - schedule: {{ printf "*/%d * * * *" (int .Values.load_test.intervalMins) | quote }} - jobTemplate: - spec: - template: - metadata: - labels: - {{- include "pfn-addons.selectorLabels" . | nindent 12 }} - app.kubernetes.io/name: load-test - spec: - restartPolicy: Never - priorityClassName: {{ include "pfn-addons.fullname" . 
}}-high - containers: - - name: load-test - image: {{ .Values.load_test.image.repo }}:{{ .Values.load_test.image.tag | default .Values.imageTag }} - imagePullPolicy: {{ .Values.load_test.image.pullPolicy }} - command: - - aptos-transaction-emitter - - emit-tx - - --mint-key={{ .Values.load_test.config.mint_key }} - - --chain-id={{ .Values.load_test.config.chain_id }} - # Build targets args for internal cluster targets - {{- $numTargets := 0 }} - {{- $targetSuffix := "" }} - {{- $targetGroups := list }} - {{- if $.Values.load_test.config.use_pfns }} - {{- $numTargets = $.Values.load_test.config.numFullnodeGroups }} - {{- $targetSuffix = "fullnode" }} - {{- $targetGroups = list }} - {{- else if $.Values.load_test.config.use_validators }} - {{- $numTargets = $.Values.genesis.numValidators }} - {{- $targetSuffix = "validator" }} - {{- $targetGroups = list }} - {{- else }} - {{- $numTargets = $.Values.load_test.config.numFullnodeGroups }} - {{- $targetSuffix = "fullnode" }} - {{- $targetGroups = $.Values.load_test.fullnode.groups }} - {{- end }} - {{- if $.Values.load_test.config.use_pfns }} - {{- range $i := until (int $numTargets) }} - - --targets=http://{{ printf "fullnode%d.%s" $i $.Values.service.domain }} - # - --targets=https://{{ printf "%s" $.Values.service.domain }} - {{- end }} - {{- else }} - {{- range $i := until (int $numTargets) }} - {{- $port := 80 }} - {{- if $targetGroups }} - {{- range $group := $targetGroups }} - {{- $nodeName := join "-" (list $.Values.genesis.username_prefix $i $group.name "lb") }} - - --targets=http://{{ $nodeName }}:{{ $port }} - {{- end }} - {{- else }} - {{- $nodeName := join "-" (list $.Values.genesis.username_prefix $i $targetSuffix "lb") }} - - --targets=http://{{ $nodeName }}:{{ $port }} - {{- end }} - {{- end }} - {{- end }} - {{- with .Values.load_test }} - # Either provide target TPS or mempool backlog - {{- if gt (int .config.target_tps) 0 }} - - --target-tps={{ .config.target_tps }} - {{- else }} - - 
--mempool-backlog={{ .config.mempool_backlog }} - {{- end }} - - --duration={{ .config.duration }} - # - --delay-after-minting=300 - - --expected-max-txns={{ .config.expected_max_txns }} - - --txn-expiration-time-secs={{ .config.txn_expiration_time_secs }} - - --max-transactions-per-account={{ .config.max_transactions_per_account }} - - --transaction-type={{ .config.transaction_type }} - env: - - name: RUST_BACKTRACE - value: "full" - - name: REUSE_ACC - value: "1" - {{- with .resources }} - resources: - {{- toYaml . | nindent 14 }} - {{- end }} - securityContext: - readOnlyRootFilesystem: true - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - seccompProfile: - type: RuntimeDefault - {{- with .nodeSelector }} - nodeSelector: - {{- toYaml . | nindent 12 }} - {{- end }} - {{- with .affinity }} - affinity: - {{- toYaml . | nindent 12 }} - {{- end }} - {{- with .tolerations }} - tolerations: - {{- toYaml . | nindent 12 }} - {{- end }} - securityContext: - runAsNonRoot: true - runAsUser: 6180 - runAsGroup: 6180 - fsGroup: 6180 - # sysctls: - # - name: net.ipv4.tcp_tw_reuse - # value: "1" - {{- end }} - serviceAccountName: {{ include "pfn-addons.serviceAccountName" . 
}} - {{- if .Values.imagePullSecret }} - imagePullSecrets: - - name: {{.Values.imagePullSecret}} - {{- end }} -{{- end }} diff --git a/terraform/helm/pfn-addons/templates/service.yaml b/terraform/helm/pfn-addons/templates/service.yaml index c4f6e7f4f984e..18cf919461d79 100644 --- a/terraform/helm/pfn-addons/templates/service.yaml +++ b/terraform/helm/pfn-addons/templates/service.yaml @@ -39,6 +39,8 @@ spec: securityPolicy: name: {{ .Values.ingress.gce_security_policy }} {{- end }} + logging: + enable: {{ .Values.ingress.logging.enabled }} connectionDraining: drainingTimeoutSec: 30 healthCheck: diff --git a/terraform/helm/pfn-addons/values.yaml b/terraform/helm/pfn-addons/values.yaml index 3c5cdf5cb1cdc..16954125c6b81 100644 --- a/terraform/helm/pfn-addons/values.yaml +++ b/terraform/helm/pfn-addons/values.yaml @@ -22,6 +22,8 @@ ingress: loadBalancerSourceRanges: enableStickyness: true cookieDurationSeconds: 86400 + logging: + enabled: false # the below only work for gce ingress gce_managed_certificate: gce_managed_certificate_domains: @@ -32,51 +34,3 @@ ingress: # -- The maximum number of seconds that a PFN is allowed to be behind # to be considered healthy and be allowed to serve traffic health_check_duration_secs: - -load_test: - # -- Whether to enable the load test CronJob - enabled: false - image: - # -- Image repo to use for tools image for running load tests - repo: aptoslabs/tools - # -- Image tag to use for tools image - tag: - # -- Image pull policy to use for tools image - pullPolicy: IfNotPresent - resources: - limits: - cpu: 4 - memory: 4Gi - requests: - cpu: 4 - memory: 4Gi - nodeSelector: {} - tolerations: [] - affinity: {} - # -- How many minutes between load test runs - intervalMins: 15 - # -- The fullnode groups to target - fullnode: - groups: - - name: fullnode - config: - # -- The number of fullnode groups to run traffic against - numFullnodeGroups: - # -- The private key used to mint to fund load test - mint_key: - # -- Number of transactions 
outstanding in mempool - mempool_backlog: 5000 - # -- Whether to target a constant TPS, or 0 if not used. Cannot be used with mempool_backlog. - target_tps: 0 - # -- How long to emit transactions for - duration: 300 - # -- How long to wait for transactions to be expired - txn_expiration_time_secs: 30 - # -- Whether to submit transactions through validator REST API - use_validators: false - # -- If true, run $numFullnodeGroups parallel load tests - use_pfns: true - # -- Default 20k * $duration - expected_max_txns: 6000000 - max_transactions_per_account: 5 - transaction_type: coin-transfer diff --git a/terraform/helm/testnet-addons/templates/service.yaml b/terraform/helm/testnet-addons/templates/service.yaml index a300e915f736f..430d10d19c859 100644 --- a/terraform/helm/testnet-addons/templates/service.yaml +++ b/terraform/helm/testnet-addons/templates/service.yaml @@ -28,7 +28,6 @@ apiVersion: cloud.google.com/v1 kind: BackendConfig metadata: name: {{ include "testnet-addons.fullname" . }}-api - namespace: default spec: {{- if .Values.ingress.gce_security_policy }} securityPolicy: diff --git a/terraform/helm/testnet-addons/templates/waypoint.yaml b/terraform/helm/testnet-addons/templates/waypoint.yaml index 792f0ad1a3b9b..58e414666f2d6 100644 --- a/terraform/helm/testnet-addons/templates/waypoint.yaml +++ b/terraform/helm/testnet-addons/templates/waypoint.yaml @@ -27,7 +27,6 @@ apiVersion: cloud.google.com/v1 kind: BackendConfig metadata: name: {{ include "testnet-addons.fullname" . 
}}-waypoint - namespace: default spec: {{- if .Values.ingress.gce_security_policy }} securityPolicy: diff --git a/terraform/helm/vector-log-agent/files/vector-transforms.yaml b/terraform/helm/vector-log-agent/files/vector-transforms.yaml index 943e800b44f39..20ba0aca563b7 100644 --- a/terraform/helm/vector-log-agent/files/vector-transforms.yaml +++ b/terraform/helm/vector-log-agent/files/vector-transforms.yaml @@ -35,6 +35,8 @@ transforms: # del(.k8s.annotations) # } + .min_log_level_to_retain = .k8s.annotations."aptos.dev/min-log-level-to-retain" + del(.k8s.annotations) del(.k8s.labels."app.kubernetes.io/managed-by") @@ -83,7 +85,9 @@ transforms: .timestamp = parsed_timestamp } - # this last stage drops all forge node logs from aptos-nodes 5 (0-indexed) and above unless they are of level error + # This stage filters forge logs based off of node index and log level. + # It retains all logs for nodes 0-4 and only error logs for nodes 5+. + # The `aptos.dev/min-log-level-to-retain` pod annotation overrides the default behavior. filter_forge_logs: type: filter inputs: @@ -91,14 +95,42 @@ transforms: condition: | aptos_node_index = to_int(parse_regex(.k8s.pod_name, r'^aptos-node-(?P\d+)-.*').node_index ?? 0) ?? 0 - should_be_excluded = contains(to_string!(.k8s.cluster), "forge") && exists(.k8s.labels."forge-namespace") && aptos_node_index >= 5 && includes(["warn","info","debug","trace"], .level) + is_forge_log = contains(to_string!(.k8s.cluster), "forge") && exists(.k8s.labels."forge-namespace") + is_low_index_forge_node = aptos_node_index < 5 + is_error_log = (.level == "error") + + log_level_values = { + "trace": 1, + "debug": 2, + "info": 3, + "warn": 4, + "error": 5, + } + + has_log_level_annotation = .min_log_level_to_retain != null + log_level_annotation_value = get(log_level_values, [.min_log_level_to_retain]) ?? null + log_value = get(log_level_values, [.level]) ?? 
null + is_allowed_log_level = (log_level_annotation_value != null && log_value != null && log_value >= log_level_annotation_value) ?? false + + retain_log = + !is_forge_log || + (has_log_level_annotation && is_allowed_log_level) || + (!has_log_level_annotation && is_low_index_forge_node) || + (!has_log_level_annotation && is_error_log) - !should_be_excluded + retain_log + + delete_temp_fields: + type: remap + inputs: + - filter_forge_logs + source: + del(.min_log_level_to_retain) final_logs: type: filter inputs: - - filter_forge_logs + - delete_temp_fields # temporarily filter out noisy logs in vector until https://github.com/aptos-labs/aptos-core/pull/13965 lands in mainnet release # temporarily filter out noisy logs from https://aptos-org.slack.com/archives/C06TH3DH7SB/p1721328005384169?thread_ts=1720695143.603089&cid=C06TH3DH7SB until lands in mainnet release condition: | @@ -120,27 +152,27 @@ transforms: source: | . = flatten(., ".") # in order for fields to become individual, filterable top-level fields in uptrace we need to flatten nested objects into top-level keys. .service_name = .k8s.labels.app - .repo = "gcp" - - datadog_logs: - type: remap - inputs: - - final_logs - source: | - .ddsource = "k8s" - if is_string(.k8s.labels.app) { - .service = .k8s.labels.app - } - .ddtags, _ = "kube_cluster_name:" + .k8s.cluster + ",kube_namespace:" + .k8s.namespace + ",pod_name:" + .k8s.pod_name - - signoz_logs: - type: remap - inputs: - - final_logs - source: | - .severity_text = del(.level) - .source_type = "k8s" - tmp = { "k8s": del(.k8s)} - resources_tmp = flatten(tmp, ".") - . 
= flatten(., ".") - .resources = resources_tmp + .repo = "k8s" + + # datadog_logs: + # type: remap + # inputs: + # - final_logs + # source: | + # .ddsource = "k8s" + # if is_string(.k8s.labels.app) { + # .service = .k8s.labels.app + # } + # .ddtags, _ = "kube_cluster_name:" + .k8s.cluster + ",kube_namespace:" + .k8s.namespace + ",pod_name:" + .k8s.pod_name + + # signoz_logs: + # type: remap + # inputs: + # - final_logs + # source: | + # .severity_text = del(.level) + # .source_type = "k8s" + # tmp = { "k8s": del(.k8s)} + # resources_tmp = flatten(tmp, ".") + # . = flatten(., ".") + # .resources = resources_tmp diff --git a/terraform/helm/vector-log-agent/testing/log-level-filter-filtered.json b/terraform/helm/vector-log-agent/testing/log-level-filter-filtered.json new file mode 100644 index 0000000000000..657fd4d8bf731 --- /dev/null +++ b/terraform/helm/vector-log-agent/testing/log-level-filter-filtered.json @@ -0,0 +1,28 @@ +{ + "@timestamp.nanos": 163018, + "kubernetes": { + "annotations": { + "aptos.dev/min-log-level-to-retain": "warn" + }, + "cluster": "forge-0", + "container_image": "12355.dkr.ecr.asia-central-2.amazonaws.com/aptos/validator:fd60a8b334afa0eecae0824f6671ae763ca57664", + "container_name": "validator", + "labels": { + "app.kubernetes.io/instance": "validator-21", + "app.kubernetes.io/name": "validator", + "statefulset.kubernetes.io/pod-name": "aptos-node-21-validator-0", + "forge-namespace": "forge-main" + }, + "namespace": "forge-main", + "node_labels": { + "eks.amazonaws.com/nodegroup": "validators", + "node.kubernetes.io/instance-type": "c5.4xlarge", + "topology.kubernetes.io/zone": "asia-central-2a" + }, + "pod_ip": "192.168.127.54", + "pod_name": "aptos-node-21-validator-0", + "pod_owner": "StatefulSet/aptos-node-21-validator" + }, + "message": 
"{\"level\":\"INFO\",\"source\":{\"package\":\"consensus\",\"file\":\"consensus/src/round_manager.rs:794\"},\"thread_name\":\"consensus\",\"hostname\":\"aptos-node-21-validator-0\",\"namespace\":\"forge-main\",\"timestamp\":\"2022-07-25T03:20:50.815984Z\",\"data\":{\"committed_round\":17,\"info\":\"Normal operation\",\"round\":19}}", + "stream": "stdout" +} diff --git a/terraform/helm/vector-log-agent/testing/log-level-filter-retained.json b/terraform/helm/vector-log-agent/testing/log-level-filter-retained.json new file mode 100644 index 0000000000000..e1d810a786e6d --- /dev/null +++ b/terraform/helm/vector-log-agent/testing/log-level-filter-retained.json @@ -0,0 +1,28 @@ +{ + "@timestamp.nanos": 163018, + "kubernetes": { + "annotations": { + "aptos.dev/min-log-level-to-retain": "info" + }, + "cluster": "forge-0", + "container_image": "12355.dkr.ecr.asia-central-2.amazonaws.com/aptos/validator:fd60a8b334afa0eecae0824f6671ae763ca57664", + "container_name": "validator", + "labels": { + "app.kubernetes.io/instance": "validator-21", + "app.kubernetes.io/name": "validator", + "statefulset.kubernetes.io/pod-name": "aptos-node-21-validator-0", + "forge-namespace": "forge-main" + }, + "namespace": "forge-main", + "node_labels": { + "eks.amazonaws.com/nodegroup": "validators", + "node.kubernetes.io/instance-type": "c5.4xlarge", + "topology.kubernetes.io/zone": "asia-central-2a" + }, + "pod_ip": "192.168.127.54", + "pod_name": "aptos-node-21-validator-0", + "pod_owner": "StatefulSet/aptos-node-21-validator" + }, + "message": "{\"level\":\"INFO\",\"source\":{\"package\":\"consensus\",\"file\":\"consensus/src/round_manager.rs:794\"},\"thread_name\":\"consensus\",\"hostname\":\"aptos-node-21-validator-0\",\"namespace\":\"forge-main\",\"timestamp\":\"2022-07-25T03:20:50.815984Z\",\"data\":{\"committed_round\":17,\"info\":\"Normal operation\",\"round\":19}}", + "stream": "stdout" +} diff --git a/terraform/helm/vector-log-agent/testing/test-transforms.sh 
b/terraform/helm/vector-log-agent/testing/test-transforms.sh index 0adcea832da23..37c8f1e3cfac6 100755 --- a/terraform/helm/vector-log-agent/testing/test-transforms.sh +++ b/terraform/helm/vector-log-agent/testing/test-transforms.sh @@ -5,6 +5,9 @@ set -e export VECTOR_SELF_POD_NAME=my-vector-agent export K8S_CLUSTER=forge-0 -cat ./testing/test1.json | jq -c -M | vector --quiet --config ./files/vector-transforms.yaml --config ./testing/vector-test-config.yaml | jq -cat ./testing/test2.json | jq -c -M | vector --quiet --config ./files/vector-transforms.yaml --config ./testing/vector-test-config.yaml | jq -cat ./testing/test3.json | jq -c -M | vector --quiet --config ./files/vector-transforms.yaml --config ./testing/vector-test-config.yaml | jq +jq -c -M < ./testing/test1.json | vector --quiet --config ./files/vector-transforms.yaml --config ./testing/vector-test-config.yaml | jq +jq -c -M < ./testing/test2.json | vector --quiet --config ./files/vector-transforms.yaml --config ./testing/vector-test-config.yaml | jq +jq -c -M < ./testing/test3.json | vector --quiet --config ./files/vector-transforms.yaml --config ./testing/vector-test-config.yaml | jq + +jq -c -M < ./testing/log-level-filter-retained.json | vector --quiet --config ./files/vector-transforms.yaml --config ./testing/vector-test-config.yaml | jq +jq -c -M < ./testing/log-level-filter-filtered.json | vector --quiet --config ./files/vector-transforms.yaml --config ./testing/vector-test-config.yaml | jq diff --git a/terraform/modules/eks/variables.tf b/terraform/modules/eks/variables.tf index 2f9e39926a7e6..4774b4c1e61ca 100644 --- a/terraform/modules/eks/variables.tf +++ b/terraform/modules/eks/variables.tf @@ -6,7 +6,7 @@ variable "region" { variable "kubernetes_version" { description = "Version of Kubernetes to use for EKS cluster" type = string - default = "1.28" + default = "1.30" } variable "eks_cluster_name" { diff --git a/testsuite/forge-cli/src/main.rs b/testsuite/forge-cli/src/main.rs index 
3deb5ebfa71c4..794e564efaba1 100644 --- a/testsuite/forge-cli/src/main.rs +++ b/testsuite/forge-cli/src/main.rs @@ -603,6 +603,8 @@ fn get_test_suite( return Ok(test_suite); } else if let Some(test_suite) = get_dag_test(test_name, duration, test_cmd) { return Ok(test_suite); + } else if let Some(test_suite) = get_indexer_test(test_name) { + return Ok(test_suite); } // Otherwise, check the test name against the ungrouped test suites @@ -691,6 +693,15 @@ fn get_land_blocking_test( Some(test) } +/// Attempts to match the test name to an indexer test +fn get_indexer_test(test_name: &str) -> Option { + let test = match test_name { + "indexer_test" => indexer_test(), + _ => return None, // The test name does not match an indexer test + }; + Some(test) +} + /// Attempts to match the test name to a network benchmark test fn get_netbench_test(test_name: &str) -> Option { let test = match test_name { @@ -2367,6 +2378,99 @@ fn multiregion_benchmark_test() -> ForgeConfig { ) } +/// Workload sweep with multiple stressful workloads for indexer +fn indexer_test() -> ForgeConfig { + // Define all the workloads and their corresponding success criteria upfront + // The TransactionTypeArg is the workload per phase + // The structure of the success criteria is generally (min_tps, latencies...). See below for the exact definition. 
+ let workloads_and_criteria = vec![ + ( + TransactionWorkload::new(TransactionTypeArg::CoinTransfer, 20000), + (7000, 0.5, 0.5, 0.5), + ), + ( + TransactionWorkload::new(TransactionTypeArg::NoOp, 20000).with_num_modules(100), + (8500, 0.5, 0.5, 0.5), + ), + ( + TransactionWorkload::new(TransactionTypeArg::ModifyGlobalResource, 6000) + .with_transactions_per_account(1), + (2000, 0.5, 0.5, 0.5), + ), + ( + TransactionWorkload::new(TransactionTypeArg::TokenV2AmbassadorMint, 20000) + .with_unique_senders(), + (3200, 0.5, 0.5, 0.5), + ), + ( + TransactionWorkload::new(TransactionTypeArg::PublishPackage, 200) + .with_transactions_per_account(1), + (28, 0.5, 0.5, 0.5), + ), + ( + TransactionWorkload::new(TransactionTypeArg::VectorPicture30k, 100), + (100, 1.0, 1.0, 1.0), + ), + ( + TransactionWorkload::new(TransactionTypeArg::SmartTablePicture30KWith200Change, 100), + (100, 1.0, 1.0, 1.0), + ), + ( + TransactionWorkload::new( + TransactionTypeArg::TokenV1NFTMintAndTransferSequential, + 1000, + ), + (500, 0.5, 0.5, 0.5), + ), + ( + TransactionWorkload::new(TransactionTypeArg::TokenV1FTMintAndTransfer, 1000), + (500, 0.5, 0.5, 0.5), + ), + ]; + let num_sweep = workloads_and_criteria.len(); + + let workloads = Workloads::TRANSACTIONS( + workloads_and_criteria + .iter() + .map(|(w, _)| w.clone()) + .collect(), + ); + let criteria = workloads_and_criteria + .iter() + .map(|(_, c)| { + let ( + min_tps, + indexer_fullnode_processed_batch, + indexer_cache_worker_processed_batch, + indexer_data_service_all_chunks_sent, + ) = c.to_owned(); + SuccessCriteria::new(min_tps).add_latency_breakdown_threshold( + LatencyBreakdownThreshold::new_strict(vec![ + ( + LatencyBreakdownSlice::IndexerFullnodeProcessedBatch, + indexer_fullnode_processed_batch, + ), + ( + LatencyBreakdownSlice::IndexerCacheWorkerProcessedBatch, + indexer_cache_worker_processed_batch, + ), + ( + LatencyBreakdownSlice::IndexerDataServiceAllChunksSent, + indexer_data_service_all_chunks_sent, + ), + ]), + ) + }) + 
.collect::>(); + + realistic_env_sweep_wrap(4, 4, LoadVsPerfBenchmark { + test: Box::new(PerformanceBenchmark), + workloads, + criteria, + background_traffic: background_traffic_for_sweep(num_sweep), + }) +} + /// This test runs a constant-TPS benchmark where the network includes /// PFNs, and the transactions are submitted to the PFNs. This is useful /// for measuring latencies when the system is not saturated. @@ -2525,10 +2629,10 @@ pub async fn check_account_balance( expected: u64, ) -> Result<()> { let balance = client - .get_account_balance(account_address) + .view_apt_account_balance(account_address) .await? .into_inner(); - assert_eq!(balance.get(), expected); + assert_eq!(balance, expected); Ok(()) } diff --git a/testsuite/forge/src/backend/k8s/cluster_helper.rs b/testsuite/forge/src/backend/k8s/cluster_helper.rs index 92e44e92f25fd..46131ac4e6435 100644 --- a/testsuite/forge/src/backend/k8s/cluster_helper.rs +++ b/testsuite/forge/src/backend/k8s/cluster_helper.rs @@ -71,71 +71,118 @@ pub fn dump_string_to_file( Ok(file_path_str) } +#[derive(Error, Debug)] +#[error("{0}")] +enum LogJobError { + RetryableError(String), + FinalError(String), +} + +/** + * Tail the logs of a job. Returns OK if the job has a pod that succeeds. + * Assumes that the job only runs once and exits, and has no configured retry policy (i.e. backoffLimit = 0) + */ +async fn tail_job_logs( + jobs_api: Arc>, + job_name: String, + job_namespace: String, +) -> Result<(), LogJobError> { + let genesis_job = jobs_api + .get_status(&job_name) + .await + .map_err(|e| LogJobError::FinalError(format!("Failed to get job status: {}", e)))?; + + let status = genesis_job.status.expect("Job status not found"); + info!("Job {} status: {:?}", &job_name, status); + match status.active { + Some(active) => { + if active < 1 { + return Err(LogJobError::RetryableError(format!( + "Job {} has no active pods. 
Maybe it has not started yet", + &job_name + ))); + } + // try tailing the logs of the genesis job + // by the time this is done, we can re-evalulate its status + let mut command = tokio::process::Command::new(KUBECTL_BIN) + .args([ + "-n", + &job_namespace, + "logs", + "--tail=10", // in case of connection reset we only want the last few lines to avoid spam + "-f", + format!("job/{}", &job_name).as_str(), + ]) + .stdout(Stdio::piped()) + .spawn() + .map_err(|e| { + LogJobError::RetryableError(format!("Failed to spawn command: {}", e)) + })?; + // Ensure the command has stdout + let stdout = command.stdout.take().ok_or_else(|| { + LogJobError::RetryableError("Failed to capture stdout".to_string()) + })?; + + // Create a BufReader to read the output asynchronously, line by line + let mut reader = BufReader::new(stdout).lines(); + + // Iterate over the lines as they come + while let Some(line) = reader.next_line().await.transpose() { + match line { + Ok(line) => { + info!("[{}]: {}", &job_name, line); // Add a prefix to each line + }, + Err(e) => { + return Err(LogJobError::RetryableError(format!( + "Error reading line: {}", + e + ))); + }, + } + } + command.wait().await.map_err(|e| { + LogJobError::RetryableError(format!("Error waiting for command: {}", e)) + })?; + }, + None => info!("Job {} has no active pods running", &job_name), + } + match status.succeeded { + Some(_) => { + info!("Job {} succeeded!", &job_name); + return Ok(()); + }, + None => info!("Job {} has no succeeded pods", &job_name), + } + if status.failed.is_some() { + info!("Job {} failed!", &job_name); + return Err(LogJobError::FinalError(format!("Job {} failed", &job_name))); + } + Err(LogJobError::RetryableError(format!( + "Job {} has no succeeded or failed pods. 
Maybe it has not started yet.", + &job_name + ))) +} + /// Waits for a job to complete, while tailing the job's logs pub async fn wait_log_job( jobs_api: Arc>, job_namespace: &str, job_name: String, - retry_strategy: impl Iterator, + retry_policy: RetryPolicy, ) -> Result<()> { - aptos_retrier::retry_async(retry_strategy, || { - let jobs_api = jobs_api.clone(); - let job_name = job_name.clone(); - Box::pin(async move { - let genesis_job = jobs_api.get_status(&job_name).await.unwrap(); - - let status = genesis_job.status.unwrap(); - info!("Job {} status: {:?}", &job_name, status); - match status.active { - Some(_) => { - // try tailing the logs of the genesis job - // by the time this is done, we can re-evalulate its status - let mut command = tokio::process::Command::new(KUBECTL_BIN) - .args([ - "-n", - job_namespace, - "logs", - "--tail=10", // in case of connection reset we only want the last few lines to avoid spam - "-f", - format!("job/{}", &job_name).as_str(), - ]) - .stdout(Stdio::piped()) - .spawn()?; - // Ensure the command has stdout - let stdout = command - .stdout - .take() - .ok_or_else(|| anyhow::anyhow!("Failed to capture stdout"))?; - - // Create a BufReader to read the output asynchronously, line by line - let mut reader = BufReader::new(stdout).lines(); - - // Iterate over the lines as they come - while let Some(line) = reader.next_line().await.transpose() { - match line { - Ok(line) => { - info!("[{}]: {}", &job_name, line); // Add a prefix to each line - }, - Err(e) => { - bail!("Error reading line: {}", e); - }, - } - } - command.wait().await?; - }, - None => info!("Job {} completed running", &job_name), - } - info!("Job {} status: {:?}", &job_name, status); - match status.succeeded { - Some(_) => { - info!("Job {} done", &job_name); - Ok(()) - }, - None => bail!("Job {} did not succeed", &job_name), - } - }) - }) - .await + retry_policy + .retry_if( + move || { + tail_job_logs( + jobs_api.clone(), + job_name.clone(), + 
job_namespace.to_string(), + ) + }, + |e: &LogJobError| matches!(e, LogJobError::RetryableError(_)), + ) + .await?; + Ok(()) } /// Waits for a given number of HAProxy K8s Deployments to be ready diff --git a/testsuite/forge/src/backend/k8s/mod.rs b/testsuite/forge/src/backend/k8s/mod.rs index 260887b9145b3..782d9d7aa8749 100644 --- a/testsuite/forge/src/backend/k8s/mod.rs +++ b/testsuite/forge/src/backend/k8s/mod.rs @@ -245,6 +245,7 @@ impl Factory for K8sFactory { self.keep, new_era, self.use_port_forward, + self.enable_indexer, ) .await .unwrap(); diff --git a/testsuite/forge/src/backend/k8s/swarm.rs b/testsuite/forge/src/backend/k8s/swarm.rs index 4d3c30e31801e..9211bed8a1381 100644 --- a/testsuite/forge/src/backend/k8s/swarm.rs +++ b/testsuite/forge/src/backend/k8s/swarm.rs @@ -62,6 +62,7 @@ pub struct K8sSwarm { era: Option, use_port_forward: bool, chaos_experiment_ops: Box, + has_indexer: bool, } impl K8sSwarm { @@ -75,6 +76,7 @@ impl K8sSwarm { keep: bool, era: Option, use_port_forward: bool, + has_indexer: bool, ) -> Result { let kube_client = create_k8s_client().await?; @@ -123,6 +125,7 @@ impl K8sSwarm { kube_client: kube_client.clone(), kube_namespace: kube_namespace.to_string(), }), + has_indexer, }; // test hitting the configured prometheus endpoint @@ -446,6 +449,10 @@ impl Swarm for K8sSwarm { fn get_default_pfn_node_config(&self) -> NodeConfig { get_default_pfn_node_config() } + + fn has_indexer(&self) -> bool { + self.has_indexer + } } /// Amount of time to wait for genesis to complete @@ -460,11 +467,6 @@ pub fn k8s_wait_nodes_strategy() -> impl Iterator { fixed_retry_strategy(10 * 1000, 120) } -pub fn k8s_wait_indexer_strategy() -> impl Iterator { - // retry every 10 seconds for 20 minutes - fixed_retry_strategy(10 * 1000, 120) -} - async fn list_stateful_sets(client: K8sClient, kube_namespace: &str) -> Result> { let stateful_set_api: Api = Api::namespaced(client, kube_namespace); let lp = ListParams::default(); diff --git 
a/testsuite/forge/src/backend/k8s_deployer/deployer.rs b/testsuite/forge/src/backend/k8s_deployer/deployer.rs index 891748cfafd21..62e7ce724d0b3 100644 --- a/testsuite/forge/src/backend/k8s_deployer/deployer.rs +++ b/testsuite/forge/src/backend/k8s_deployer/deployer.rs @@ -5,9 +5,8 @@ use super::{ DEFAULT_FORGE_DEPLOYER_IMAGE_TAG, FORGE_DEPLOYER_SERVICE_ACCOUNT_NAME, FORGE_DEPLOYER_VALUES_ENV_VAR_NAME, }; -use crate::{ - k8s_wait_indexer_strategy, maybe_create_k8s_resource, wait_log_job, K8sApi, ReadWrite, Result, -}; +use crate::{maybe_create_k8s_resource, wait_log_job, K8sApi, ReadWrite, Result}; +use again::RetryPolicy; use aptos_logger::info; use k8s_openapi::api::{ batch::v1::Job, @@ -18,7 +17,7 @@ use kube::{ api::{ObjectMeta, PostParams}, ResourceExt, }; -use std::{collections::BTreeMap, sync::Arc}; +use std::{collections::BTreeMap, sync::Arc, time::Duration}; /// The ForgeDeployerManager is responsible for managing the lifecycle of forge deployers, which deploy the /// forge components to the k8s cluster. @@ -240,11 +239,14 @@ impl ForgeDeployerManager { * Wait for the deployer job to complete. */ pub async fn wait_completed(&self) -> Result<()> { + // retry for ~10 min at a fixed interval. 
Note the actual job may take longer than this to complete, but the last attempt to tail the logs will succeed before then + // Ideally the deployer itself knows to fail fast depending on the workloads' health + let retry_policy = RetryPolicy::fixed(Duration::from_secs(10)).with_max_retries(6 * 10); wait_log_job( self.jobs_api.clone(), &self.namespace, self.get_name(), - k8s_wait_indexer_strategy(), + retry_policy, ) .await } diff --git a/testsuite/forge/src/backend/local/node.rs b/testsuite/forge/src/backend/local/node.rs index d0beef57a2746..e1a61dacb4ae1 100644 --- a/testsuite/forge/src/backend/local/node.rs +++ b/testsuite/forge/src/backend/local/node.rs @@ -197,7 +197,7 @@ impl LocalNode { &self.config } - pub(crate) fn config_mut(&mut self) -> &mut NodeConfig { + pub fn config_mut(&mut self) -> &mut NodeConfig { &mut self.config } diff --git a/testsuite/forge/src/backend/local/swarm.rs b/testsuite/forge/src/backend/local/swarm.rs index cb1f8ba2989f7..eba54ae3c5bf3 100644 --- a/testsuite/forge/src/backend/local/swarm.rs +++ b/testsuite/forge/src/backend/local/swarm.rs @@ -650,6 +650,10 @@ impl Swarm for LocalSwarm { fn get_default_pfn_node_config(&self) -> NodeConfig { todo!() } + + fn has_indexer(&self) -> bool { + false + } } #[derive(Debug)] diff --git a/testsuite/forge/src/interface/aptos.rs b/testsuite/forge/src/interface/aptos.rs index 6e38ae931dfed..a77bad1eeadab 100644 --- a/testsuite/forge/src/interface/aptos.rs +++ b/testsuite/forge/src/interface/aptos.rs @@ -113,6 +113,7 @@ impl<'t> AptosContext<'t> { } } +#[derive(Clone)] pub struct AptosPublicInfo { chain_id: ChainId, inspection_service_url: Url, diff --git a/testsuite/forge/src/interface/prometheus_metrics.rs b/testsuite/forge/src/interface/prometheus_metrics.rs index cce9096aaac75..20283bbdb4e2c 100644 --- a/testsuite/forge/src/interface/prometheus_metrics.rs +++ b/testsuite/forge/src/interface/prometheus_metrics.rs @@ -43,6 +43,12 @@ impl fmt::Debug for MetricSamples { } } +impl Default for 
MetricSamples { + fn default() -> Self { + Self::new(vec![]) + } +} + #[derive(Clone, Debug)] pub struct SystemMetrics { pub cpu_core_metrics: MetricSamples, @@ -105,6 +111,11 @@ pub enum LatencyBreakdownSlice { ConsensusProposalToOrdered, ConsensusOrderedToCommit, ConsensusProposalToCommit, + // each of the indexer grpc steps in order + IndexerFullnodeProcessedBatch, + IndexerCacheWorkerProcessedBatch, + IndexerDataServiceAllChunksSent, + // TODO: add processor insertion into DB latency } #[derive(Clone, Debug)] @@ -119,10 +130,16 @@ impl LatencyBreakdown { self.0.keys().cloned().collect() } - pub fn get_samples(&self, slice: &LatencyBreakdownSlice) -> &MetricSamples { - self.0 - .get(slice) - .unwrap_or_else(|| panic!("Missing latency breakdown for {:?}", slice)) + pub fn get_samples(&self, slice: &LatencyBreakdownSlice) -> Option<&MetricSamples> { + self.0.get(slice) + } + + pub fn join(&self, other: &LatencyBreakdown) -> LatencyBreakdown { + let mut ret_latency = self.0.clone(); + for (slice, samples) in other.0.iter() { + ret_latency.insert(slice.clone(), samples.clone()); + } + LatencyBreakdown::new(ret_latency) } } @@ -210,5 +227,54 @@ pub async fn fetch_latency_breakdown( MetricSamples::new(consensus_proposal_to_commit_samples), ); + if swarm.has_indexer() { + // These counters are defined in ecosystem/indexer-grpc/indexer-grpc-utils/src/counters.rs + let indexer_fullnode_processed_batch_query = + r#"max(indexer_grpc_duration_in_secs{step="4", service_type="indexer_fullnode"})"#; + let indexer_cache_worker_processed_batch_query = + r#"max(indexer_grpc_duration_in_secs{step="4", service_type="cache_worker"})"#; + let indexer_data_service_all_chunks_sent_query = + r#"max(indexer_grpc_duration_in_secs{step="4", service_type="data_service"})"#; + + let indexer_fullnode_processed_batch_samples = swarm + .query_range_metrics( + indexer_fullnode_processed_batch_query, + start_time as i64, + end_time as i64, + None, + ) + .await?; + + let 
indexer_cache_worker_processed_batch_samples = swarm + .query_range_metrics( + indexer_cache_worker_processed_batch_query, + start_time as i64, + end_time as i64, + None, + ) + .await?; + + let indexer_data_service_all_chunks_sent_samples = swarm + .query_range_metrics( + indexer_data_service_all_chunks_sent_query, + start_time as i64, + end_time as i64, + None, + ) + .await?; + + samples.insert( + LatencyBreakdownSlice::IndexerFullnodeProcessedBatch, + MetricSamples::new(indexer_fullnode_processed_batch_samples), + ); + samples.insert( + LatencyBreakdownSlice::IndexerCacheWorkerProcessedBatch, + MetricSamples::new(indexer_cache_worker_processed_batch_samples), + ); + samples.insert( + LatencyBreakdownSlice::IndexerDataServiceAllChunksSent, + MetricSamples::new(indexer_data_service_all_chunks_sent_samples), + ); + } Ok(LatencyBreakdown::new(samples)) } diff --git a/testsuite/forge/src/interface/swarm.rs b/testsuite/forge/src/interface/swarm.rs index b2c3f501ba5ec..4f378f37bc9d8 100644 --- a/testsuite/forge/src/interface/swarm.rs +++ b/testsuite/forge/src/interface/swarm.rs @@ -105,6 +105,10 @@ pub trait Swarm: Sync + Send { } fn get_default_pfn_node_config(&self) -> NodeConfig; + + /// Check if the swarm has an indexer. NOTE: in the future we should make this more rich, and include + /// indexer endpoints, similar to how we collect validator and fullnode endpoints. 
+ fn has_indexer(&self) -> bool; } impl SwarmExt for T where T: Swarm {} diff --git a/testsuite/forge/src/success_criteria.rs b/testsuite/forge/src/success_criteria.rs index e1df551b4cfa9..e7383a87d12fe 100644 --- a/testsuite/forge/src/success_criteria.rs +++ b/testsuite/forge/src/success_criteria.rs @@ -150,7 +150,9 @@ impl LatencyBreakdownThreshold { traffic_name_addition: &String, ) -> anyhow::Result<()> { for (slice, threshold) in &self.thresholds { - let samples = metrics.get_samples(slice); + let samples = metrics + .get_samples(slice) + .expect("Could not get metric samples"); threshold.ensure_metrics_threshold( &format!("{:?}{}", slice, traffic_name_addition), samples.get(), diff --git a/testsuite/forge_test.py b/testsuite/forge_test.py index 5e464c5893642..76d993a32cd17 100644 --- a/testsuite/forge_test.py +++ b/testsuite/forge_test.py @@ -64,7 +64,8 @@ class HasAssertMultiLineEqual(Protocol): - def assertMultiLineEqual(self, first: str, second: str, msg: Any = ...) -> None: ... + def assertMultiLineEqual(self, first: str, second: str, msg: Any = ...) -> None: + ... 
def get_cwd() -> Path: diff --git a/testsuite/fuzzer/fuzz.sh b/testsuite/fuzzer/fuzz.sh index a932286fc6a70..bdf98b9275816 100755 --- a/testsuite/fuzzer/fuzz.sh +++ b/testsuite/fuzzer/fuzz.sh @@ -39,6 +39,12 @@ function usage() { "build-oss-fuzz") echo "Usage: $0 build-oss-fuzz " ;; + "coverage") + echo "Usage: $0 coverage " + ;; + "clean-coverage") + echo "Usage: $0 clean-coverage " + ;; "debug") echo "Usage: $0 debug " ;; @@ -55,10 +61,11 @@ function usage() { echo "Usage: $0 test" ;; *) - echo "Usage: $0 " + echo "Usage: $0 " echo " add adds a new fuzz target" echo " build builds fuzz targets" echo " build-oss-fuzz builds fuzz targets for oss-fuzz" + echo " coverage generates coverage for a fuzz target" echo " debug debugs a fuzz target with a testcase" echo " flamegraph generates a flamegraph for a fuzz target with a testcase" echo " list lists existing fuzz targets" @@ -125,6 +132,49 @@ function build-oss-fuzz() { done } +function coverage() { + if [ -z "$1" ]; then + usage coverage + fi + fuzz_target=$1 + local corpus_dir="fuzz/corpus/$fuzz_target" + local coverage_dir="./fuzz/coverage/$fuzz_target/report" + mkdir -p $coverage_dir + + if [ ! 
-d "fuzz/coverage/$fuzz_target" ]; then + cargo_fuzz coverage $fuzz_target $corpus_dir + fi + + info "Generating coverage for $fuzz_target" + + fuzz_target_bin=$(find ./target -name $fuzz_target -type f -perm /111) #$(find target/*/coverage -name $fuzz_target -type f) + echo "Found fuzz target binary: $fuzz_target_bin" + # Generate the coverage report + cargo +nightly cov -- show $fuzz_target_bin \ + --format=html \ + --instr-profile=fuzz/coverage/$fuzz_target/coverage.profdata \ + --show-directory-coverage \ + --output-dir=$coverage_dir \ + -Xdemangler=rustfilt \ + --show-branches=count \ + --ignore-filename-regex='rustc/.*/library|\.cargo' +} + +function clean-coverage() { + if [ "$#" -ne 1 ]; then + usage clean + fi + + local fuzz_target="$1" + local target_dir="coverage/$fuzz_target" + + if [ "$fuzz_target" == "all" ]; then + rm -rf coverage + else + rm -rf $target_dir + fi +} + # use rust-gdb to debug a fuzz target with a testcase function debug() { if [ -z "$2" ]; then @@ -182,7 +232,7 @@ function run() { fi fi info "Running $fuzz_target" - cargo_fuzz run --sanitizer none $fuzz_target $testcase + cargo_fuzz run --sanitizer none -O $fuzz_target $testcase -- -fork=10 } function test() { @@ -247,6 +297,14 @@ case "$1" in shift build-oss-fuzz "$@" ;; + "coverage") + shift + coverage "$@" + ;; + "clean-coverage") + shift + clean-coverage "$@" + ;; "debug") shift debug "$@" diff --git a/testsuite/fuzzer/fuzz/Cargo.toml b/testsuite/fuzzer/fuzz/Cargo.toml index e5054199095f9..bc787a7f08988 100644 --- a/testsuite/fuzzer/fuzz/Cargo.toml +++ b/testsuite/fuzzer/fuzz/Cargo.toml @@ -9,11 +9,12 @@ cargo-fuzz = true [dependencies] aptos-cached-packages = { workspace = true } +aptos-crypto = { workspace = true } aptos-framework = { workspace = true } aptos-language-e2e-tests = { workspace = true, features = ["fuzzing"] } -aptos-types = { workspace = true } +aptos-types = { workspace = true, features = ["fuzzing"] } aptos-vm = { workspace = true } -arbitrary = "1.3.2" 
+arbitrary = { workspace = true, features = ["derive"] } bcs = { workspace = true } libfuzzer-sys = "0.4" move-binary-format = { workspace = true, features = ["fuzzing"] } @@ -22,6 +23,8 @@ move-core-types = { workspace = true, features = ["fuzzing"] } move-vm-types = { workspace = true, features = ["fuzzing"] } once_cell = { workspace = true } rayon = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } [features] disabled = [] @@ -77,3 +80,9 @@ name = "move_aptosvm_publish" path = "fuzz_targets/move/aptosvm_publish.rs" test = false doc = false + +[[bin]] +name = "move_aptosvm_authenticators" +path = "fuzz_targets/move/aptosvm_authenticators.rs" +test = false +doc = false diff --git a/testsuite/fuzzer/fuzz/fuzz_targets/move/aptosvm_authenticators.rs b/testsuite/fuzzer/fuzz/fuzz_targets/move/aptosvm_authenticators.rs new file mode 100644 index 0000000000000..a406d991a800c --- /dev/null +++ b/testsuite/fuzzer/fuzz/fuzz_targets/move/aptosvm_authenticators.rs @@ -0,0 +1,372 @@ +#![no_main] + +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use aptos_cached_packages::aptos_stdlib; +use aptos_crypto::{ + ed25519::{Ed25519PrivateKey, Ed25519PublicKey}, + PrivateKey, SigningKey, Uniform, +}; +use aptos_language_e2e_tests::{ + account::Account, data_store::GENESIS_CHANGE_SET_HEAD, executor::FakeExecutor, +}; +use aptos_types::{ + chain_id::ChainId, + keyless::{AnyKeylessPublicKey, KeylessSignature, TransactionAndProof}, + transaction::{ + authenticator::{ + AccountAuthenticator, AnyPublicKey, AnySignature, EphemeralPublicKey, + EphemeralSignature, SingleKeyAuthenticator, TransactionAuthenticator, + }, + ExecutionStatus, SignedTransaction, TransactionStatus, + }, + write_set::WriteSet, +}; +use aptos_vm::AptosVM; +use libfuzzer_sys::{fuzz_target, Corpus}; +use move_core_types::vm_status::{StatusCode, StatusType}; +use once_cell::sync::Lazy; +use std::sync::Arc; +mod utils; +use utils::{ + 
check_for_invariant_violation, FuzzerTransactionAuthenticator, Style, TransactionState, +}; + +// genesis write set generated once for each fuzzing session +static VM: Lazy = Lazy::new(|| GENESIS_CHANGE_SET_HEAD.write_set().clone()); + +const FUZZER_CONCURRENCY_LEVEL: usize = 1; +static TP: Lazy> = Lazy::new(|| { + Arc::new( + rayon::ThreadPoolBuilder::new() + .num_threads(FUZZER_CONCURRENCY_LEVEL) + .build() + .unwrap(), + ) +}); + +fn run_case(input: TransactionState) -> Result<(), Corpus> { + tdbg!(&input); + + AptosVM::set_concurrency_level_once(FUZZER_CONCURRENCY_LEVEL); + let mut vm = FakeExecutor::from_genesis_with_existing_thread_pool( + &VM, + ChainId::mainnet(), + Arc::clone(&TP), + ) + .set_not_parallel(); + + let sender_acc = if true { + // create sender pub/priv key. initialize and fund account + vm.create_accounts(1, input.tx_auth_type.sender().fund_amount(), 0) + .remove(0) + } else { + // only create sender pub/priv key. do not initialize + Account::new() + }; + + let receiver = Account::new(); + + // build tx + let tx = sender_acc + .transaction() + .payload(aptos_stdlib::aptos_coin_transfer(*receiver.address(), 1)) + .sequence_number(0) + .gas_unit_price(100) + .max_gas_amount(1000); + + let tx_auth_type = input.tx_auth_type.clone(); + + let raw_tx = tx.raw(); + let tx = match tx_auth_type { + FuzzerTransactionAuthenticator::Ed25519 { sender: _ } => raw_tx + .sign(&sender_acc.privkey, sender_acc.pubkey.as_ed25519().unwrap()) + .map_err(|_| Corpus::Keep)? 
+ .into_inner(), + FuzzerTransactionAuthenticator::Keyless { + sender: _, + style, + any_keyless_public_key, + keyless_signature, + } => { + match style { + Style::Break => { + // Generate a keypair for ephemeral keys + let private_key = Ed25519PrivateKey::generate_for_testing(); + let public_key: Ed25519PublicKey = private_key.public_key(); + + // Create a TransactionAndProof to be signed + // This needs to be valid because the signature is checked in mempool (real flow) + let txn_and_proof = TransactionAndProof { + message: raw_tx.clone(), + proof: None, + }; + + // Sign the transaction + let signature = private_key.sign(&txn_and_proof).map_err(|_| Corpus::Keep)?; + + // Build AnyPublicKey::Keyless + let any_public_key = match any_keyless_public_key { + AnyKeylessPublicKey::Normal(normal_key) => { + // TODO: think about idc, it's generated by new_from_preimage + AnyPublicKey::Keyless { + public_key: normal_key, + } + }, + AnyKeylessPublicKey::Federated(federated_key) => { + // TODO: think about idc, it's generated by new_from_preimage (nested in KeylessPublicKey) + AnyPublicKey::FederatedKeyless { + public_key: federated_key, + } + }, + }; + + // Build AnySignature::Keyless + let any_signature = AnySignature::Keyless { + signature: KeylessSignature { + cert: keyless_signature.cert().clone(), + jwt_header_json: input.tx_auth_type.get_jwt_header_json().unwrap(), + exp_date_secs: keyless_signature.exp_date_secs(), + ephemeral_pubkey: EphemeralPublicKey::ed25519(public_key), + ephemeral_signature: EphemeralSignature::ed25519(signature), + }, + }; + + // Build an authenticator + let authenticator = TransactionAuthenticator::SingleSender { + sender: AccountAuthenticator::SingleKey { + authenticator: SingleKeyAuthenticator::new( + any_public_key, + any_signature, + ), + }, + }; + + // Construct the SignedTransaction + SignedTransaction::new_signed_transaction(raw_tx, authenticator) + }, + /* + Style::MatchJWT => { + // Generate a keypair for ephemeral keys + let 
private_key = Ed25519PrivateKey::generate_for_testing(); + let public_key: Ed25519PublicKey = private_key.public_key(); + + // Create a TransactionAndProof to be signed + let txn_and_proof = TransactionAndProof { + message: raw_tx.clone(), + proof: None, + }; + + // Sign the transaction + let signature = private_key.sign(&txn_and_proof).map_err(|_| Corpus::Keep)?; + + // Build AnyPublicKey::Keyless + let any_public_key = AnyPublicKey::Keyless { + public_key: KeylessPublicKey { + iss_val: "test.oidc.provider".to_string(), + idc: IdCommitment::new_from_preimage( + &Pepper::from_number(0x5678), + "aud", + "uid_key", + "uid_val", + ) + .map_err(|_| Corpus::Keep)?, + }, + }; + + /* + EphemeralCertificate::OpenIdSig(OpenIdSig { + jwt_sig: vec![], + jwt_payload_json: "jwt_payload_json".to_string(), + uid_key: "uid_key".to_string(), + epk_blinder: b"epk_blinder".to_vec(), + pepper: Pepper::from_number(0x1234), + idc_aud_val: None, + }) + */ + + // Build AnySignature::Keyless + let any_signature = AnySignature::Keyless { + signature: KeylessSignature { + cert: keyless_signature.cert().clone(), + jwt_header_json: input.tx_auth_type.get_jwt_header_json().unwrap(), + exp_date_secs: keyless_signature.exp_date_secs(), + ephemeral_pubkey: EphemeralPublicKey::ed25519(public_key), + ephemeral_signature: EphemeralSignature::ed25519(signature), + }, + }; + + // Build an authenticator + let authenticator = TransactionAuthenticator::SingleSender { + sender: AccountAuthenticator::SingleKey { + authenticator: SingleKeyAuthenticator::new(any_public_key, any_signature), + }, + }; + + // Construct the SignedTransaction + SignedTransaction::new_signed_transaction(raw_tx, authenticator) + }, + Style::MatchKeys => { + // Generate a keypair for ephemeral keys + let private_key = Ed25519PrivateKey::generate_for_testing(); + let public_key: Ed25519PublicKey = private_key.public_key(); + + // Create a TransactionAndProof to be signed + let txn_and_proof = TransactionAndProof { + message: 
raw_tx.clone(), + proof: None, + }; + + // Sign the transaction + let signature = private_key.sign(&txn_and_proof).map_err(|_| Corpus::Keep)?; + + // Build AnyPublicKey::Keyless + let any_public_key = AnyPublicKey::Keyless { + public_key: KeylessPublicKey { + iss_val: "test.oidc.provider".to_string(), + idc: IdCommitment::new_from_preimage( + &Pepper::from_number(0x5678), + "aud", + "uid_key", + "uid_val", + ) + .map_err(|_| Corpus::Keep)?, + }, + }; + + /* + EphemeralCertificate::OpenIdSig(OpenIdSig { + jwt_sig: vec![], + jwt_payload_json: "jwt_payload_json".to_string(), + uid_key: "uid_key".to_string(), + epk_blinder: b"epk_blinder".to_vec(), + pepper: Pepper::from_number(0x1234), + idc_aud_val: None, + }) + */ + + // Build AnySignature::Keyless + let any_signature = AnySignature::Keyless { + signature: KeylessSignature { + cert: keyless_signature.cert().clone(), + jwt_header_json: input.tx_auth_type.get_jwt_header_json().unwrap(), + exp_date_secs: keyless_signature.exp_date_secs(), + ephemeral_pubkey: EphemeralPublicKey::ed25519(public_key), + ephemeral_signature: EphemeralSignature::ed25519(signature), + }, + }; + + // Build an authenticator + let authenticator = TransactionAuthenticator::SingleSender { + sender: AccountAuthenticator::SingleKey { + authenticator: SingleKeyAuthenticator::new(any_public_key, any_signature), + }, + }; + + // Construct the SignedTransaction + SignedTransaction::new_signed_transaction(raw_tx, authenticator) + } + */ + } + }, + FuzzerTransactionAuthenticator::MultiAgent { + sender: _, + secondary_signers, + } => { + // higher number here slows down fuzzer significatly due to slow signing process. 
+ if secondary_signers.len() > 10 { + return Err(Corpus::Keep); + } + let secondary_accs: Vec<_> = secondary_signers + .iter() + .map(|acc| acc.convert_account(&mut vm)) + .collect(); + let secondary_signers = secondary_accs.iter().map(|acc| *acc.address()).collect(); + let secondary_private_keys = secondary_accs.iter().map(|acc| &acc.privkey).collect(); + raw_tx + .sign_multi_agent( + &sender_acc.privkey, + secondary_signers, + secondary_private_keys, + ) + .map_err(|_| Corpus::Keep)? + .into_inner() + }, + FuzzerTransactionAuthenticator::FeePayer { + sender: _, + secondary_signers, + fee_payer, + } => { + // higher number here slows down fuzzer significatly due to slow signing process. + if secondary_signers.len() > 10 { + return Err(Corpus::Keep); + } + let secondary_accs: Vec<_> = secondary_signers + .iter() + .map(|acc| acc.convert_account(&mut vm)) + .collect(); + + let secondary_signers = secondary_accs.iter().map(|acc| *acc.address()).collect(); + let secondary_private_keys = secondary_accs.iter().map(|acc| &acc.privkey).collect(); + let fee_payer_acc = fee_payer.convert_account(&mut vm); + raw_tx + .sign_fee_payer( + &sender_acc.privkey, + secondary_signers, + secondary_private_keys, + *fee_payer_acc.address(), + &fee_payer_acc.privkey, + ) + .map_err(|_| Corpus::Keep)? + .into_inner() + }, + }; + + // exec tx + tdbg!("exec start"); + + let res = vm.execute_block(vec![tx.clone()]); + + let res = res + .map_err(|e| { + check_for_invariant_violation(e); + Corpus::Keep + })? 
+ .pop() + .expect("expect 1 output"); + tdbg!("exec end"); + + // if error exit gracefully + let status = match tdbg!(res.status()) { + TransactionStatus::Keep(status) => status, + TransactionStatus::Discard(e) => { + if e.status_type() == StatusType::InvariantViolation { + panic!("invariant violation {:?}", e); + } + return Err(Corpus::Keep); + }, + _ => return Err(Corpus::Keep), + }; + match tdbg!(status) { + ExecutionStatus::Success => (), + ExecutionStatus::MiscellaneousError(e) => { + if let Some(e) = e { + if e.status_type() == StatusType::InvariantViolation + && *e != StatusCode::TYPE_RESOLUTION_FAILURE + && *e != StatusCode::STORAGE_ERROR + { + panic!("invariant violation {:?}", e); + } + } + return Err(Corpus::Keep); + }, + _ => return Err(Corpus::Keep), + }; + + Ok(()) +} + +fuzz_target!(|fuzz_data: TransactionState| -> Corpus { + run_case(fuzz_data).err().unwrap_or(Corpus::Keep) +}); diff --git a/testsuite/fuzzer/fuzz/fuzz_targets/move/aptosvm_publish_and_run.rs b/testsuite/fuzzer/fuzz/fuzz_targets/move/aptosvm_publish_and_run.rs index 20301e8db6d98..5f6a50e42f547 100644 --- a/testsuite/fuzzer/fuzz/fuzz_targets/move/aptosvm_publish_and_run.rs +++ b/testsuite/fuzzer/fuzz/fuzz_targets/move/aptosvm_publish_and_run.rs @@ -32,8 +32,8 @@ use std::{ }; mod utils; use utils::{ - check_for_invariant_violation, publish_group, sort_by_deps, Authenticator, ExecVariant, - RunnableState, + check_for_invariant_violation, publish_group, sort_by_deps, ExecVariant, + FuzzerRunnableAuthenticator, RunnableState, }; // genesis write set generated once for each fuzzing session @@ -258,11 +258,11 @@ fn run_case(mut input: RunnableState) -> Result<(), Corpus> { }; let raw_tx = tx.raw(); let tx = match input.tx_auth_type { - Authenticator::Ed25519 { sender: _ } => raw_tx + FuzzerRunnableAuthenticator::Ed25519 { sender: _ } => raw_tx .sign(&sender_acc.privkey, sender_acc.pubkey.as_ed25519().unwrap()) .map_err(|_| Corpus::Keep)? 
.into_inner(), - Authenticator::MultiAgent { + FuzzerRunnableAuthenticator::MultiAgent { sender: _, secondary_signers, } => { @@ -285,7 +285,7 @@ fn run_case(mut input: RunnableState) -> Result<(), Corpus> { .map_err(|_| Corpus::Keep)? .into_inner() }, - Authenticator::FeePayer { + FuzzerRunnableAuthenticator::FeePayer { sender: _, secondary_signers, fee_payer, diff --git a/testsuite/fuzzer/fuzz/fuzz_targets/move/utils.rs b/testsuite/fuzzer/fuzz/fuzz_targets/move/utils.rs index cfc1d1971dea5..9a95cdd257361 100644 --- a/testsuite/fuzzer/fuzz/fuzz_targets/move/utils.rs +++ b/testsuite/fuzzer/fuzz/fuzz_targets/move/utils.rs @@ -8,7 +8,10 @@ use aptos_framework::natives::code::{ ModuleMetadata, MoveOption, PackageDep, PackageMetadata, UpgradePolicy, }; use aptos_language_e2e_tests::{account::Account, executor::FakeExecutor}; -use aptos_types::transaction::{ExecutionStatus, TransactionPayload, TransactionStatus}; +use aptos_types::{ + keyless::{AnyKeylessPublicKey, EphemeralCertificate}, + transaction::{ExecutionStatus, TransactionPayload, TransactionStatus}, +}; use arbitrary::Arbitrary; use libfuzzer_sys::Corpus; use move_binary_format::{ @@ -20,6 +23,7 @@ use move_core_types::{ value::{MoveStructLayout, MoveTypeLayout, MoveValue}, vm_status::{StatusType, VMStatus}, }; +use serde::{Deserialize, Serialize}; use std::collections::{BTreeMap, BTreeSet, HashSet}; #[macro_export] @@ -60,22 +64,6 @@ pub struct UserAccount { fund: FundAmount, } -#[derive(Debug, Arbitrary, Eq, PartialEq, Clone)] -pub enum Authenticator { - Ed25519 { - sender: UserAccount, - }, - MultiAgent { - sender: UserAccount, - secondary_signers: Vec, - }, - FeePayer { - sender: UserAccount, - secondary_signers: Vec, - fee_payer: UserAccount, - }, -} - impl UserAccount { pub fn fund_amount(&self) -> u64 { match self.fund { @@ -94,15 +82,32 @@ impl UserAccount { } } -impl Authenticator { +// Used to fuzz the MoveVM +#[derive(Debug, Arbitrary, Eq, PartialEq, Clone)] +pub enum FuzzerRunnableAuthenticator { + 
Ed25519 { + sender: UserAccount, + }, + MultiAgent { + sender: UserAccount, + secondary_signers: Vec, + }, + FeePayer { + sender: UserAccount, + secondary_signers: Vec, + fee_payer: UserAccount, + }, +} + +impl FuzzerRunnableAuthenticator { pub fn sender(&self) -> UserAccount { match self { - Authenticator::Ed25519 { sender } => *sender, - Authenticator::MultiAgent { + FuzzerRunnableAuthenticator::Ed25519 { sender } => *sender, + FuzzerRunnableAuthenticator::MultiAgent { sender, secondary_signers: _, } => *sender, - Authenticator::FeePayer { + FuzzerRunnableAuthenticator::FeePayer { sender, secondary_signers: _, fee_payer: _, @@ -130,7 +135,118 @@ pub enum ExecVariant { pub struct RunnableState { pub dep_modules: Vec, pub exec_variant: ExecVariant, - pub tx_auth_type: Authenticator, + pub tx_auth_type: FuzzerRunnableAuthenticator, +} + +#[derive(Debug, Arbitrary, Eq, PartialEq, Clone, Serialize, Deserialize)] +pub struct JwtHeader { + pub alg: String, + pub typ: Option, + pub kid: Option, + // Add other JWT header fields as needed +} + +#[derive(Debug, Arbitrary, Eq, PartialEq, Clone, Serialize, Deserialize)] +pub struct FuzzingKeylessSignature { + exp_date_secs: u64, + jwt_header: JwtHeader, + cert: EphemeralCertificate, + //ephemeral_pubkey: EphemeralPublicKey, + //ephemeral_signature: EphemeralSignature, +} + +impl FuzzingKeylessSignature { + pub fn exp_date_secs(&self) -> u64 { + self.exp_date_secs + } + + pub fn jwt_header(&self) -> &JwtHeader { + &self.jwt_header + } + + pub fn cert(&self) -> &EphemeralCertificate { + &self.cert + } + + /* + pub fn ephemeral_pubkey(&self) -> &EphemeralPublicKey { + &self.ephemeral_pubkey + } + + pub fn ephemeral_signature(&self) -> &EphemeralSignature { + &self.ephemeral_signature + } + */ +} + +#[derive(Debug, Arbitrary, Eq, PartialEq, Clone)] +pub enum Style { + Break, + //MatchJWT, + //MatchKeys, +} + +//TODO: reorganize this type excluding not usefull fields. Do it after implementing JWK and Federated Keyless. 
+// Used to fuzz the transaction authenticator +#[derive(Debug, Arbitrary, Eq, PartialEq, Clone)] +pub enum FuzzerTransactionAuthenticator { + Ed25519 { + sender: UserAccount, + }, + Keyless { + sender: UserAccount, + style: Style, + any_keyless_public_key: AnyKeylessPublicKey, + keyless_signature: FuzzingKeylessSignature, + }, + MultiAgent { + sender: UserAccount, + secondary_signers: Vec, + }, + FeePayer { + sender: UserAccount, + secondary_signers: Vec, + fee_payer: UserAccount, + }, +} + +impl FuzzerTransactionAuthenticator { + pub fn sender(&self) -> UserAccount { + match self { + FuzzerTransactionAuthenticator::Ed25519 { sender } => *sender, + FuzzerTransactionAuthenticator::Keyless { + sender, + style: _, + any_keyless_public_key: _, + keyless_signature: _, + } => *sender, + FuzzerTransactionAuthenticator::MultiAgent { + sender, + secondary_signers: _, + } => *sender, + FuzzerTransactionAuthenticator::FeePayer { + sender, + secondary_signers: _, + fee_payer: _, + } => *sender, + } + } + + pub fn get_jwt_header_json(&self) -> Option { + if let FuzzerTransactionAuthenticator::Keyless { + keyless_signature, .. 
+ } = self + { + serde_json::to_string(&keyless_signature.jwt_header).ok() + } else { + None + } + } +} + +#[derive(Debug, Arbitrary, Eq, PartialEq, Clone)] +pub struct TransactionState { + pub tx_auth_type: FuzzerTransactionAuthenticator, } // used for ordering modules topologically diff --git a/testsuite/generate-format/src/consensus.rs b/testsuite/generate-format/src/consensus.rs index 50db48dcc165f..5ef14eb37c69b 100644 --- a/testsuite/generate-format/src/consensus.rs +++ b/testsuite/generate-format/src/consensus.rs @@ -115,6 +115,7 @@ pub fn get_registry() -> Result { tracer.trace_type::(&samples)?; tracer.trace_type::(&samples)?; + tracer.trace_type::(&samples)?; tracer.trace_type::(&samples)?; tracer.trace_type::(&samples)?; tracer.trace_type::(&samples)?; diff --git a/testsuite/generate-format/tests/staged/consensus.yaml b/testsuite/generate-format/tests/staged/consensus.yaml index 9548f063c237d..f9bb0aaf27da6 100644 --- a/testsuite/generate-format/tests/staged/consensus.yaml +++ b/testsuite/generate-format/tests/staged/consensus.yaml @@ -421,6 +421,10 @@ ConsensusMsg: OrderVoteMsg: NEWTYPE: TYPENAME: OrderVoteMsg + 19: + RoundTimeoutMsg: + NEWTYPE: + TYPENAME: RoundTimeoutMsg ContractEvent: ENUM: 0: @@ -828,6 +832,35 @@ RawTransaction: - expiration_timestamp_secs: U64 - chain_id: TYPENAME: ChainId +RoundTimeout: + STRUCT: + - timeout: + TYPENAME: TwoChainTimeout + - author: + TYPENAME: AccountAddress + - reason: + TYPENAME: RoundTimeoutReason + - signature: + TYPENAME: Signature +RoundTimeoutMsg: + STRUCT: + - round_timeout: + TYPENAME: RoundTimeout + - sync_info: + TYPENAME: SyncInfo +RoundTimeoutReason: + ENUM: + 0: + Unknown: UNIT + 1: + ProposalNotReceived: UNIT + 2: + PayloadUnavailable: + STRUCT: + - missing_authors: + TYPENAME: BitVec + 3: + NoQC: UNIT Script: STRUCT: - code: BYTES diff --git a/testsuite/replay_verify.py b/testsuite/replay_verify.py deleted file mode 100755 index 2ef81c92bfa4f..0000000000000 --- a/testsuite/replay_verify.py +++ 
/dev/null @@ -1,250 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright © Aptos Foundation -# SPDX-License-Identifier: Apache-2.0 - -import os -import shutil -import subprocess -import sys -from collections import deque -from multiprocessing import Pool, freeze_support -from typing import List, Tuple - -from verify_core.common import clear_artifacts, warm_cache_and_get_latest_backup_version - -TESTNET_RANGES: List[Tuple[int, int]] = [ - (862_000_000, 878_000_000), - (894_000_000, 910_000_000), - (942_000_000, 958_000_000), - (974_000_000, 990_000_000), - (1_006_000_000, 1_022_000_000), - (1_038_000_000, 1_054_000_000), - (1_070_000_000, 1_086_000_000), - (1_102_000_000, 1_115_000_000), - (1_128_000_000, 1_141_000_000), - (1_154_000_000, 1_167_000_000), - (5_495_000_000, 5_520_000_000), - (5_520_000_000, 5_545_000_000), - (5_600_000_000, 5_625_000_000), - (5_650_000_000, 5_675_000_000), - (5_675_000_000, 5_700_000_000), - (5_765_000_000, 5_785_000_000), - (5_922_000_000, 5_935_000_000), - (5_935_000_000, 5_950_000_000), - (5_950_000_000, sys.maxsize), -] - -MAINNET_RANGES: List[Tuple[int, int]] = [ - (518_000_000, 534_000_000), - (534_000_000, 550_000_000), - (550_000_000, 566_000_000), - (566_000_000, 581_000_000), - (581_000_000, 597_000_000), - (597_000_000, 613_000_000), - (613_000_000, 629_000_000), - (629_000_000, 640_000_000), - # Skip tapos range - (949_000_000, 954_000_000), - (954_000_000, 969_000_000), - (969_000_000, 984_000_000), - (984_000_000, 1_000_000_000), - (1_000_000_000, 1_020_000_000), - (1_020_000_000, 1_040_000_000), - (1_040_000_000, 1_060_000_000), - (1_060_000_000, 1_085_000_000), - # Skip tapos2 range - (1_635_000_000, 1_655_000_000), - (1_655_000_000, 1_675_000_000), - (1_675_000_000, sys.maxsize), -] - - -# retry the replay_verify_partition if it fails -def retry_replay_verify_partition(func, *args, **kwargs) -> Tuple[int, int, bytes]: - (partition_number, code, msg) = (0, 0, b"") - NUM_OF_RETRIES = 6 - for i in range(1, NUM_OF_RETRIES + 1): 
- print(f"try {i}") - (partition_number, code, msg) = func(*args, **kwargs) - # let's only not retry on txn error and success case, - if code == 2 or code == 0: - break - return (partition_number, code, msg) - - -def replay_verify_partition( - n: int, - N: int, - history_start: int, - per_partition: int, - latest_version: int, - txns_to_skip: Tuple[int], - backup_config_template_path: str, -) -> Tuple[int, int, bytes]: - """ - Run replay-verify for a partition of the backup, returning a tuple of the (partition number, return code) - - n: partition number - N: total number of partitions - history_start: start version of the history to verify - per_partition: number of versions per partition - latest_version: last version to verify - txns_to_skip: list of transactions to skip - backup_config_template_path: path to the backup config template - """ - end = history_start + n * per_partition - if n == N and end < latest_version: - end = latest_version - - start = end - per_partition - partition_name = f"run_{n}_{start}_{end}" - - print(f"[partition {n}] spawning {partition_name}") - if not os.path.exists(partition_name): - os.mkdir(partition_name) - # the metadata cache is shared across partitions and downloaded when querying the latest version. 
- shutil.copytree("metadata-cache", f"{partition_name}/metadata-cache") - - txns_to_skip_args = [f"--txns-to-skip={txn}" for txn in txns_to_skip] - - # run and print output - process = subprocess.Popen( - [ - "target/release/aptos-debugger", - "aptos-db", - "replay-verify", - # "--enable-storage-sharding", - *txns_to_skip_args, - "--concurrent-downloads", - "8", - "--replay-concurrency-level", - "2", - "--metadata-cache-dir", - f"./{partition_name}/metadata-cache", - "--target-db-dir", - f"./{partition_name}/db", - "--start-version", - str(start), - "--end-version", - str(end), - "--lazy-quit", - "--command-adapter-config", - backup_config_template_path, - ], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, # redirect stderr to stdout - ) - if process.stdout is None: - raise Exception(f"[partition {n}] stdout is None") - last_lines = deque(maxlen=10) - for line in iter(process.stdout.readline, b""): - print(f"[partition {n}] {line}", flush=True) - last_lines.append(line) - process.communicate() - - return (n, process.returncode, b"\n".join(last_lines)) - - -def main(runner_no=None, runner_cnt=None, start_version=None, end_version=None): - # collect all required ENV variables - REQUIRED_ENVS = [ - "BUCKET", - "SUB_DIR", - "HISTORY_START", - "TXNS_TO_SKIP", - "BACKUP_CONFIG_TEMPLATE_PATH", - ] - - if not all(env in os.environ for env in REQUIRED_ENVS): - raise Exception("Missing required ENV variables") - - # the runner may have small overlap at the boundary to prevent missing any transactions - runner_mapping = ( - TESTNET_RANGES if "testnet" in os.environ["BUCKET"] else MAINNET_RANGES - ) - - # by default we only have 1 runner - if runner_no is None or runner_cnt is None: - runner_no = 0 - runner_cnt = 1 - runner_mapping = [[runner_mapping[0][0], runner_mapping[-1][1]]] - - assert ( - runner_no >= 0 and runner_no < runner_cnt - ), "runner_no must be between 0 and runner_cnt" - - TXNS_TO_SKIP = [int(txn) for txn in os.environ["TXNS_TO_SKIP"].split(" ")] - 
BACKUP_CONFIG_TEMPLATE_PATH = os.environ["BACKUP_CONFIG_TEMPLATE_PATH"] - - if not os.path.exists(BACKUP_CONFIG_TEMPLATE_PATH): - raise Exception("BACKUP_CONFIG_TEMPLATE_PATH does not exist") - with open(BACKUP_CONFIG_TEMPLATE_PATH, "r") as f: - config = f.read() - if "aws" in config and shutil.which("aws") is None: - raise Exception("Missing required AWS CLI for pulling backup data from S3") - - if os.environ.get("REUSE_BACKUP_ARTIFACTS", "true") != "true": - print("[main process] clearing existing backup artifacts") - clear_artifacts() - else: - print("[main process] skipping clearing backup artifacts") - - assert runner_cnt == len( - runner_mapping - ), "runner_cnt must match the number of runners in the mapping" - runner_start = runner_mapping[runner_no][0] - runner_end = runner_mapping[runner_no][1] - latest_version = warm_cache_and_get_latest_backup_version( - BACKUP_CONFIG_TEMPLATE_PATH - ) - if runner_no == runner_cnt - 1: - runner_end = min(runner_end, latest_version) - print("runner start %d end %d" % (runner_start, runner_end)) - if start_version is not None and end_version is not None: - runner_start = start_version - runner_end = end_version - - # run replay-verify in parallel - N = 16 - PER_PARTITION = (runner_end - runner_start) // N - - with Pool(N) as p: - all_partitions = p.starmap( - retry_replay_verify_partition, - [ - ( - replay_verify_partition, - n, - N, - runner_start, - PER_PARTITION, - runner_end, - TXNS_TO_SKIP, - BACKUP_CONFIG_TEMPLATE_PATH, - ) - for n in range(1, N + 1) - ], - ) - - print("[main process] finished") - - err = False - for partition_num, return_code, msg in all_partitions: - if return_code != 0: - print("======== ERROR ========") - print( - f"ERROR: partition {partition_num} failed with exit status {return_code}, {msg})" - ) - err = True - - if err: - sys.exit(1) - - -if __name__ == "__main__": - freeze_support() - (runner_no, runner_cnt) = ( - (int(sys.argv[1]), int(sys.argv[2])) if len(sys.argv) > 2 else (None, None) - 
) - main(runner_no, runner_cnt) diff --git a/testsuite/replay_verify_run_local.py b/testsuite/replay_verify_run_local.py deleted file mode 100755 index 1872278a5c6ce..0000000000000 --- a/testsuite/replay_verify_run_local.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright © Aptos Foundation -# SPDX-License-Identifier: Apache-2.0 - -# Test replay-verify by running it on a public testnet backup -# While the replay-verify composite Github Action is meant to run with aptos-core checked out in the current -# working directory, this test script is meant to be run from this separate repo. The environment variable APTOS_CORE_PATH -# is required to be set to the path of your local checkout of aptos-core, which will be used to build and copy over test dependencies. - -import os -import subprocess - -import replay_verify - - -def local_setup(): - # Take these from the expected replay verify run - envs = { - "TIMEOUT_MINUTES": "5", - "BUCKET": "aptos-testnet-backup", - "SUB_DIR": "e1", - "HISTORY_START": "350000000", - "TXNS_TO_SKIP": "0", # 46874937 151020059 should be excluded - "BACKUP_CONFIG_TEMPLATE_PATH": "terraform/helm/fullnode/files/backup/gcs.yaml", - "REUSE_BACKUP_ARTIFACTS": "true", - } - - # build backup tools - subprocess.run( - [ - "cargo", - "build", - "--release", - "-p", - "aptos-debugger", - ], - check=True, - ) - - # write to environment variables - for key, value in envs.items(): - os.environ[key] = value - - -if __name__ == "__main__": - local_setup() - replay_verify.main( - runner_no=None, runner_cnt=None, start_version=291217350, end_version=292975771 - ) diff --git a/testsuite/replay_verify_test.py b/testsuite/replay_verify_test.py deleted file mode 100644 index aa751e1ab3452..0000000000000 --- a/testsuite/replay_verify_test.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright © Aptos Foundation -# SPDX-License-Identifier: Apache-2.0 - -import os -import unittest -import subprocess - -from verify_core.common 
import find_latest_version_from_db_backup_output - - -class ReplayVerifyHarnessTests(unittest.TestCase): - def testFindLatestVersionFromDbBackupOutput(self) -> None: - proc = subprocess.Popen( - f"cat {os.path.dirname(__file__)}/fixtures/backup_oneshot.fixture", - shell=True, - stdout=subprocess.PIPE, - ) - if proc.stdout is None: - raise Exception("Failed to get test fixture contents") - latest_version = find_latest_version_from_db_backup_output(proc.stdout) - self.assertEqual(latest_version, 417000000) - proc.communicate() diff --git a/testsuite/single_node_performance.py b/testsuite/single_node_performance.py index 3b5a017973575..228d202b656b9 100755 --- a/testsuite/single_node_performance.py +++ b/testsuite/single_node_performance.py @@ -37,12 +37,20 @@ class Flow(Flag): LAND_BLOCKING_AND_C = Flow.LAND_BLOCKING | Flow.CONTINUOUS SELECTED_FLOW = Flow[os.environ.get("FLOW", default="LAND_BLOCKING")] + +print(f"Executing flow: {SELECTED_FLOW}") IS_MAINNET = SELECTED_FLOW in [Flow.MAINNET, Flow.MAINNET_LARGE_DB] +SOURCE = os.environ.get("SOURCE", default="LOCAL") +if SOURCE not in ["ADHOC", "CI", "LOCAL"]: + print(f"Unrecogznied source {SOURCE}") + exit(1) + +RUNNER_NAME = os.environ.get("RUNNER_NAME", default="none") DEFAULT_NUM_INIT_ACCOUNTS = ( "100000000" if SELECTED_FLOW == Flow.MAINNET_LARGE_DB else "2000000" ) -DEFAULT_MAX_BLOCK_SIZE = "25000" if IS_MAINNET else "10000" +DEFAULT_MAX_BLOCK_SIZE = "10000" MAX_BLOCK_SIZE = int(os.environ.get("MAX_BLOCK_SIZE", default=DEFAULT_MAX_BLOCK_SIZE)) NUM_BLOCKS = int(os.environ.get("NUM_BLOCKS_PER_TEST", default=15)) @@ -56,11 +64,14 @@ class Flow(Flag): MAIN_SIGNER_ACCOUNTS = 2 * MAX_BLOCK_SIZE NOISE_LOWER_LIMIT = 0.98 if IS_MAINNET else 0.8 -NOISE_LOWER_LIMIT_WARN = None if IS_MAINNET else 0.9 +NOISE_LOWER_LIMIT_WARN = 0.9 # If you want to calibrate the upper limit for perf improvement, you can # increase this value temporarily (i.e. 
to 1.3) and readjust back after a day or two of runs -NOISE_UPPER_LIMIT = 5 if IS_MAINNET else 1.15 -NOISE_UPPER_LIMIT_WARN = None if IS_MAINNET else 1.05 +NOISE_UPPER_LIMIT = 1.15 +NOISE_UPPER_LIMIT_WARN = 1.05 + +SKIP_WARNS = IS_MAINNET +SKIP_PERF_IMPROVEMENT_NOTICE = IS_MAINNET # bump after a perf improvement, so you can easily distinguish runs # that are on top of this commit @@ -117,8 +128,6 @@ class RunGroupKeyExtra: transaction_weights_override: Optional[str] = field(default=None) sharding_traffic_flags: Optional[str] = field(default=None) - smaller_working_set: bool = field(default=False) - @dataclass class RunGroupConfig: @@ -143,69 +152,71 @@ class RunGroupConfig: # 0-indexed CALIBRATED_TPS_INDEX = -1 +CALIBRATED_COUNT_INDEX = -4 +CALIBRATED_MIN_RATIO_INDEX = -3 +CALIBRATED_MAX_RATIO_INDEX = -2 CALIBRATION_SEPARATOR = " " -# transaction_type module_working_set_size executor_type min_ratio max_ratio median -# (or if from log: -# transaction_type module_working_set_size executor_type block_size expected_tps tps -# ) +# transaction_type module_working_set_size executor_type count min_ratio max_ratio median CALIBRATION = """ -no-op 1 VM 0.822 1.047 38275.3 -no-op 1000 VM 0.775 1.033 22763.8 -apt-fa-transfer 1 VM 0.770 1.059 27699.5 -account-generation 1 VM 0.735 1.026 22763.8 -account-resource32-b 1 VM 0.718 1.049 33440.0 -modify-global-resource 1 VM 0.868 1.019 2819.9 -modify-global-resource 10 VM 0.877 1.018 17562.1 -publish-package 1 VM 0.944 1.037 143.9 -mix_publish_transfer 1 VM 0.953 1.124 2131.6 -batch100-transfer 1 VM 0.768 1.027 770.7 -vector-picture30k 1 VM 0.944 1.036 112.2 -vector-picture30k 20 VM 0.835 1.020 1140.7 -smart-table-picture30-k-with200-change 1 VM 0.955 1.051 21.8 -smart-table-picture30-k-with200-change 20 VM 0.926 1.065 185.9 -modify-global-resource-agg-v2 1 VM 0.792 1.060 33440.0 -modify-global-flag-agg-v2 1 VM 0.921 1.014 5199.3 -modify-global-bounded-agg-v2 1 VM 0.906 1.103 8866.4 -modify-global-milestone-agg-v2 1 VM 0.804 1.033 
27699.5 -resource-groups-global-write-tag1-kb 1 VM 0.915 1.074 9039.0 -resource-groups-global-write-and-read-tag1-kb 1 VM 0.938 1.016 6221.0 -resource-groups-sender-write-tag1-kb 1 VM 0.835 1.134 19680.6 -resource-groups-sender-multi-change1-kb 1 VM 0.896 1.071 16553.6 -token-v1ft-mint-and-transfer 1 VM 0.894 1.029 1276.2 -token-v1ft-mint-and-transfer 20 VM 0.897 1.024 11901.1 -token-v1nft-mint-and-transfer-sequential 1 VM 0.923 1.025 798.6 -token-v1nft-mint-and-transfer-sequential 20 VM 0.873 1.024 7732.8 -coin-init-and-mint 1 VM 0.779 1.055 29251.9 -coin-init-and-mint 20 VM 0.827 1.077 24885.0 -fungible-asset-mint 1 VM 0.773 1.023 23174.5 -fungible-asset-mint 20 VM 0.803 1.047 21567.9 -no-op5-signers 1 VM 0.854 1.078 37561.3 -token-v2-ambassador-mint 1 VM 0.848 1.022 16553.6 -token-v2-ambassador-mint 20 VM 0.811 1.044 16228.8 -liquidity-pool-swap 1 VM 0.922 1.027 975.7 -liquidity-pool-swap 20 VM 0.881 1.014 8359.6 -liquidity-pool-swap-stable 1 VM 0.890 1.013 957.5 -liquidity-pool-swap-stable 20 VM 0.916 1.019 8035.3 -deserialize-u256 1 VM 0.842 1.060 37561.3 -no-op-fee-payer 1 VM 0.908 1.029 2131.6 -no-op-fee-payer 50 VM 0.890 1.038 27205.9 +no-op 1 VM 59 0.815 1.101 37283.8 +no-op 1000 VM 59 0.679 1.036 22232.7 +apt-fa-transfer 1 VM 59 0.779 1.064 28096.3 +account-generation 1 VM 59 0.763 1.046 22960.6 +account-resource32-b 1 VM 59 0.794 1.085 34394.7 +modify-global-resource 1 VM 59 0.849 1.029 2784.1 +modify-global-resource 100 VM 17 0.845 1.071 33592.9 +publish-package 1 VM 59 0.926 1.076 142.6 +mix_publish_transfer 1 VM 59 0.917 1.134 2145.5 +batch100-transfer 1 VM 59 0.695 1.028 740.9 +vector-picture30k 1 VM 59 0.891 1.027 111.2 +vector-picture30k 100 VM 17 0.593 1.042 1982.6 +smart-table-picture30-k-with200-change 1 VM 59 0.844 1.078 21.5 +smart-table-picture30-k-with200-change 100 VM 17 0.786 1.018 405.6 +modify-global-resource-agg-v2 1 VM 59 0.706 1.113 35274.8 +modify-global-flag-agg-v2 1 VM 59 0.818 1.023 5508.5 +modify-global-bounded-agg-v2 1 VM 59 
0.766 1.089 9840.3 +modify-global-milestone-agg-v2 1 VM 59 0.723 1.038 28560.2 +resource-groups-global-write-tag1-kb 1 VM 59 0.872 1.046 9198.2 +resource-groups-global-write-and-read-tag1-kb 1 VM 59 0.867 1.023 6174.8 +resource-groups-sender-write-tag1-kb 1 VM 59 0.843 1.129 19680.5 +resource-groups-sender-multi-change1-kb 1 VM 59 0.825 1.074 16174.0 +token-v1ft-mint-and-transfer 1 VM 59 0.811 1.045 1262.2 +token-v1ft-mint-and-transfer 100 VM 17 0.718 1.041 17535.3 +token-v1nft-mint-and-transfer-sequential 1 VM 59 0.820 1.032 795.5 +token-v1nft-mint-and-transfer-sequential 100 VM 17 0.586 1.035 12683.5 +coin-init-and-mint 1 VM 59 0.704 1.073 28612.4 +coin-init-and-mint 100 VM 17 0.716 1.087 23415.6 +fungible-asset-mint 1 VM 59 0.644 1.052 26193.9 +fungible-asset-mint 100 VM 17 0.698 1.070 20606.2 +no-op5-signers 1 VM 59 0.783 1.124 37424.8 +token-v2-ambassador-mint 1 VM 59 0.670 1.035 17671.5 +token-v2-ambassador-mint 100 VM 17 0.717 1.058 15617.8 +liquidity-pool-swap 1 VM 59 0.728 1.021 963.2 +liquidity-pool-swap 100 VM 17 0.717 1.019 11116.3 +liquidity-pool-swap-stable 1 VM 59 0.776 1.023 934.6 +liquidity-pool-swap-stable 100 VM 17 0.796 1.021 10839.9 +deserialize-u256 1 VM 59 0.817 1.093 37002.8 +no-op-fee-payer 1 VM 59 0.775 1.027 2103.7 +no-op-fee-payer 100 VM 17 0.585 1.021 27642.4 """ # when adding a new test, add estimated expected_tps to it, as well as waived=True. # And then after a day or two - add calibration result for it above, removing expected_tps/waived fields. 
+DEFAULT_MODULE_WORKING_SET_SIZE = 100 + TESTS = [ RunGroupConfig(key=RunGroupKey("no-op"), included_in=LAND_BLOCKING_AND_C), RunGroupConfig(key=RunGroupKey("no-op", module_working_set_size=1000), included_in=LAND_BLOCKING_AND_C), - RunGroupConfig(key=RunGroupKey("apt-fa-transfer"), included_in=LAND_BLOCKING_AND_C | Flow.REPRESENTATIVE), + RunGroupConfig(key=RunGroupKey("apt-fa-transfer"), included_in=LAND_BLOCKING_AND_C | Flow.REPRESENTATIVE | Flow.MAINNET), RunGroupConfig(key=RunGroupKey("apt-fa-transfer", executor_type="native"), included_in=LAND_BLOCKING_AND_C), - RunGroupConfig(key=RunGroupKey("account-generation"), included_in=LAND_BLOCKING_AND_C | Flow.REPRESENTATIVE), + RunGroupConfig(key=RunGroupKey("account-generation"), included_in=LAND_BLOCKING_AND_C | Flow.REPRESENTATIVE | Flow.MAINNET), RunGroupConfig(key=RunGroupKey("account-generation", executor_type="native"), included_in=Flow.CONTINUOUS), RunGroupConfig(key=RunGroupKey("account-resource32-b"), included_in=Flow.CONTINUOUS), RunGroupConfig(key=RunGroupKey("modify-global-resource"), included_in=LAND_BLOCKING_AND_C | Flow.REPRESENTATIVE), - RunGroupConfig(key=RunGroupKey("modify-global-resource", module_working_set_size=10), included_in=Flow.CONTINUOUS), - RunGroupConfig(key=RunGroupKey("publish-package"), included_in=LAND_BLOCKING_AND_C | Flow.REPRESENTATIVE), + RunGroupConfig(key=RunGroupKey("modify-global-resource", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), + RunGroupConfig(key=RunGroupKey("publish-package"), included_in=LAND_BLOCKING_AND_C | Flow.REPRESENTATIVE | Flow.MAINNET), RunGroupConfig(key=RunGroupKey("mix_publish_transfer"), key_extra=RunGroupKeyExtra( transaction_type_override="publish-package apt-fa-transfer", transaction_weights_override="1 500", @@ -214,72 +225,72 @@ class RunGroupConfig: RunGroupConfig(key=RunGroupKey("batch100-transfer", executor_type="native"), included_in=Flow.CONTINUOUS), RunGroupConfig(expected_tps=100, 
key=RunGroupKey("vector-picture40"), included_in=Flow(0), waived=True), - RunGroupConfig(expected_tps=1000, key=RunGroupKey("vector-picture40", module_working_set_size=20), included_in=Flow(0), waived=True), + RunGroupConfig(expected_tps=1000, key=RunGroupKey("vector-picture40", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow(0), waived=True), RunGroupConfig(key=RunGroupKey("vector-picture30k"), included_in=LAND_BLOCKING_AND_C), - RunGroupConfig(key=RunGroupKey("vector-picture30k", module_working_set_size=20), included_in=Flow.CONTINUOUS), + RunGroupConfig(key=RunGroupKey("vector-picture30k", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), RunGroupConfig(key=RunGroupKey("smart-table-picture30-k-with200-change"), included_in=LAND_BLOCKING_AND_C), - RunGroupConfig(key=RunGroupKey("smart-table-picture30-k-with200-change", module_working_set_size=20), included_in=Flow.CONTINUOUS), + RunGroupConfig(key=RunGroupKey("smart-table-picture30-k-with200-change", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), # RunGroupConfig(expected_tps=10, key=RunGroupKey("smart-table-picture1-m-with256-change"), included_in=LAND_BLOCKING_AND_C), # RunGroupConfig(expected_tps=40, key=RunGroupKey("smart-table-picture1-m-with256-change", module_working_set_size=20), included_in=Flow.CONTINUOUS), RunGroupConfig(key=RunGroupKey("modify-global-resource-agg-v2"), included_in=Flow.AGG_V2 | LAND_BLOCKING_AND_C), - RunGroupConfig(expected_tps=10000, key=RunGroupKey("modify-global-resource-agg-v2", module_working_set_size=50), included_in=Flow.AGG_V2, waived=True), + RunGroupConfig(expected_tps=10000, key=RunGroupKey("modify-global-resource-agg-v2", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.AGG_V2, waived=True), RunGroupConfig(key=RunGroupKey("modify-global-flag-agg-v2"), included_in=Flow.AGG_V2 | Flow.CONTINUOUS), - RunGroupConfig(expected_tps=10000, 
key=RunGroupKey("modify-global-flag-agg-v2", module_working_set_size=50), included_in=Flow.AGG_V2, waived=True), + RunGroupConfig(expected_tps=10000, key=RunGroupKey("modify-global-flag-agg-v2", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.AGG_V2, waived=True), RunGroupConfig(key=RunGroupKey("modify-global-bounded-agg-v2"), included_in=Flow.AGG_V2 | Flow.CONTINUOUS), - RunGroupConfig(expected_tps=10000, key=RunGroupKey("modify-global-bounded-agg-v2", module_working_set_size=50), included_in=Flow.AGG_V2, waived=True), + RunGroupConfig(expected_tps=10000, key=RunGroupKey("modify-global-bounded-agg-v2", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.AGG_V2, waived=True), RunGroupConfig(key=RunGroupKey("modify-global-milestone-agg-v2"), included_in=Flow.AGG_V2 | Flow.CONTINUOUS), RunGroupConfig(key=RunGroupKey("resource-groups-global-write-tag1-kb"), included_in=LAND_BLOCKING_AND_C | Flow.RESOURCE_GROUPS), - RunGroupConfig(expected_tps=8000, key=RunGroupKey("resource-groups-global-write-tag1-kb", module_working_set_size=20), included_in=Flow.RESOURCE_GROUPS, waived=True), + RunGroupConfig(expected_tps=8000, key=RunGroupKey("resource-groups-global-write-tag1-kb", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.RESOURCE_GROUPS, waived=True), RunGroupConfig(key=RunGroupKey("resource-groups-global-write-and-read-tag1-kb"), included_in=Flow.CONTINUOUS | Flow.RESOURCE_GROUPS), - RunGroupConfig(expected_tps=8000, key=RunGroupKey("resource-groups-global-write-and-read-tag1-kb", module_working_set_size=20), included_in=Flow.RESOURCE_GROUPS, waived=True), + RunGroupConfig(expected_tps=8000, key=RunGroupKey("resource-groups-global-write-and-read-tag1-kb", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.RESOURCE_GROUPS, waived=True), RunGroupConfig(key=RunGroupKey("resource-groups-sender-write-tag1-kb"), included_in=Flow.CONTINUOUS | Flow.RESOURCE_GROUPS), - 
RunGroupConfig(expected_tps=8000, key=RunGroupKey("resource-groups-sender-write-tag1-kb", module_working_set_size=20), included_in=Flow.RESOURCE_GROUPS, waived=True), + RunGroupConfig(expected_tps=8000, key=RunGroupKey("resource-groups-sender-write-tag1-kb", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.RESOURCE_GROUPS, waived=True), RunGroupConfig(key=RunGroupKey("resource-groups-sender-multi-change1-kb"), included_in=LAND_BLOCKING_AND_C | Flow.RESOURCE_GROUPS), - RunGroupConfig(expected_tps=8000, key=RunGroupKey("resource-groups-sender-multi-change1-kb", module_working_set_size=20), included_in=Flow.RESOURCE_GROUPS, waived=True), - + RunGroupConfig(expected_tps=8000, key=RunGroupKey("resource-groups-sender-multi-change1-kb", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.RESOURCE_GROUPS, waived=True), + RunGroupConfig(key=RunGroupKey("token-v1ft-mint-and-transfer"), included_in=Flow.CONTINUOUS), - RunGroupConfig(key=RunGroupKey("token-v1ft-mint-and-transfer", module_working_set_size=20), included_in=Flow.CONTINUOUS), + RunGroupConfig(key=RunGroupKey("token-v1ft-mint-and-transfer", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), RunGroupConfig(key=RunGroupKey("token-v1nft-mint-and-transfer-sequential"), included_in=Flow.CONTINUOUS), - RunGroupConfig(key=RunGroupKey("token-v1nft-mint-and-transfer-sequential", module_working_set_size=20), included_in=Flow.CONTINUOUS), + RunGroupConfig(key=RunGroupKey("token-v1nft-mint-and-transfer-sequential", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), RunGroupConfig(expected_tps=1300, key=RunGroupKey("token-v1nft-mint-and-transfer-parallel"), included_in=Flow(0), waived=True), - RunGroupConfig(expected_tps=5300, key=RunGroupKey("token-v1nft-mint-and-transfer-parallel", module_working_set_size=20), included_in=Flow(0), waived=True), + RunGroupConfig(expected_tps=5300, 
key=RunGroupKey("token-v1nft-mint-and-transfer-parallel", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow(0), waived=True), RunGroupConfig(key=RunGroupKey("coin-init-and-mint", module_working_set_size=1), included_in=Flow.CONTINUOUS), - RunGroupConfig(key=RunGroupKey("coin-init-and-mint", module_working_set_size=20), included_in=Flow.CONTINUOUS), + RunGroupConfig(key=RunGroupKey("coin-init-and-mint", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), RunGroupConfig(key=RunGroupKey("fungible-asset-mint", module_working_set_size=1), included_in=LAND_BLOCKING_AND_C), - RunGroupConfig(key=RunGroupKey("fungible-asset-mint", module_working_set_size=20), included_in=Flow.CONTINUOUS), + RunGroupConfig(key=RunGroupKey("fungible-asset-mint", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), # RunGroupConfig(expected_tps=1000, key=RunGroupKey("token-v1ft-mint-and-store"), included_in=Flow(0)), # RunGroupConfig(expected_tps=1000, key=RunGroupKey("token-v1nft-mint-and-store-sequential"), included_in=Flow(0)), # RunGroupConfig(expected_tps=1000, key=RunGroupKey("token-v1nft-mint-and-transfer-parallel"), included_in=Flow(0)), RunGroupConfig(key=RunGroupKey("no-op5-signers"), included_in=Flow.CONTINUOUS), - - RunGroupConfig(key=RunGroupKey("token-v2-ambassador-mint"), included_in=LAND_BLOCKING_AND_C | Flow.REPRESENTATIVE), - RunGroupConfig(key=RunGroupKey("token-v2-ambassador-mint", module_working_set_size=20), included_in=Flow.CONTINUOUS), + + RunGroupConfig(key=RunGroupKey("token-v2-ambassador-mint"), included_in=LAND_BLOCKING_AND_C | Flow.REPRESENTATIVE | Flow.MAINNET), + RunGroupConfig(key=RunGroupKey("token-v2-ambassador-mint", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), RunGroupConfig(key=RunGroupKey("liquidity-pool-swap"), included_in=LAND_BLOCKING_AND_C | Flow.REPRESENTATIVE), - 
RunGroupConfig(key=RunGroupKey("liquidity-pool-swap", module_working_set_size=20), included_in=Flow.CONTINUOUS), + RunGroupConfig(key=RunGroupKey("liquidity-pool-swap", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), RunGroupConfig(key=RunGroupKey("liquidity-pool-swap-stable"), included_in=Flow.CONTINUOUS), - RunGroupConfig(key=RunGroupKey("liquidity-pool-swap-stable", module_working_set_size=20), included_in=Flow.CONTINUOUS), + RunGroupConfig(key=RunGroupKey("liquidity-pool-swap-stable", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), RunGroupConfig(key=RunGroupKey("deserialize-u256"), included_in=Flow.CONTINUOUS), - + # fee payer sequentializes transactions today. in these tests module publisher is the fee payer, so larger number of modules tests throughput with multiple fee payers RunGroupConfig(key=RunGroupKey("no-op-fee-payer"), included_in=LAND_BLOCKING_AND_C), - RunGroupConfig(key=RunGroupKey("no-op-fee-payer", module_working_set_size=50), included_in=Flow.CONTINUOUS), + RunGroupConfig(key=RunGroupKey("no-op-fee-payer", module_working_set_size=DEFAULT_MODULE_WORKING_SET_SIZE), included_in=Flow.CONTINUOUS), RunGroupConfig(expected_tps=50000, key=RunGroupKey("coin_transfer_connected_components", executor_type="sharded"), key_extra=RunGroupKeyExtra(sharding_traffic_flags="--connected-tx-grps 5000", transaction_type_override=""), included_in=Flow.REPRESENTATIVE, waived=True), RunGroupConfig(expected_tps=50000, key=RunGroupKey("coin_transfer_hotspot", executor_type="sharded"), key_extra=RunGroupKeyExtra(sharding_traffic_flags="--hotspot-probability 0.8", transaction_type_override=""), included_in=Flow.REPRESENTATIVE, waived=True), # setting separately for previewnet, as we run on a different number of cores. 
- RunGroupConfig(expected_tps=26000 if NUM_ACCOUNTS < 5000000 else 20000, key=RunGroupKey("apt-fa-transfer"), key_extra=RunGroupKeyExtra(smaller_working_set=True), included_in=Flow.MAINNET | Flow.MAINNET_LARGE_DB), - RunGroupConfig(expected_tps=20000 if NUM_ACCOUNTS < 5000000 else 15000, key=RunGroupKey("account-generation"), included_in=Flow.MAINNET | Flow.MAINNET_LARGE_DB), - RunGroupConfig(expected_tps=140 if NUM_ACCOUNTS < 5000000 else 60, key=RunGroupKey("publish-package"), included_in=Flow.MAINNET | Flow.MAINNET_LARGE_DB), - RunGroupConfig(expected_tps=15400 if NUM_ACCOUNTS < 5000000 else 6800, key=RunGroupKey("token-v2-ambassador-mint"), included_in=Flow.MAINNET | Flow.MAINNET_LARGE_DB), + RunGroupConfig(expected_tps=20000, key=RunGroupKey("apt-fa-transfer"), included_in=Flow.MAINNET_LARGE_DB), + RunGroupConfig(expected_tps=15000, key=RunGroupKey("account-generation"), included_in=Flow.MAINNET_LARGE_DB), + RunGroupConfig(expected_tps=60, key=RunGroupKey("publish-package"), included_in=Flow.MAINNET_LARGE_DB), + RunGroupConfig(expected_tps=6800, key=RunGroupKey("token-v2-ambassador-mint"), included_in=Flow.MAINNET_LARGE_DB), # RunGroupConfig(expected_tps=17000 if NUM_ACCOUNTS < 5000000 else 28000, key=RunGroupKey("coin_transfer_connected_components", executor_type="sharded"), key_extra=RunGroupKeyExtra(sharding_traffic_flags="--connected-tx-grps 5000", transaction_type_override=""), included_in=Flow.MAINNET | Flow.MAINNET_LARGE_DB, waived=True), # RunGroupConfig(expected_tps=27000 if NUM_ACCOUNTS < 5000000 else 23000, key=RunGroupKey("coin_transfer_hotspot", executor_type="sharded"), key_extra=RunGroupKeyExtra(sharding_traffic_flags="--hotspot-probability 0.8", transaction_type_override=""), included_in=Flow.MAINNET | Flow.MAINNET_LARGE_DB, waived=True), ] @@ -355,6 +366,23 @@ class RunGroupInstance: expected_tps: float +@dataclass +class CalibrationData: + expected_tps: float + count: int + min_ratio: float + max_ratio: float + + +@dataclass +class Criteria: 
+ expected_tps: float + min_tps: float + min_warn_tps: float + max_tps: float + max_warn_tps: float + + def get_only(values): assert len(values) == 1, "Multiple values parsed: " + str(values) return values[0] @@ -534,7 +562,12 @@ def print_table( transaction_type=parts[0], module_working_set_size=int(parts[1]), executor_type=parts[2], - ): float(parts[CALIBRATED_TPS_INDEX]) + ): CalibrationData( + expected_tps=float(parts[CALIBRATED_TPS_INDEX]), + count=int(parts[CALIBRATED_COUNT_INDEX]), + min_ratio=float(parts[CALIBRATED_MIN_RATIO_INDEX]), + max_ratio=float(parts[CALIBRATED_MAX_RATIO_INDEX]), + ) for line in CALIBRATION.split("\n") if len( parts := [ @@ -574,11 +607,42 @@ def print_table( if test.expected_tps is not None: print(f"WARNING: using uncalibrated TPS for {test.key}") - expected_tps = test.expected_tps + criteria = Criteria( + expected_tps=test.expected_tps, + min_tps=test.expected_tps * NOISE_LOWER_LIMIT, + min_warn_tps=test.expected_tps * NOISE_LOWER_LIMIT_WARN, + max_tps=test.expected_tps * NOISE_UPPER_LIMIT, + max_warn_tps=test.expected_tps * NOISE_UPPER_LIMIT_WARN, + ) else: assert test.key in calibrated_expected_tps, test - expected_tps = calibrated_expected_tps[test.key] - cur_block_size = int(min([expected_tps, MAX_BLOCK_SIZE])) + cur_calibration = calibrated_expected_tps[test.key] + if cur_calibration.count > 20: + criteria = Criteria( + expected_tps=cur_calibration.expected_tps, + min_tps=cur_calibration.expected_tps + * (cur_calibration.min_ratio - 0.01), + min_warn_tps=cur_calibration.expected_tps + * pow(cur_calibration.min_ratio, 0.5), + max_tps=cur_calibration.expected_tps + * (cur_calibration.max_ratio + 0.01), + max_warn_tps=cur_calibration.expected_tps + * pow(cur_calibration.max_ratio, 0.5), + ) + else: + criteria = Criteria( + expected_tps=cur_calibration.expected_tps, + min_tps=cur_calibration.expected_tps + * (cur_calibration.min_ratio - 0.1), + min_warn_tps=cur_calibration.expected_tps + * min(cur_calibration.min_ratio, 0.95), + 
max_tps=cur_calibration.expected_tps + * (cur_calibration.max_ratio + 0.1), + max_warn_tps=cur_calibration.expected_tps + * max(cur_calibration.max_ratio, 1.05), + ) + + cur_block_size = int(min([criteria.expected_tps, MAX_BLOCK_SIZE])) print(f"Testing {test.key}") if test.key_extra.transaction_type_override == "": @@ -604,11 +668,7 @@ def print_table( raise Exception(f"executor type not supported {test.key.executor_type}") txn_emitter_prefix_str = "" if NUM_BLOCKS > 200 else " --generate-then-execute" - ADDITIONAL_DST_POOL_ACCOUNTS = ( - 2 - * MAX_BLOCK_SIZE - * (1 if test.key_extra.smaller_working_set else NUM_BLOCKS) - ) + ADDITIONAL_DST_POOL_ACCOUNTS = 2 * MAX_BLOCK_SIZE * NUM_BLOCKS common_command_suffix = f"{executor_type_str} {txn_emitter_prefix_str} --block-size {cur_block_size} {DB_CONFIG_FLAGS} {DB_PRUNER_FLAGS} run-executor {FEATURE_FLAGS} {workload_args_str} --module-working-set-size {test.key.module_working_set_size} --main-signer-accounts {MAIN_SIGNER_ACCOUNTS} --additional-dst-pool-accounts {ADDITIONAL_DST_POOL_ACCOUNTS} --data-dir {tmpdirname}/db --checkpoint-dir {tmpdirname}/cp" @@ -641,7 +701,7 @@ def print_table( single_node_result=single_node_result, number_of_threads_results=number_of_threads_results, block_size=cur_block_size, - expected_tps=expected_tps, + expected_tps=criteria.expected_tps, ) ) @@ -657,7 +717,7 @@ def print_table( single_node_result=stage_node_result, number_of_threads_results=number_of_threads_results, block_size=cur_block_size, - expected_tps=expected_tps, + expected_tps=criteria.expected_tps, ) ) @@ -666,17 +726,23 @@ def print_table( json.dumps( { "grep": "grep_json_single_node_perf", + "source": SOURCE, + "runner_name": RUNNER_NAME, "transaction_type": test.key.transaction_type, "module_working_set_size": test.key.module_working_set_size, "executor_type": test.key.executor_type, "block_size": cur_block_size, "execution_threads": NUMBER_OF_EXECUTION_THREADS, - "expected_tps": expected_tps, + "warmup_num_accounts": 
NUM_ACCOUNTS, + "expected_tps": criteria.expected_tps, + "expected_min_tps": criteria.min_tps, + "expected_max_tps": criteria.max_tps, "waived": test.waived, "tps": single_node_result.tps, "gps": single_node_result.gps, "gpt": single_node_result.gpt, "code_perf_version": CODE_PERF_VERSION, + "flow": str(SELECTED_FLOW), "test_index": test_index, } ) @@ -721,39 +787,29 @@ def print_table( ) print_table(results, by_levels=False, single_field=None) - # if expected TPS is not set, skip performance checks - if expected_tps is None: - continue - - if ( - NOISE_LOWER_LIMIT is not None - and single_node_result.tps < expected_tps * NOISE_LOWER_LIMIT - ): - text = f"regression detected {single_node_result.tps} < {expected_tps * NOISE_LOWER_LIMIT} = {expected_tps} * {NOISE_LOWER_LIMIT}, {test.key} didn't meet TPS requirements" + if single_node_result.tps < criteria.min_tps: + text = f"regression detected {single_node_result.tps} < {criteria.min_tps} (expected median {criteria.expected_tps}), {test.key} didn't meet TPS requirements" if not test.waived: errors.append(text) else: warnings.append(text) - elif ( - NOISE_LOWER_LIMIT_WARN is not None - and single_node_result.tps < expected_tps * NOISE_LOWER_LIMIT_WARN - ): - text = f"potential (but within normal noise) regression detected {single_node_result.tps} < {expected_tps * NOISE_LOWER_LIMIT_WARN} = {expected_tps} * {NOISE_LOWER_LIMIT_WARN}, {test.key} didn't meet TPS requirements" + elif single_node_result.tps < criteria.min_warn_tps: + text = f"potential (but within normal noise) regression detected {single_node_result.tps} < {criteria.min_warn_tps} (expected median {criteria.expected_tps}), {test.key} didn't meet TPS requirements" warnings.append(text) elif ( - NOISE_UPPER_LIMIT is not None - and single_node_result.tps > expected_tps * NOISE_UPPER_LIMIT + not SKIP_PERF_IMPROVEMENT_NOTICE + and single_node_result.tps > criteria.max_tps ): - text = f"perf improvement detected {single_node_result.tps} > {expected_tps * 
NOISE_UPPER_LIMIT} = {expected_tps} * {NOISE_UPPER_LIMIT}, {test.key} exceeded TPS requirements, increase TPS requirements to match new baseline" + text = f"perf improvement detected {single_node_result.tps} > {criteria.max_tps} (expected median {criteria.expected_tps}), {test.key} exceeded TPS requirements, increase TPS requirements to match new baseline" if not test.waived: errors.append(text) else: warnings.append(text) elif ( - NOISE_UPPER_LIMIT_WARN is not None - and single_node_result.tps > expected_tps * NOISE_UPPER_LIMIT_WARN + not SKIP_PERF_IMPROVEMENT_NOTICE + and single_node_result.tps > criteria.max_warn_tps ): - text = f"potential (but within normal noise) perf improvement detected {single_node_result.tps} > {expected_tps * NOISE_UPPER_LIMIT_WARN} = {expected_tps} * {NOISE_UPPER_LIMIT_WARN}, {test.key} exceeded TPS requirements, increase TPS requirements to match new baseline" + text = f"potential (but within normal noise) perf improvement detected {single_node_result.tps} > {criteria.max_warn_tps} (expected median {criteria.expected_tps}), {test.key} exceeded TPS requirements, increase TPS requirements to match new baseline" warnings.append(text) if HIDE_OUTPUT: @@ -762,10 +818,20 @@ def print_table( if warnings: print("Warnings: ") print("\n".join(warnings)) + print("You can run again to see if it is noise, or consistent.") if errors: print("Errors: ") print("\n".join(errors)) + print( + """If you expect your PR to change the performance, you need to recalibrate the values. +To do so, you should run the test on your branch 6 times +(https://github.com/aptos-labs/aptos-core/actions/workflows/workflow-run-execution-performance.yaml). +Then go to Humio calibration link (https://gist.github.com/igor-aptos/7b12ca28de03894cddda8e415f37889e), +update it to your branch, and export values as CSV, and then open and copy values inside +testsuite/single_node_performance.py testsuite), and add Blockchain oncall as the reviewer. 
+""" + ) exit(1) if move_e2e_benchmark_failed: diff --git a/testsuite/smoke-test/src/aptos/mint_transfer.rs b/testsuite/smoke-test/src/aptos/mint_transfer.rs index 8be365164a269..cb8b02b9186b6 100644 --- a/testsuite/smoke-test/src/aptos/mint_transfer.rs +++ b/testsuite/smoke-test/src/aptos/mint_transfer.rs @@ -36,11 +36,10 @@ async fn test_mint_transfer() { info.client().submit_and_wait(&transfer_txn).await.unwrap(); assert_eq!( info.client() - .get_account_balance(account2.address()) + .view_apt_account_balance(account2.address()) .await .unwrap() - .into_inner() - .get(), + .into_inner(), 40000 ); diff --git a/testsuite/smoke-test/src/consensus/consensus_fault_tolerance.rs b/testsuite/smoke-test/src/consensus/consensus_fault_tolerance.rs index 56e6cea9fa2d5..dc640d98146da 100644 --- a/testsuite/smoke-test/src/consensus/consensus_fault_tolerance.rs +++ b/testsuite/smoke-test/src/consensus/consensus_fault_tolerance.rs @@ -9,7 +9,7 @@ use aptos_forge::{ test_utils::consensus_utils::{ no_failure_injection, test_consensus_fault_tolerance, FailPointFailureInjection, NodeState, }, - LocalSwarm, Swarm, SwarmExt, + LocalSwarm, NodeExt, Swarm, SwarmExt, }; use aptos_logger::info; use rand::{self, rngs::SmallRng, Rng, SeedableRng}; @@ -213,6 +213,59 @@ async fn test_no_failures() { .unwrap(); } +#[tokio::test] +async fn test_faulty_votes() { + let num_validators = 7; + + let swarm = create_swarm(num_validators, 1).await; + + let (validator_clients, public_info) = { + ( + swarm.get_validator_clients_with_names(), + swarm.aptos_public_info(), + ) + }; + test_consensus_fault_tolerance( + validator_clients, + public_info, + 3, + 5.0, + 1, + Box::new(FailPointFailureInjection::new(Box::new(move |cycle, _| { + ( + vec![ + ( + cycle % num_validators, + "consensus::create_invalid_vote".to_string(), + format!("{}%return", 50), + ), + ( + (cycle + 1) % num_validators, + "consensus::create_invalid_order_vote".to_string(), + format!("{}%return", 50), + ), + ( + (cycle + 2) % 
num_validators, + "consensus::create_invalid_commit_vote".to_string(), + format!("{}%return", 50), + ), + ], + true, + ) + }))), + Box::new( + move |_, executed_epochs, executed_rounds, executed_transactions, _, _| { + successful_criteria(executed_epochs, executed_rounds, executed_transactions); + Ok(()) + }, + ), + true, + false, + ) + .await + .unwrap(); +} + #[tokio::test] async fn test_ordered_only_cert() { let num_validators = 3; @@ -550,3 +603,64 @@ async fn test_alternating_having_consensus() { ) .await; } + +#[tokio::test] +async fn test_round_timeout_msg_rollout() { + let num_validators = 3; + + let mut swarm = create_swarm(num_validators, 1).await; + + let (validator_clients, public_info) = { + ( + swarm.get_validator_clients_with_names(), + swarm.aptos_public_info(), + ) + }; + test_consensus_fault_tolerance( + validator_clients.clone(), + public_info.clone(), + 3, + 5.0, + 1, + no_failure_injection(), + Box::new( + move |_, executed_epochs, executed_rounds, executed_transactions, _, _| { + successful_criteria(executed_epochs, executed_rounds, executed_transactions); + Ok(()) + }, + ), + true, + false, + ) + .await + .unwrap(); + + for val in swarm.validators_mut() { + val.stop(); + val.config_mut().consensus.enable_round_timeout_msg = true; + val.start().unwrap(); + + val.wait_until_healthy(Instant::now().checked_add(Duration::from_secs(60)).unwrap()) + .await + .unwrap(); + + test_consensus_fault_tolerance( + validator_clients.clone(), + public_info.clone(), + 1, + 30.0, + 1, + no_failure_injection(), + Box::new( + move |_, executed_epochs, executed_rounds, executed_transactions, _, _| { + successful_criteria(executed_epochs, executed_rounds, executed_transactions); + Ok(()) + }, + ), + true, + false, + ) + .await + .unwrap(); + } +} diff --git a/testsuite/smoke-test/src/fullnode.rs b/testsuite/smoke-test/src/fullnode.rs index 0a5bbb139ae17..b0ff39eaf3401 100644 --- a/testsuite/smoke-test/src/fullnode.rs +++ b/testsuite/smoke-test/src/fullnode.rs @@ 
-80,12 +80,12 @@ async fn test_indexer() { client.submit_and_wait(&txn).await.unwrap(); let balance = client - .get_account_balance(account2.address()) + .view_apt_account_balance(account2.address()) .await .unwrap() .into_inner(); - assert_eq!(balance.get(), 10); + assert_eq!(balance, 10); } async fn wait_for_account(client: &RestClient, address: AccountAddress) -> Result<()> { diff --git a/testsuite/smoke-test/src/keyless.rs b/testsuite/smoke-test/src/keyless.rs index d7af6aafb1175..6e847afbc5dc5 100644 --- a/testsuite/smoke-test/src/keyless.rs +++ b/testsuite/smoke-test/src/keyless.rs @@ -10,7 +10,9 @@ use aptos_crypto::{ use aptos_forge::{AptosPublicInfo, LocalSwarm, NodeExt, Swarm, SwarmExt}; use aptos_logger::{debug, info}; use aptos_rest_client::Client; -use aptos_sdk::types::{EphemeralKeyPair, KeylessAccount, LocalAccount}; +use aptos_sdk::types::{ + EphemeralKeyPair, EphemeralPrivateKey, FederatedKeylessAccount, KeylessAccount, LocalAccount, +}; use aptos_types::{ jwks::{ jwk::{JWKMoveStruct, JWK}, @@ -21,11 +23,11 @@ use aptos_types::{ get_public_inputs_hash, test_utils::{ self, get_groth16_sig_and_pk_for_upgraded_vk, get_sample_aud, get_sample_epk_blinder, - get_sample_esk, get_sample_exp_date, get_sample_groth16_sig_and_fed_pk, - get_sample_groth16_sig_and_pk, get_sample_groth16_sig_and_pk_no_extra_field, - get_sample_iss, get_sample_jwk, get_sample_jwt_header_json, get_sample_jwt_token, - get_sample_openid_sig_and_pk, get_sample_pepper, get_sample_tw_sk, get_sample_uid_key, - get_sample_uid_val, get_sample_zk_sig, get_upgraded_vk, + get_sample_esk, get_sample_exp_date, get_sample_groth16_sig_and_pk, + get_sample_groth16_sig_and_pk_no_extra_field, get_sample_iss, get_sample_jwk, + get_sample_jwt_header_json, get_sample_jwt_token, get_sample_openid_sig_and_pk, + get_sample_pepper, get_sample_tw_sk, get_sample_uid_key, get_sample_uid_val, + get_sample_zk_sig, get_upgraded_vk, }, AnyKeylessPublicKey, Configuration, EphemeralCertificate, 
Groth16ProofAndStatement, Groth16VerificationKey, KeylessPublicKey, KeylessSignature, TransactionAndProof, @@ -248,7 +250,7 @@ async fn federated_keyless_scenario( install_fed_jwk: bool, expect_txn_succeed: bool, ) { - let (tw_sk, config, jwk, swarm, mut cli, _) = setup_local_net_inner(set_feature_flag).await; + let (_tw_sk, _config, _jwk, swarm, mut cli, _) = setup_local_net_inner(set_feature_flag).await; let root_addr = swarm.chain_info().root_account().address(); let _root_idx = cli.add_account_with_address_to_cli(swarm.root_key(), root_addr); @@ -317,19 +319,63 @@ script {{ assert_eq!(Some(true), txn_result.unwrap().success); } - // For simplicity we use the root account as the jwk owner. - let (sig, pk) = get_sample_groth16_sig_and_fed_pk(root_addr); let mut info = swarm.aptos_public_info(); - let signed_txn = sign_transaction_any_keyless_pk( - &mut info, - sig.clone(), - AnyKeylessPublicKey::Federated(pk), - &jwk, - &config, - Some(&tw_sk), - 1, + + let esk = EphemeralPrivateKey::Ed25519 { + inner_private_key: get_sample_esk(), + }; + let ephemeral_key_pair = + EphemeralKeyPair::new(esk, get_sample_exp_date(), get_sample_epk_blinder()).unwrap(); + let federated_keyless_account = FederatedKeylessAccount::new_from_jwt( + &get_sample_jwt_token(), + ephemeral_key_pair, + root_addr, + None, + Some(get_sample_pepper()), + Some(get_sample_zk_sig()), ) - .await; + .unwrap(); + + let federated_keyless_public_key = federated_keyless_account.public_key().clone(); + + let local_account = LocalAccount::new_federated_keyless( + federated_keyless_account + .authentication_key() + .account_address(), + federated_keyless_account, + 0, + ); + + // If the account does not exist, create it. + if info.account_exists(local_account.address()).await.is_err() { + info!( + "{} account does not exist. 
Creating...", + local_account.address().to_hex_literal() + ); + info.sync_root_account_sequence_number().await; + info.create_user_account_with_any_key(&AnyPublicKey::FederatedKeyless { + public_key: federated_keyless_public_key, + }) + .await + .unwrap(); + info.mint(local_account.address(), 10_000_000_000) + .await + .unwrap(); + } + info.sync_root_account_sequence_number().await; + let recipient = info + .create_and_fund_user_account(20_000_000_000) + .await + .unwrap(); + + let txn_builder = info + .transaction_factory() + .payload(aptos_stdlib::aptos_coin_transfer( + recipient.address(), + 1_000_000, + )); + + let signed_txn = local_account.sign_with_transaction_builder(txn_builder); let result = swarm .aptos_public_info() @@ -384,10 +430,11 @@ async fn test_keyless_no_training_wheels_groth16_verifies() { async fn test_keyless_groth16_verifies_using_rust_sdk() { let (_tw_sk, _, _, swarm, mut cli, root_idx) = setup_local_net().await; - let blinder = get_sample_epk_blinder(); - let exp_date = get_sample_exp_date(); - let esk = get_sample_esk(); - let ephemeral_key_pair = EphemeralKeyPair::new(esk, exp_date, blinder).unwrap(); + let esk = EphemeralPrivateKey::Ed25519 { + inner_private_key: get_sample_esk(), + }; + let ephemeral_key_pair = + EphemeralKeyPair::new(esk, get_sample_exp_date(), get_sample_epk_blinder()).unwrap(); let mut info = swarm.aptos_public_info(); let keyless_account = KeylessAccount::new( @@ -443,15 +490,15 @@ async fn test_keyless_groth16_verifies_using_rust_sdk() { async fn test_keyless_groth16_verifies_using_rust_sdk_from_jwt() { let (_tw_sk, _, _, swarm, mut cli, root_idx) = setup_local_net().await; - let jwt = get_sample_jwt_token(); - let blinder = get_sample_epk_blinder(); - let exp_date = get_sample_exp_date(); - let esk = get_sample_esk(); - let ephemeral_key_pair = EphemeralKeyPair::new(esk, exp_date, blinder).unwrap(); + let esk = EphemeralPrivateKey::Ed25519 { + inner_private_key: get_sample_esk(), + }; + let ephemeral_key_pair = + 
EphemeralKeyPair::new(esk, get_sample_exp_date(), get_sample_epk_blinder()).unwrap(); let mut info = swarm.aptos_public_info(); let keyless_account = KeylessAccount::new_from_jwt( - &jwt, + &get_sample_jwt_token(), ephemeral_key_pair, None, Some(get_sample_pepper()), diff --git a/testsuite/smoke-test/src/rosetta.rs b/testsuite/smoke-test/src/rosetta.rs index 8b7a36b4652e1..9facc3c80b4c8 100644 --- a/testsuite/smoke-test/src/rosetta.rs +++ b/testsuite/smoke-test/src/rosetta.rs @@ -113,6 +113,7 @@ async fn setup_test( Some(aptos_rest_client::Client::new( validator.rest_api_endpoint(), )), + HashSet::new(), ) .await .unwrap(); @@ -593,13 +594,10 @@ async fn test_transfer() { let receiver = AccountAddress::from_hex_literal("0xBEEF").unwrap(); let sender_private_key = cli.private_key(0); let sender_balance = client - .get_account_balance(sender) + .view_apt_account_balance(sender) .await .unwrap() - .into_inner() - .coin - .value - .0; + .into_inner(); let network = NetworkIdentifier::from(chain_id); let node_clients = NodeClients { rosetta_client: &rosetta_client, @@ -663,25 +661,19 @@ async fn test_transfer() { // Sender balance should be 0 assert_eq!( client - .get_account_balance(sender) + .view_apt_account_balance(sender) .await .unwrap() - .into_inner() - .coin - .value - .0, + .into_inner(), 0 ); // Receiver should be sent coins assert_eq!( client - .get_account_balance(receiver) + .view_apt_account_balance(receiver) .await .unwrap() - .into_inner() - .coin - .value - .0, + .into_inner(), max_sent ); } @@ -2152,6 +2144,7 @@ async fn transfer_and_wait( sequence_number, max_gas, gas_unit_price, + native_coin(), ) }, ) diff --git a/testsuite/smoke-test/src/utils.rs b/testsuite/smoke-test/src/utils.rs index 05d0f5c9bbcd4..ca8f761d86071 100644 --- a/testsuite/smoke-test/src/utils.rs +++ b/testsuite/smoke-test/src/utils.rs @@ -189,12 +189,12 @@ pub async fn transfer_and_maybe_reconfig( pub async fn assert_balance(client: &RestClient, account: &LocalAccount, balance: 
u64) { let on_chain_balance = client - .get_account_balance(account.address()) + .view_apt_account_balance(account.address()) .await .unwrap() .into_inner(); - assert_eq!(on_chain_balance.get(), balance); + assert_eq!(on_chain_balance, balance); } /// This helper function creates 3 new accounts, mints funds, transfers funds diff --git a/testsuite/testcases/src/fullnode_reboot_stress_test.rs b/testsuite/testcases/src/fullnode_reboot_stress_test.rs index 8fdd177b5af9b..46934d2616224 100644 --- a/testsuite/testcases/src/fullnode_reboot_stress_test.rs +++ b/testsuite/testcases/src/fullnode_reboot_stress_test.rs @@ -10,6 +10,12 @@ use rand::{seq::SliceRandom, thread_rng}; use std::{sync::Arc, time::Duration}; use tokio::time::Instant; +// The buffer (in seconds) at the end of the test to allow for graceful shutdown +const END_OF_TEST_BUFFER_SECS: u64 = 60; + +// The wait time (in seconds) between fullnode reboots +const WAIT_TIME_BETWEEN_REBOOTS_SECS: u64 = 10; + pub struct FullNodeRebootStressTest; impl Test for FullNodeRebootStressTest { @@ -30,8 +36,19 @@ impl NetworkLoadTest for FullNodeRebootStressTest { _report: &mut TestReport, duration: Duration, ) -> Result<()> { + // Start the test timer let start = Instant::now(); + // Ensure the total test duration is at least as long as the buffer + let end_of_test_buffer = Duration::from_secs(END_OF_TEST_BUFFER_SECS); + if duration <= end_of_test_buffer { + panic!( + "Total test duration must be at least: {:?}! 
Given duration: {:?}", + end_of_test_buffer, duration + ); + } + + // Collect all the fullnodes let all_fullnodes = { swarm .read() @@ -41,7 +58,9 @@ impl NetworkLoadTest for FullNodeRebootStressTest { .collect::>() }; - while start.elapsed() < duration { + // Reboot fullnodes until the test duration is reached + let test_reboot_duration = duration - end_of_test_buffer; + while start.elapsed() < test_reboot_duration { { let swarm = swarm.read().await; let fullnode_to_reboot = { @@ -53,7 +72,7 @@ impl NetworkLoadTest for FullNodeRebootStressTest { fullnode_to_reboot.stop().await?; fullnode_to_reboot.start().await?; } - tokio::time::sleep(Duration::from_secs(10)).await; + tokio::time::sleep(Duration::from_secs(WAIT_TIME_BETWEEN_REBOOTS_SECS)).await; } Ok(()) diff --git a/testsuite/testcases/src/lib.rs b/testsuite/testcases/src/lib.rs index 1857815437254..92320a3136405 100644 --- a/testsuite/testcases/src/lib.rs +++ b/testsuite/testcases/src/lib.rs @@ -283,7 +283,10 @@ impl NetworkTest for dyn NetworkLoadTest { .keys() .into_iter() .map(|slice| { - let slice_samples = phase_stats.latency_breakdown.get_samples(&slice); + let slice_samples = phase_stats + .latency_breakdown + .get_samples(&slice) + .expect("Could not get samples"); format!( "{:?}: max: {:.3}, avg: {:.3}", slice, diff --git a/testsuite/testcases/src/load_vs_perf_benchmark.rs b/testsuite/testcases/src/load_vs_perf_benchmark.rs index e63ff8bc0ec56..634fda24f2ccb 100644 --- a/testsuite/testcases/src/load_vs_perf_benchmark.rs +++ b/testsuite/testcases/src/load_vs_perf_benchmark.rs @@ -6,7 +6,7 @@ use anyhow::Context; use aptos_forge::{ args::TransactionTypeArg, emitter::NumAccountsMode, - prometheus_metrics::{LatencyBreakdown, LatencyBreakdownSlice}, + prometheus_metrics::{LatencyBreakdown, LatencyBreakdownSlice, MetricSamples}, success_criteria::{SuccessCriteria, SuccessCriteriaChecker}, EmitJob, EmitJobMode, EmitJobRequest, NetworkContext, NetworkContextSynchronizer, NetworkTest, Result, Test, TxnStats, 
WorkflowProgress, @@ -471,7 +471,7 @@ fn to_table(type_name: String, results: &[Vec]) -> Vec { let mut table = Vec::new(); table.push(format!( - "{: ]) -> Vec { "pos->prop", "prop->order", "order->commit", - "actual dur" + "actual dur", + // optional indexer metrics + "idx_fn", + "idx_cache", + "idx_data", )); for run_results in results { for result in run_results { let rate = result.stats.rate(); table.push(format!( - "{: ]) -> Vec { rate.p50_latency as f64 / 1000.0, rate.p90_latency as f64 / 1000.0, rate.p99_latency as f64 / 1000.0, - result.latency_breakdown.get_samples(&LatencyBreakdownSlice::QsBatchToPos).max_sample(), - result.latency_breakdown.get_samples(&LatencyBreakdownSlice::QsPosToProposal).max_sample(), - result.latency_breakdown.get_samples(&LatencyBreakdownSlice::ConsensusProposalToOrdered).max_sample(), - result.latency_breakdown.get_samples(&LatencyBreakdownSlice::ConsensusOrderedToCommit).max_sample(), - result.actual_duration.as_secs() + result.latency_breakdown.get_samples(&LatencyBreakdownSlice::QsBatchToPos).unwrap_or(&MetricSamples::default()).max_sample(), + result.latency_breakdown.get_samples(&LatencyBreakdownSlice::QsPosToProposal).unwrap_or(&MetricSamples::default()).max_sample(), + result.latency_breakdown.get_samples(&LatencyBreakdownSlice::ConsensusProposalToOrdered).unwrap_or(&MetricSamples::default()).max_sample(), + result.latency_breakdown.get_samples(&LatencyBreakdownSlice::ConsensusOrderedToCommit).unwrap_or(&MetricSamples::default()).max_sample(), + result.actual_duration.as_secs(), + // optional indexer metrics + result.latency_breakdown.get_samples(&LatencyBreakdownSlice::IndexerFullnodeProcessedBatch).unwrap_or(&MetricSamples::default()).max_sample(), + result.latency_breakdown.get_samples(&LatencyBreakdownSlice::IndexerCacheWorkerProcessedBatch).unwrap_or(&MetricSamples::default()).max_sample(), + 
result.latency_breakdown.get_samples(&LatencyBreakdownSlice::IndexerDataServiceAllChunksSent).unwrap_or(&MetricSamples::default()).max_sample(), )); } } diff --git a/third_party/move/evm/move-to-yul/tests/AccountStateMachine.exp b/third_party/move/evm/move-to-yul/tests/AccountStateMachine.exp index 08f39e05a23f3..19d655cc32931 100644 --- a/third_party/move/evm/move-to-yul/tests/AccountStateMachine.exp +++ b/third_party/move/evm/move-to-yul/tests/AccountStateMachine.exp @@ -48,9 +48,9 @@ object "A3_AccountStateMachine" { $t0 := 0 // $t1 := 0 $t1 := 0 - // $t2 := vector::empty() + // $t2 := vector::empty<0x3::AccountStateMachine::PendingTransfer>() $t2 := A1_vector_empty$A3_AccountStateMachine_PendingTransfer$() - // $t3 := pack AccountStateMachine::Account($t0, $t1, $t2) + // $t3 := pack 0x3::AccountStateMachine::Account($t0, $t1, $t2) { let $mem := $Malloc(48) $MemoryStoreU64(add($mem, 32), $t0) @@ -245,7 +245,7 @@ object "A3_AccountStateMachine" { // label L6 // $t7 := freeze_ref($t5) $t7 := $t5 - // $t8 := vector::length($t7) + // $t8 := vector::length<0x3::AccountStateMachine::PendingTransfer>($t7) $t8 := A1_vector_length$A3_AccountStateMachine_PendingTransfer$($t7) // $t9 := <($t2, $t8) $t9 := $Lt(i, $t8) @@ -255,7 +255,7 @@ object "A3_AccountStateMachine" { default { $block := 4 } } case 3 { - // $t5 := borrow_field.pending($t0) + // $t5 := borrow_field<0x3::AccountStateMachine::Account>.pending($t0) $t5 := this // $t6 := 0 $t6 := 0 @@ -279,11 +279,11 @@ object "A3_AccountStateMachine" { // label L2 // $t10 := freeze_ref($t5) $t10 := $t5 - // $t11 := vector::borrow($t10, $t2) + // $t11 := vector::borrow<0x3::AccountStateMachine::PendingTransfer>($t10, $t2) $t11 := A1_vector_borrow$A3_AccountStateMachine_PendingTransfer$($t10, i) // $t12 := Actor::virtual_time() $t12 := A1_Actor_virtual_time() - // $t13 := borrow_field.initiated_at($t11) + // $t13 := borrow_field<0x3::AccountStateMachine::PendingTransfer>.initiated_at($t11) $t13 := $t11 // $t14 := 
read_ref($t13) $t14 := $LoadU128($t13) @@ -300,7 +300,7 @@ object "A3_AccountStateMachine" { } case 7 { // label L4 - // $t18 := vector::remove($t5, $t2) + // $t18 := vector::remove<0x3::AccountStateMachine::PendingTransfer>($t5, $t2) $t18 := A1_vector_remove$A3_AccountStateMachine_PendingTransfer$($t5, i) // drop($t18) $Free($t18, 32) @@ -345,7 +345,7 @@ object "A3_AccountStateMachine" { case 4 { // $t2 := 43 $t2 := 43 - // $t3 := borrow_field.value($t0) + // $t3 := borrow_field<0x3::AccountStateMachine::Account>.value($t0) $t3 := $IndexPtr(this, 32) // $t4 := read_ref($t3) $t4 := $LoadU64($t3) @@ -360,13 +360,13 @@ object "A3_AccountStateMachine" { } case 5 { // label L2 - // $t8 := borrow_field.value($t0) + // $t8 := borrow_field<0x3::AccountStateMachine::Account>.value($t0) $t8 := $IndexPtr(this, 32) // $t9 := read_ref($t8) $t9 := $LoadU64($t8) // $t10 := +($t9, $t1) $t10 := $AddU64($t9, v) - // $t11 := borrow_field.value($t0) + // $t11 := borrow_field<0x3::AccountStateMachine::Account>.value($t0) $t11 := $IndexPtr(this, 32) // write_ref($t11, $t10) $StoreU64($t11, $t10) @@ -395,7 +395,7 @@ object "A3_AccountStateMachine" { $Abort($t5) } case 4 { - // $t2 := borrow_field.value($t0) + // $t2 := borrow_field<0x3::AccountStateMachine::Account>.value($t0) $t2 := $IndexPtr(this, 32) // $t3 := read_ref($t2) $t3 := $LoadU64($t2) @@ -408,13 +408,13 @@ object "A3_AccountStateMachine" { } case 5 { // label L2 - // $t6 := borrow_field.value($t0) + // $t6 := borrow_field<0x3::AccountStateMachine::Account>.value($t0) $t6 := $IndexPtr(this, 32) // $t7 := read_ref($t6) $t7 := $LoadU64($t6) // $t8 := -($t7, $t1) $t8 := $Sub($t7, v) - // $t9 := borrow_field.value($t0) + // $t9 := borrow_field<0x3::AccountStateMachine::Account>.value($t0) $t9 := $IndexPtr(this, 32) // write_ref($t9, $t8) $StoreU64($t9, $t8) @@ -443,7 +443,7 @@ object "A3_AccountStateMachine" { $Abort($t7) } case 4 { - // $t4 := borrow_field.value($t0) + // $t4 := 
borrow_field<0x3::AccountStateMachine::Account>.value($t0) $t4 := $IndexPtr(this, 32) // $t5 := read_ref($t4) $t5 := $LoadU64($t4) @@ -458,11 +458,11 @@ object "A3_AccountStateMachine" { // label L2 // $t8 := AccountStateMachine::new_xfer_id($t0) $t8 := A3_AccountStateMachine_new_xfer_id(this) - // $t9 := borrow_field.pending($t0) + // $t9 := borrow_field<0x3::AccountStateMachine::Account>.pending($t0) $t9 := this // $t10 := Actor::virtual_time() $t10 := A1_Actor_virtual_time() - // $t11 := pack AccountStateMachine::PendingTransfer($t8, $t2, $t10) + // $t11 := pack 0x3::AccountStateMachine::PendingTransfer($t8, $t2, $t10) { let $mem := $Malloc(32) $MemoryStoreU64(add($mem, 16), $t8) @@ -470,7 +470,7 @@ object "A3_AccountStateMachine" { $MemoryStoreU128(add($mem, 0), $t10) $t11 := $mem } - // vector::push_back($t9, $t11) + // vector::push_back<0x3::AccountStateMachine::PendingTransfer>($t9, $t11) A1_vector_push_back$A3_AccountStateMachine_PendingTransfer$($t9, $t11) // $t12 := Actor::self() $t12 := A1_Actor_self() @@ -496,17 +496,17 @@ object "A3_AccountStateMachine" { $t5 := this // $t6 := AccountStateMachine::find_xfer($t5, $t1) $t6 := A3_AccountStateMachine_find_xfer($t5, xfer_id) - // $t7 := borrow_field.pending($t0) + // $t7 := borrow_field<0x3::AccountStateMachine::Account>.pending($t0) $t7 := this - // $t8 := vector::borrow($t7, $t6) + // $t8 := vector::borrow<0x3::AccountStateMachine::PendingTransfer>($t7, $t6) $t8 := A1_vector_borrow$A3_AccountStateMachine_PendingTransfer$($t7, $t6) - // $t9 := borrow_field.amount($t8) + // $t9 := borrow_field<0x3::AccountStateMachine::PendingTransfer>.amount($t8) $t9 := $IndexPtr($t8, 24) // $t10 := read_ref($t9) $t10 := $LoadU64($t9) - // $t11 := borrow_field.pending($t0) + // $t11 := borrow_field<0x3::AccountStateMachine::Account>.pending($t0) $t11 := this - // $t12 := vector::remove($t11, $t6) + // $t12 := vector::remove<0x3::AccountStateMachine::PendingTransfer>($t11, $t6) $t12 := 
A1_vector_remove$A3_AccountStateMachine_PendingTransfer$($t11, $t6) // drop($t12) $Free($t12, 32) @@ -639,7 +639,7 @@ object "A3_AccountStateMachine" { switch $block case 2 { // label L6 - // $t7 := vector::length($t5) + // $t7 := vector::length<0x3::AccountStateMachine::PendingTransfer>($t5) $t7 := A1_vector_length$A3_AccountStateMachine_PendingTransfer$($t5) // $t8 := <($t3, $t7) $t8 := $Lt(i, $t7) @@ -649,7 +649,7 @@ object "A3_AccountStateMachine" { default { $block := 4 } } case 3 { - // $t5 := borrow_field.pending($t0) + // $t5 := borrow_field<0x3::AccountStateMachine::Account>.pending($t0) $t5 := this // $t6 := 0 $t6 := 0 @@ -674,9 +674,9 @@ object "A3_AccountStateMachine" { } case 6 { // label L2 - // $t9 := vector::borrow($t5, $t3) + // $t9 := vector::borrow<0x3::AccountStateMachine::PendingTransfer>($t5, $t3) $t9 := A1_vector_borrow$A3_AccountStateMachine_PendingTransfer$($t5, i) - // $t10 := borrow_field.xfer_id($t9) + // $t10 := borrow_field<0x3::AccountStateMachine::PendingTransfer>.xfer_id($t9) $t10 := $IndexPtr($t9, 16) // $t11 := read_ref($t10) $t11 := $LoadU64($t10) @@ -703,7 +703,7 @@ object "A3_AccountStateMachine" { } case 9 { // label L4 - // $t14 := vector::length($t5) + // $t14 := vector::length<0x3::AccountStateMachine::PendingTransfer>($t5) $t14 := A1_vector_length$A3_AccountStateMachine_PendingTransfer$($t5) // $t15 := <($t3, $t14) $t15 := $Lt(i, $t14) @@ -782,7 +782,7 @@ object "A3_AccountStateMachine" { } function A3_AccountStateMachine_new_xfer_id(this) -> $result { let counter, xfer_id, $t3, $t4, $t5, $t6, $t7 - // $t3 := borrow_field.xfer_id_counter($t0) + // $t3 := borrow_field<0x3::AccountStateMachine::Account>.xfer_id_counter($t0) $t3 := $IndexPtr(this, 40) // $t4 := read_ref($t3) $t4 := $LoadU64($t3) diff --git a/third_party/move/evm/move-to-yul/tests/ConstructorTest.exp b/third_party/move/evm/move-to-yul/tests/ConstructorTest.exp index b51e33ba8fde2..4666b284b3fcd 100644 --- 
a/third_party/move/evm/move-to-yul/tests/ConstructorTest.exp +++ b/third_party/move/evm/move-to-yul/tests/ConstructorTest.exp @@ -23,14 +23,14 @@ object "A2_ConstructorTest" { mstore($locals, A2_Evm_sign($t3)) // $t4 := borrow_local($t2) $t4 := $MakePtr(false, $locals) - // $t5 := pack ConstructorTest::Balance($t0, $t1) + // $t5 := pack 0x2::ConstructorTest::Balance($t0, $t1) { let $mem := $Malloc(16) $MemoryStoreU64(add($mem, 0), value) $MemoryStoreU64(add($mem, 8), value2) $t5 := $mem } - // move_to($t5, $t4) + // move_to<0x2::ConstructorTest::Balance>($t5, $t4) { let $base_offset := $MakeTypeStorageBase(0, 0x91d9463a, $LoadU256($t4)) if $AlignedStorageLoad($base_offset) { @@ -212,7 +212,7 @@ object "A2_ConstructorTest" { let $t0, $t1, $t2, $t3 // $t0 := 0x42 $t0 := 0x42 - // $t1 := borrow_global($t0) + // $t1 := borrow_global<0x2::ConstructorTest::Balance>($t0) { let $base_offset := $MakeTypeStorageBase(0, 0x91d9463a, $t0) if iszero($AlignedStorageLoad($base_offset)) { @@ -220,7 +220,7 @@ object "A2_ConstructorTest" { } $t1 := $MakePtr(true, add($base_offset, 32)) } - // $t2 := borrow_field.value($t1) + // $t2 := borrow_field<0x2::ConstructorTest::Balance>.value($t1) $t2 := $t1 // $t3 := read_ref($t2) $t3 := $LoadU64($t2) diff --git a/third_party/move/evm/move-to-yul/tests/GlobalVectors.exp b/third_party/move/evm/move-to-yul/tests/GlobalVectors.exp index fcee1e13c2424..12db22c42d0a0 100644 --- a/third_party/move/evm/move-to-yul/tests/GlobalVectors.exp +++ b/third_party/move/evm/move-to-yul/tests/GlobalVectors.exp @@ -93,13 +93,13 @@ object "test_A2_GlobalVectors_test_borrow_mut_global" { $t10 := $MakePtr(false, add($locals, 32)) // $t11 := move($t2) $t11 := mload($locals) - // $t12 := pack GlobalVectors::T($t11) + // $t12 := pack 0x2::GlobalVectors::T($t11) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t11) $t12 := $mem } - // move_to>($t12, $t10) + // move_to<0x2::GlobalVectors::T>($t12, $t10) { let $base_offset := $MakeTypeStorageBase(0, 
0x7da2a540, $LoadU256($t10)) if $AlignedStorageLoad($base_offset) { @@ -128,7 +128,7 @@ object "test_A2_GlobalVectors_test_borrow_mut_global" { } // $t13 := 0x42 $t13 := 0x42 - // $t14 := borrow_global>($t13) + // $t14 := borrow_global<0x2::GlobalVectors::T>($t13) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t13) if iszero($AlignedStorageLoad($base_offset)) { @@ -136,7 +136,7 @@ object "test_A2_GlobalVectors_test_borrow_mut_global" { } $t14 := $MakePtr(true, add($base_offset, 32)) } - // $t15 := borrow_field>.v($t14) + // $t15 := borrow_field<0x2::GlobalVectors::T>.v($t14) $t15 := $t14 // $t16 := 0 $t16 := 0 @@ -148,7 +148,7 @@ object "test_A2_GlobalVectors_test_borrow_mut_global" { $StoreU64($t17, $t18) // $t19 := 0x42 $t19 := 0x42 - // $t20 := borrow_global>($t19) + // $t20 := borrow_global<0x2::GlobalVectors::T>($t19) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t19) if iszero($AlignedStorageLoad($base_offset)) { @@ -156,7 +156,7 @@ object "test_A2_GlobalVectors_test_borrow_mut_global" { } $t20 := $MakePtr(true, add($base_offset, 32)) } - // $t21 := borrow_field>.v($t20) + // $t21 := borrow_field<0x2::GlobalVectors::T>.v($t20) $t21 := $t20 // $t22 := 0 $t22 := 0 @@ -502,13 +502,13 @@ object "test_A2_GlobalVectors_test_move_from" { $t10 := $MakePtr(false, add($locals, 32)) // $t11 := move($t1) $t11 := mload($locals) - // $t12 := pack GlobalVectors::T($t11) + // $t12 := pack 0x2::GlobalVectors::T($t11) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t11) $t12 := $mem } - // move_to>($t12, $t10) + // move_to<0x2::GlobalVectors::T>($t12, $t10) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $LoadU256($t10)) if $AlignedStorageLoad($base_offset) { @@ -537,7 +537,7 @@ object "test_A2_GlobalVectors_test_move_from" { } // $t13 := 0x42 $t13 := 0x42 - // $t14 := move_from>($t13) + // $t14 := move_from<0x2::GlobalVectors::T>($t13) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t13) if 
iszero($AlignedStorageLoad($base_offset)) { @@ -569,7 +569,7 @@ object "test_A2_GlobalVectors_test_move_from" { $t14 := $dst } } - // $t2 := unpack GlobalVectors::T($t14) + // $t2 := unpack 0x2::GlobalVectors::T($t14) mstore(add($locals, 64), $MemoryLoadU256(add($t14, 0))) $Free($t14, 32) // $t15 := borrow_local($t2) @@ -1060,7 +1060,7 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_struct" { $Abort($t25) } case 4 { - // $t2 := vector::empty() + // $t2 := vector::empty<0x2::GlobalVectors::S>() mstore($locals, A1_vector_empty$A2_GlobalVectors_S$()) // $t3 := borrow_local($t2) $t3 := $MakePtr(false, $locals) @@ -1068,14 +1068,14 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_struct" { $t4 := 10 // $t5 := 40 $t5 := 40 - // $t6 := pack GlobalVectors::S($t4, $t5) + // $t6 := pack 0x2::GlobalVectors::S($t4, $t5) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t4) $MemoryStoreU64(add($mem, 16), $t5) $t6 := $mem } - // vector::push_back($t3, $t6) + // vector::push_back<0x2::GlobalVectors::S>($t3, $t6) A1_vector_push_back$A2_GlobalVectors_S$($t3, $t6) // $t7 := borrow_local($t2) $t7 := $MakePtr(false, $locals) @@ -1083,14 +1083,14 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_struct" { $t8 := 11 // $t9 := 41 $t9 := 41 - // $t10 := pack GlobalVectors::S($t8, $t9) + // $t10 := pack 0x2::GlobalVectors::S($t8, $t9) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t8) $MemoryStoreU64(add($mem, 16), $t9) $t10 := $mem } - // vector::push_back($t7, $t10) + // vector::push_back<0x2::GlobalVectors::S>($t7, $t10) A1_vector_push_back$A2_GlobalVectors_S$($t7, $t10) // $t11 := borrow_local($t2) $t11 := $MakePtr(false, $locals) @@ -1098,14 +1098,14 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_struct" { $t12 := 12 // $t13 := 42 $t13 := 42 - // $t14 := pack GlobalVectors::S($t12, $t13) + // $t14 := pack 0x2::GlobalVectors::S($t12, $t13) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t12) $MemoryStoreU64(add($mem, 
16), $t13) $t14 := $mem } - // vector::push_back($t11, $t14) + // vector::push_back<0x2::GlobalVectors::S>($t11, $t14) A1_vector_push_back$A2_GlobalVectors_S$($t11, $t14) // $t15 := 0x42 $t15 := 0x42 @@ -1115,13 +1115,13 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_struct" { $t16 := $MakePtr(false, add($locals, 32)) // $t17 := move($t2) $t17 := mload($locals) - // $t18 := pack GlobalVectors::T($t17) + // $t18 := pack 0x2::GlobalVectors::T<0x2::GlobalVectors::S>($t17) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t17) $t18 := $mem } - // move_to>($t18, $t16) + // move_to<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t18, $t16) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $LoadU256($t16)) if $AlignedStorageLoad($base_offset) { @@ -1156,7 +1156,7 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_struct" { } // $t19 := 0x42 $t19 := 0x42 - // $t1 := move_from>($t19) + // $t1 := move_from<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t19) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t19) if iszero($AlignedStorageLoad($base_offset)) { @@ -1197,9 +1197,9 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_struct" { } // $t20 := borrow_local($t1) $t20 := $MakePtr(false, local_t) - // $t21 := borrow_field>.v($t20) + // $t21 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t20) $t21 := $t20 - // $t22 := vector::length($t21) + // $t22 := vector::length<0x2::GlobalVectors::S>($t21) $t22 := A1_vector_length$A2_GlobalVectors_S$($t21) // $t23 := 3 $t23 := 3 @@ -1214,13 +1214,13 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_struct" { // label L2 // $t26 := borrow_local($t1) $t26 := $MakePtr(false, local_t) - // $t27 := borrow_field>.v($t26) + // $t27 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t26) $t27 := $t26 // $t28 := 0 $t28 := 0 - // $t29 := vector::borrow($t27, $t28) + // $t29 := vector::borrow<0x2::GlobalVectors::S>($t27, $t28) $t29 := 
A1_vector_borrow$A2_GlobalVectors_S$($t27, $t28) - // $t30 := borrow_field.x($t29) + // $t30 := borrow_field<0x2::GlobalVectors::S>.x($t29) $t30 := $t29 // $t31 := read_ref($t30) $t31 := $LoadU128($t30) @@ -1249,13 +1249,13 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_struct" { // label L5 // $t35 := borrow_local($t1) $t35 := $MakePtr(false, local_t) - // $t36 := borrow_field>.v($t35) + // $t36 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t35) $t36 := $t35 // $t37 := 1 $t37 := 1 - // $t38 := vector::borrow($t36, $t37) + // $t38 := vector::borrow<0x2::GlobalVectors::S>($t36, $t37) $t38 := A1_vector_borrow$A2_GlobalVectors_S$($t36, $t37) - // $t39 := borrow_field.x($t38) + // $t39 := borrow_field<0x2::GlobalVectors::S>.x($t38) $t39 := $t38 // $t40 := read_ref($t39) $t40 := $LoadU128($t39) @@ -1284,13 +1284,13 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_struct" { // label L8 // $t44 := borrow_local($t1) $t44 := $MakePtr(false, local_t) - // $t45 := borrow_field>.v($t44) + // $t45 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t44) $t45 := $t44 // $t46 := 2 $t46 := 2 - // $t47 := vector::borrow($t45, $t46) + // $t47 := vector::borrow<0x2::GlobalVectors::S>($t45, $t46) $t47 := A1_vector_borrow$A2_GlobalVectors_S$($t45, $t46) - // $t48 := borrow_field.x($t47) + // $t48 := borrow_field<0x2::GlobalVectors::S>.x($t47) $t48 := $t47 // $t49 := read_ref($t48) $t49 := $LoadU128($t48) @@ -1685,13 +1685,13 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_vector" { $t13 := $MakePtr(false, add($locals, 32)) // $t14 := move($t2) $t14 := mload($locals) - // $t15 := pack GlobalVectors::T>($t14) + // $t15 := pack 0x2::GlobalVectors::T>($t14) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t14) $t15 := $mem } - // move_to>>($t15, $t13) + // move_to<0x2::GlobalVectors::T>>($t15, $t13) { let $base_offset := $MakeTypeStorageBase(0, 0x9947b477, $LoadU256($t13)) if $AlignedStorageLoad($base_offset) { @@ 
-1733,7 +1733,7 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_vector" { } // $t16 := 0x42 $t16 := 0x42 - // $t1 := move_from>>($t16) + // $t1 := move_from<0x2::GlobalVectors::T>>($t16) { let $base_offset := $MakeTypeStorageBase(0, 0x9947b477, $t16) if iszero($AlignedStorageLoad($base_offset)) { @@ -1783,7 +1783,7 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_vector" { } // $t17 := borrow_local($t1) $t17 := $MakePtr(false, local_t) - // $t18 := borrow_field>>.v($t17) + // $t18 := borrow_field<0x2::GlobalVectors::T>>.v($t17) $t18 := $t17 // $t19 := vector::length>($t18) $t19 := A1_vector_length$vec$u64$$($t18) @@ -1800,7 +1800,7 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_vector" { // label L2 // $t23 := borrow_local($t1) $t23 := $MakePtr(false, local_t) - // $t24 := borrow_field>>.v($t23) + // $t24 := borrow_field<0x2::GlobalVectors::T>>.v($t23) $t24 := $t23 // $t25 := 0 $t25 := 0 @@ -1837,7 +1837,7 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_vector" { // label L5 // $t33 := borrow_local($t1) $t33 := $MakePtr(false, local_t) - // $t34 := borrow_field>>.v($t33) + // $t34 := borrow_field<0x2::GlobalVectors::T>>.v($t33) $t34 := $t33 // $t35 := 1 $t35 := 1 @@ -1874,7 +1874,7 @@ object "test_A2_GlobalVectors_test_move_from_vector_of_vector" { // label L8 // $t43 := borrow_local($t1) $t43 := $MakePtr(false, local_t) - // $t44 := borrow_field>>.v($t43) + // $t44 := borrow_field<0x2::GlobalVectors::T>>.v($t43) $t44 := $t43 // $t45 := 2 $t45 := 2 @@ -2298,13 +2298,13 @@ object "test_A2_GlobalVectors_test_move_to" { $t9 := $MakePtr(false, add($locals, 32)) // $t10 := move($t1) $t10 := mload($locals) - // $t11 := pack GlobalVectors::T($t10) + // $t11 := pack 0x2::GlobalVectors::T($t10) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t10) $t11 := $mem } - // move_to>($t11, $t9) + // move_to<0x2::GlobalVectors::T>($t11, $t9) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $LoadU256($t9)) if 
$AlignedStorageLoad($base_offset) { @@ -2333,7 +2333,7 @@ object "test_A2_GlobalVectors_test_move_to" { } // $t12 := 0x42 $t12 := 0x42 - // $t13 := borrow_global>($t12) + // $t13 := borrow_global<0x2::GlobalVectors::T>($t12) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t12) if iszero($AlignedStorageLoad($base_offset)) { @@ -2341,7 +2341,7 @@ object "test_A2_GlobalVectors_test_move_to" { } $t13 := $MakePtr(true, add($base_offset, 32)) } - // $t14 := borrow_field>.v($t13) + // $t14 := borrow_field<0x2::GlobalVectors::T>.v($t13) $t14 := $t13 // $t15 := vector::length($t14) $t15 := A1_vector_length$u64$($t14) @@ -2358,7 +2358,7 @@ object "test_A2_GlobalVectors_test_move_to" { // label L2 // $t19 := 0x42 $t19 := 0x42 - // $t20 := borrow_global>($t19) + // $t20 := borrow_global<0x2::GlobalVectors::T>($t19) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t19) if iszero($AlignedStorageLoad($base_offset)) { @@ -2366,7 +2366,7 @@ object "test_A2_GlobalVectors_test_move_to" { } $t20 := $MakePtr(true, add($base_offset, 32)) } - // $t21 := borrow_field>.v($t20) + // $t21 := borrow_field<0x2::GlobalVectors::T>.v($t20) $t21 := $t20 // $t22 := 0 $t22 := 0 @@ -2399,7 +2399,7 @@ object "test_A2_GlobalVectors_test_move_to" { // label L5 // $t28 := 0x42 $t28 := 0x42 - // $t29 := borrow_global>($t28) + // $t29 := borrow_global<0x2::GlobalVectors::T>($t28) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t28) if iszero($AlignedStorageLoad($base_offset)) { @@ -2407,7 +2407,7 @@ object "test_A2_GlobalVectors_test_move_to" { } $t29 := $MakePtr(true, add($base_offset, 32)) } - // $t30 := borrow_field>.v($t29) + // $t30 := borrow_field<0x2::GlobalVectors::T>.v($t29) $t30 := $t29 // $t31 := 1 $t31 := 1 @@ -2440,7 +2440,7 @@ object "test_A2_GlobalVectors_test_move_to" { // label L8 // $t37 := 0x42 $t37 := 0x42 - // $t38 := borrow_global>($t37) + // $t38 := borrow_global<0x2::GlobalVectors::T>($t37) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, 
$t37) if iszero($AlignedStorageLoad($base_offset)) { @@ -2448,7 +2448,7 @@ object "test_A2_GlobalVectors_test_move_to" { } $t38 := $MakePtr(true, add($base_offset, 32)) } - // $t39 := borrow_field>.v($t38) + // $t39 := borrow_field<0x2::GlobalVectors::T>.v($t38) $t39 := $t38 // $t40 := 2 $t40 := 2 @@ -2776,7 +2776,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { $Abort($t24) } case 4 { - // $t1 := vector::empty() + // $t1 := vector::empty<0x2::GlobalVectors::S>() mstore($locals, A1_vector_empty$A2_GlobalVectors_S$()) // $t2 := borrow_local($t1) $t2 := $MakePtr(false, $locals) @@ -2784,14 +2784,14 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { $t3 := 10 // $t4 := 40 $t4 := 40 - // $t5 := pack GlobalVectors::S($t3, $t4) + // $t5 := pack 0x2::GlobalVectors::S($t3, $t4) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t3) $MemoryStoreU64(add($mem, 16), $t4) $t5 := $mem } - // vector::push_back($t2, $t5) + // vector::push_back<0x2::GlobalVectors::S>($t2, $t5) A1_vector_push_back$A2_GlobalVectors_S$($t2, $t5) // $t6 := borrow_local($t1) $t6 := $MakePtr(false, $locals) @@ -2799,14 +2799,14 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { $t7 := 11 // $t8 := 41 $t8 := 41 - // $t9 := pack GlobalVectors::S($t7, $t8) + // $t9 := pack 0x2::GlobalVectors::S($t7, $t8) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t7) $MemoryStoreU64(add($mem, 16), $t8) $t9 := $mem } - // vector::push_back($t6, $t9) + // vector::push_back<0x2::GlobalVectors::S>($t6, $t9) A1_vector_push_back$A2_GlobalVectors_S$($t6, $t9) // $t10 := borrow_local($t1) $t10 := $MakePtr(false, $locals) @@ -2814,14 +2814,14 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { $t11 := 12 // $t12 := 42 $t12 := 42 - // $t13 := pack GlobalVectors::S($t11, $t12) + // $t13 := pack 0x2::GlobalVectors::S($t11, $t12) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t11) $MemoryStoreU64(add($mem, 16), $t12) $t13 := $mem } - // 
vector::push_back($t10, $t13) + // vector::push_back<0x2::GlobalVectors::S>($t10, $t13) A1_vector_push_back$A2_GlobalVectors_S$($t10, $t13) // $t14 := 0x42 $t14 := 0x42 @@ -2831,13 +2831,13 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { $t15 := $MakePtr(false, add($locals, 32)) // $t16 := move($t1) $t16 := mload($locals) - // $t17 := pack GlobalVectors::T($t16) + // $t17 := pack 0x2::GlobalVectors::T<0x2::GlobalVectors::S>($t16) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t16) $t17 := $mem } - // move_to>($t17, $t15) + // move_to<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t17, $t15) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $LoadU256($t15)) if $AlignedStorageLoad($base_offset) { @@ -2872,7 +2872,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { } // $t18 := 0x42 $t18 := 0x42 - // $t19 := borrow_global>($t18) + // $t19 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t18) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t18) if iszero($AlignedStorageLoad($base_offset)) { @@ -2880,9 +2880,9 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { } $t19 := $MakePtr(true, add($base_offset, 32)) } - // $t20 := borrow_field>.v($t19) + // $t20 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t19) $t20 := $t19 - // $t21 := vector::length($t20) + // $t21 := vector::length<0x2::GlobalVectors::S>($t20) $t21 := A1_vector_length$A2_GlobalVectors_S$($t20) // $t22 := 3 $t22 := 3 @@ -2897,7 +2897,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { // label L2 // $t25 := 0x42 $t25 := 0x42 - // $t26 := borrow_global>($t25) + // $t26 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t25) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t25) if iszero($AlignedStorageLoad($base_offset)) { @@ -2905,13 +2905,13 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { } $t26 := $MakePtr(true, add($base_offset, 32)) } - 
// $t27 := borrow_field>.v($t26) + // $t27 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t26) $t27 := $t26 // $t28 := 0 $t28 := 0 - // $t29 := vector::borrow($t27, $t28) + // $t29 := vector::borrow<0x2::GlobalVectors::S>($t27, $t28) $t29 := A1_vector_borrow$A2_GlobalVectors_S$($t27, $t28) - // $t30 := borrow_field.x($t29) + // $t30 := borrow_field<0x2::GlobalVectors::S>.x($t29) $t30 := $t29 // $t31 := read_ref($t30) $t31 := $LoadU128($t30) @@ -2940,7 +2940,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { // label L5 // $t35 := 0x42 $t35 := 0x42 - // $t36 := borrow_global>($t35) + // $t36 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t35) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t35) if iszero($AlignedStorageLoad($base_offset)) { @@ -2948,13 +2948,13 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { } $t36 := $MakePtr(true, add($base_offset, 32)) } - // $t37 := borrow_field>.v($t36) + // $t37 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t36) $t37 := $t36 // $t38 := 1 $t38 := 1 - // $t39 := vector::borrow($t37, $t38) + // $t39 := vector::borrow<0x2::GlobalVectors::S>($t37, $t38) $t39 := A1_vector_borrow$A2_GlobalVectors_S$($t37, $t38) - // $t40 := borrow_field.x($t39) + // $t40 := borrow_field<0x2::GlobalVectors::S>.x($t39) $t40 := $t39 // $t41 := read_ref($t40) $t41 := $LoadU128($t40) @@ -2983,7 +2983,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { // label L8 // $t45 := 0x42 $t45 := 0x42 - // $t46 := borrow_global>($t45) + // $t46 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t45) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t45) if iszero($AlignedStorageLoad($base_offset)) { @@ -2991,13 +2991,13 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_struct" { } $t46 := $MakePtr(true, add($base_offset, 32)) } - // $t47 := borrow_field>.v($t46) + // $t47 := 
borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t46) $t47 := $t46 // $t48 := 2 $t48 := 2 - // $t49 := vector::borrow($t47, $t48) + // $t49 := vector::borrow<0x2::GlobalVectors::S>($t47, $t48) $t49 := A1_vector_borrow$A2_GlobalVectors_S$($t47, $t48) - // $t50 := borrow_field.x($t49) + // $t50 := borrow_field<0x2::GlobalVectors::S>.x($t49) $t50 := $t49 // $t51 := read_ref($t50) $t51 := $LoadU128($t50) @@ -3383,13 +3383,13 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_vector" { $t12 := $MakePtr(false, add($locals, 32)) // $t13 := move($t1) $t13 := mload($locals) - // $t14 := pack GlobalVectors::T>($t13) + // $t14 := pack 0x2::GlobalVectors::T>($t13) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t13) $t14 := $mem } - // move_to>>($t14, $t12) + // move_to<0x2::GlobalVectors::T>>($t14, $t12) { let $base_offset := $MakeTypeStorageBase(0, 0x9947b477, $LoadU256($t12)) if $AlignedStorageLoad($base_offset) { @@ -3431,7 +3431,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_vector" { } // $t15 := 0x42 $t15 := 0x42 - // $t16 := borrow_global>>($t15) + // $t16 := borrow_global<0x2::GlobalVectors::T>>($t15) { let $base_offset := $MakeTypeStorageBase(0, 0x9947b477, $t15) if iszero($AlignedStorageLoad($base_offset)) { @@ -3439,7 +3439,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_vector" { } $t16 := $MakePtr(true, add($base_offset, 32)) } - // $t17 := borrow_field>>.v($t16) + // $t17 := borrow_field<0x2::GlobalVectors::T>>.v($t16) $t17 := $t16 // $t18 := vector::length>($t17) $t18 := A1_vector_length$vec$u64$$($t17) @@ -3456,7 +3456,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_vector" { // label L2 // $t22 := 0x42 $t22 := 0x42 - // $t23 := borrow_global>>($t22) + // $t23 := borrow_global<0x2::GlobalVectors::T>>($t22) { let $base_offset := $MakeTypeStorageBase(0, 0x9947b477, $t22) if iszero($AlignedStorageLoad($base_offset)) { @@ -3464,7 +3464,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_vector" { } 
$t23 := $MakePtr(true, add($base_offset, 32)) } - // $t24 := borrow_field>>.v($t23) + // $t24 := borrow_field<0x2::GlobalVectors::T>>.v($t23) $t24 := $t23 // $t25 := 0 $t25 := 0 @@ -3501,7 +3501,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_vector" { // label L5 // $t33 := 0x42 $t33 := 0x42 - // $t34 := borrow_global>>($t33) + // $t34 := borrow_global<0x2::GlobalVectors::T>>($t33) { let $base_offset := $MakeTypeStorageBase(0, 0x9947b477, $t33) if iszero($AlignedStorageLoad($base_offset)) { @@ -3509,7 +3509,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_vector" { } $t34 := $MakePtr(true, add($base_offset, 32)) } - // $t35 := borrow_field>>.v($t34) + // $t35 := borrow_field<0x2::GlobalVectors::T>>.v($t34) $t35 := $t34 // $t36 := 1 $t36 := 1 @@ -3546,7 +3546,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_vector" { // label L8 // $t44 := 0x42 $t44 := 0x42 - // $t45 := borrow_global>>($t44) + // $t45 := borrow_global<0x2::GlobalVectors::T>>($t44) { let $base_offset := $MakeTypeStorageBase(0, 0x9947b477, $t44) if iszero($AlignedStorageLoad($base_offset)) { @@ -3554,7 +3554,7 @@ object "test_A2_GlobalVectors_test_move_to_vector_of_vector" { } $t45 := $MakePtr(true, add($base_offset, 32)) } - // $t46 := borrow_field>>.v($t45) + // $t46 := borrow_field<0x2::GlobalVectors::T>>.v($t45) $t46 := $t45 // $t47 := 2 $t47 := 2 @@ -3969,13 +3969,13 @@ object "test_A2_GlobalVectors_test_pop_back_global" { $t9 := $MakePtr(false, add($locals, 32)) // $t10 := move($t1) $t10 := mload($locals) - // $t11 := pack GlobalVectors::T($t10) + // $t11 := pack 0x2::GlobalVectors::T($t10) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t10) $t11 := $mem } - // move_to>($t11, $t9) + // move_to<0x2::GlobalVectors::T>($t11, $t9) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $LoadU256($t9)) if $AlignedStorageLoad($base_offset) { @@ -4004,7 +4004,7 @@ object "test_A2_GlobalVectors_test_pop_back_global" { } // $t12 := 0x42 $t12 := 0x42 - // $t13 := 
borrow_global>($t12) + // $t13 := borrow_global<0x2::GlobalVectors::T>($t12) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t12) if iszero($AlignedStorageLoad($base_offset)) { @@ -4012,7 +4012,7 @@ object "test_A2_GlobalVectors_test_pop_back_global" { } $t13 := $MakePtr(true, add($base_offset, 32)) } - // $t14 := borrow_field>.v($t13) + // $t14 := borrow_field<0x2::GlobalVectors::T>.v($t13) $t14 := $t13 // $t15 := vector::pop_back($t14) $t15 := A1_vector_pop_back$u64$($t14) @@ -4029,7 +4029,7 @@ object "test_A2_GlobalVectors_test_pop_back_global" { // label L2 // $t19 := 0x42 $t19 := 0x42 - // $t20 := borrow_global>($t19) + // $t20 := borrow_global<0x2::GlobalVectors::T>($t19) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t19) if iszero($AlignedStorageLoad($base_offset)) { @@ -4037,7 +4037,7 @@ object "test_A2_GlobalVectors_test_pop_back_global" { } $t20 := $MakePtr(true, add($base_offset, 32)) } - // $t21 := borrow_field>.v($t20) + // $t21 := borrow_field<0x2::GlobalVectors::T>.v($t20) $t21 := $t20 // $t22 := vector::length($t21) $t22 := A1_vector_length$u64$($t21) @@ -4066,7 +4066,7 @@ object "test_A2_GlobalVectors_test_pop_back_global" { // label L5 // $t26 := 0x42 $t26 := 0x42 - // $t27 := borrow_global>($t26) + // $t27 := borrow_global<0x2::GlobalVectors::T>($t26) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t26) if iszero($AlignedStorageLoad($base_offset)) { @@ -4074,7 +4074,7 @@ object "test_A2_GlobalVectors_test_pop_back_global" { } $t27 := $MakePtr(true, add($base_offset, 32)) } - // $t28 := borrow_field>.v($t27) + // $t28 := borrow_field<0x2::GlobalVectors::T>.v($t27) $t28 := $t27 // $t29 := vector::pop_back($t28) $t29 := A1_vector_pop_back$u64$($t28) @@ -4103,7 +4103,7 @@ object "test_A2_GlobalVectors_test_pop_back_global" { // label L8 // $t33 := 0x42 $t33 := 0x42 - // $t34 := borrow_global>($t33) + // $t34 := borrow_global<0x2::GlobalVectors::T>($t33) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t33) 
if iszero($AlignedStorageLoad($base_offset)) { @@ -4111,7 +4111,7 @@ object "test_A2_GlobalVectors_test_pop_back_global" { } $t34 := $MakePtr(true, add($base_offset, 32)) } - // $t35 := borrow_field>.v($t34) + // $t35 := borrow_field<0x2::GlobalVectors::T>.v($t34) $t35 := $t34 // $t36 := vector::length($t35) $t36 := A1_vector_length$u64$($t35) @@ -4425,7 +4425,7 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { // label L1 // $t29 := borrow_local($t3) $t29 := $MakePtr(false, e) - // $t30 := borrow_field.y($t29) + // $t30 := borrow_field<0x2::GlobalVectors::S>.y($t29) $t30 := $IndexPtr($t29, 16) // $t31 := read_ref($t30) $t31 := $LoadU64($t30) @@ -4446,7 +4446,7 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { $block := 5 } case 4 { - // $t4 := vector::empty() + // $t4 := vector::empty<0x2::GlobalVectors::S>() mstore($locals, A1_vector_empty$A2_GlobalVectors_S$()) // $t5 := borrow_local($t4) $t5 := $MakePtr(false, $locals) @@ -4454,14 +4454,14 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { $t6 := 10 // $t7 := 40 $t7 := 40 - // $t8 := pack GlobalVectors::S($t6, $t7) + // $t8 := pack 0x2::GlobalVectors::S($t6, $t7) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t6) $MemoryStoreU64(add($mem, 16), $t7) $t8 := $mem } - // vector::push_back($t5, $t8) + // vector::push_back<0x2::GlobalVectors::S>($t5, $t8) A1_vector_push_back$A2_GlobalVectors_S$($t5, $t8) // $t9 := borrow_local($t4) $t9 := $MakePtr(false, $locals) @@ -4469,14 +4469,14 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { $t10 := 11 // $t11 := 41 $t11 := 41 - // $t12 := pack GlobalVectors::S($t10, $t11) + // $t12 := pack 0x2::GlobalVectors::S($t10, $t11) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t10) $MemoryStoreU64(add($mem, 16), $t11) $t12 := $mem } - // vector::push_back($t9, $t12) + // vector::push_back<0x2::GlobalVectors::S>($t9, $t12) A1_vector_push_back$A2_GlobalVectors_S$($t9, $t12) // $t13 := borrow_local($t4) $t13 
:= $MakePtr(false, $locals) @@ -4484,14 +4484,14 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { $t14 := 12 // $t15 := 42 $t15 := 42 - // $t16 := pack GlobalVectors::S($t14, $t15) + // $t16 := pack 0x2::GlobalVectors::S($t14, $t15) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t14) $MemoryStoreU64(add($mem, 16), $t15) $t16 := $mem } - // vector::push_back($t13, $t16) + // vector::push_back<0x2::GlobalVectors::S>($t13, $t16) A1_vector_push_back$A2_GlobalVectors_S$($t13, $t16) // $t17 := 0x42 $t17 := 0x42 @@ -4501,13 +4501,13 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { $t18 := $MakePtr(false, add($locals, 32)) // $t19 := move($t4) $t19 := mload($locals) - // $t20 := pack GlobalVectors::T($t19) + // $t20 := pack 0x2::GlobalVectors::T<0x2::GlobalVectors::S>($t19) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t19) $t20 := $mem } - // move_to>($t20, $t18) + // move_to<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t20, $t18) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $LoadU256($t18)) if $AlignedStorageLoad($base_offset) { @@ -4542,7 +4542,7 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { } // $t21 := 0x42 $t21 := 0x42 - // $t22 := borrow_global>($t21) + // $t22 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t21) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t21) if iszero($AlignedStorageLoad($base_offset)) { @@ -4550,13 +4550,13 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { } $t22 := $MakePtr(true, add($base_offset, 32)) } - // $t23 := borrow_field>.v($t22) + // $t23 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t22) $t23 := $t22 - // $t3 := vector::pop_back($t23) + // $t3 := vector::pop_back<0x2::GlobalVectors::S>($t23) e := A1_vector_pop_back$A2_GlobalVectors_S$($t23) // $t24 := borrow_local($t3) $t24 := $MakePtr(false, e) - // $t25 := borrow_field.x($t24) + // $t25 := borrow_field<0x2::GlobalVectors::S>.x($t24) 
$t25 := $t24 // $t26 := read_ref($t25) $t26 := $LoadU128($t25) @@ -4592,7 +4592,7 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { // label L5 // $t35 := 0x42 $t35 := 0x42 - // $t36 := borrow_global>($t35) + // $t36 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t35) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t35) if iszero($AlignedStorageLoad($base_offset)) { @@ -4600,9 +4600,9 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { } $t36 := $MakePtr(true, add($base_offset, 32)) } - // $t37 := borrow_field>.v($t36) + // $t37 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t36) $t37 := $t36 - // $t38 := vector::length($t37) + // $t38 := vector::length<0x2::GlobalVectors::S>($t37) $t38 := A1_vector_length$A2_GlobalVectors_S$($t37) // $t39 := 2 $t39 := 2 @@ -4629,7 +4629,7 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { // label L8 // $t42 := 0x42 $t42 := 0x42 - // $t43 := borrow_global>($t42) + // $t43 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t42) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t42) if iszero($AlignedStorageLoad($base_offset)) { @@ -4637,13 +4637,13 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { } $t43 := $MakePtr(true, add($base_offset, 32)) } - // $t44 := borrow_field>.v($t43) + // $t44 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t43) $t44 := $t43 - // $t3 := vector::pop_back($t44) + // $t3 := vector::pop_back<0x2::GlobalVectors::S>($t44) e := A1_vector_pop_back$A2_GlobalVectors_S$($t44) // $t45 := borrow_local($t3) $t45 := $MakePtr(false, e) - // $t46 := borrow_field.x($t45) + // $t46 := borrow_field<0x2::GlobalVectors::S>.x($t45) $t46 := $t45 // $t47 := read_ref($t46) $t47 := $LoadU128($t46) @@ -4660,7 +4660,7 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { // label L10 // $t50 := borrow_local($t3) $t50 := $MakePtr(false, e) - // $t51 := borrow_field.y($t50) + // $t51 := 
borrow_field<0x2::GlobalVectors::S>.y($t50) $t51 := $IndexPtr($t50, 16) // $t52 := read_ref($t51) $t52 := $LoadU64($t51) @@ -4703,7 +4703,7 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { // label L14 // $t56 := 0x42 $t56 := 0x42 - // $t57 := borrow_global>($t56) + // $t57 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t56) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t56) if iszero($AlignedStorageLoad($base_offset)) { @@ -4711,9 +4711,9 @@ object "test_A2_GlobalVectors_test_pop_back_struct_global" { } $t57 := $MakePtr(true, add($base_offset, 32)) } - // $t58 := borrow_field>.v($t57) + // $t58 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t57) $t58 := $t57 - // $t59 := vector::length($t58) + // $t59 := vector::length<0x2::GlobalVectors::S>($t58) $t59 := A1_vector_length$A2_GlobalVectors_S$($t58) // $t60 := 1 $t60 := 1 @@ -5096,13 +5096,13 @@ object "test_A2_GlobalVectors_test_push_back_global" { $t9 := $MakePtr(false, add($locals, 32)) // $t10 := copy($t1) $t10 := mload($locals) - // $t11 := pack GlobalVectors::T($t10) + // $t11 := pack 0x2::GlobalVectors::T($t10) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t10) $t11 := $mem } - // move_to>($t11, $t9) + // move_to<0x2::GlobalVectors::T>($t11, $t9) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $LoadU256($t9)) if $AlignedStorageLoad($base_offset) { @@ -5131,7 +5131,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { } // $t12 := 0x42 $t12 := 0x42 - // $t13 := borrow_global>($t12) + // $t13 := borrow_global<0x2::GlobalVectors::T>($t12) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t12) if iszero($AlignedStorageLoad($base_offset)) { @@ -5139,7 +5139,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { } $t13 := $MakePtr(true, add($base_offset, 32)) } - // $t14 := borrow_field>.v($t13) + // $t14 := borrow_field<0x2::GlobalVectors::T>.v($t13) $t14 := $t13 // $t15 := 13 $t15 := 13 @@ -5147,7 +5147,7 @@ 
object "test_A2_GlobalVectors_test_push_back_global" { A1_vector_push_back$u64$($t14, $t15) // $t16 := 0x42 $t16 := 0x42 - // $t17 := borrow_global>($t16) + // $t17 := borrow_global<0x2::GlobalVectors::T>($t16) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t16) if iszero($AlignedStorageLoad($base_offset)) { @@ -5155,7 +5155,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { } $t17 := $MakePtr(true, add($base_offset, 32)) } - // $t18 := borrow_field>.v($t17) + // $t18 := borrow_field<0x2::GlobalVectors::T>.v($t17) $t18 := $t17 // $t19 := 14 $t19 := 14 @@ -5163,7 +5163,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { A1_vector_push_back$u64$($t18, $t19) // $t20 := 0x42 $t20 := 0x42 - // $t21 := borrow_global>($t20) + // $t21 := borrow_global<0x2::GlobalVectors::T>($t20) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t20) if iszero($AlignedStorageLoad($base_offset)) { @@ -5171,7 +5171,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { } $t21 := $MakePtr(true, add($base_offset, 32)) } - // $t22 := borrow_field>.v($t21) + // $t22 := borrow_field<0x2::GlobalVectors::T>.v($t21) $t22 := $t21 // $t23 := vector::length($t22) $t23 := A1_vector_length$u64$($t22) @@ -5188,7 +5188,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { // label L2 // $t27 := 0x42 $t27 := 0x42 - // $t28 := borrow_global>($t27) + // $t28 := borrow_global<0x2::GlobalVectors::T>($t27) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t27) if iszero($AlignedStorageLoad($base_offset)) { @@ -5196,7 +5196,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { } $t28 := $MakePtr(true, add($base_offset, 32)) } - // $t29 := borrow_field>.v($t28) + // $t29 := borrow_field<0x2::GlobalVectors::T>.v($t28) $t29 := $t28 // $t30 := 0 $t30 := 0 @@ -5229,7 +5229,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { // label L5 // $t36 := 0x42 $t36 := 0x42 - // $t37 := borrow_global>($t36) + // $t37 := 
borrow_global<0x2::GlobalVectors::T>($t36) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t36) if iszero($AlignedStorageLoad($base_offset)) { @@ -5237,7 +5237,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { } $t37 := $MakePtr(true, add($base_offset, 32)) } - // $t38 := borrow_field>.v($t37) + // $t38 := borrow_field<0x2::GlobalVectors::T>.v($t37) $t38 := $t37 // $t39 := 1 $t39 := 1 @@ -5270,7 +5270,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { // label L8 // $t45 := 0x42 $t45 := 0x42 - // $t46 := borrow_global>($t45) + // $t46 := borrow_global<0x2::GlobalVectors::T>($t45) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t45) if iszero($AlignedStorageLoad($base_offset)) { @@ -5278,7 +5278,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { } $t46 := $MakePtr(true, add($base_offset, 32)) } - // $t47 := borrow_field>.v($t46) + // $t47 := borrow_field<0x2::GlobalVectors::T>.v($t46) $t47 := $t46 // $t48 := 2 $t48 := 2 @@ -5311,7 +5311,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { // label L11 // $t54 := 0x42 $t54 := 0x42 - // $t55 := borrow_global>($t54) + // $t55 := borrow_global<0x2::GlobalVectors::T>($t54) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t54) if iszero($AlignedStorageLoad($base_offset)) { @@ -5319,7 +5319,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { } $t55 := $MakePtr(true, add($base_offset, 32)) } - // $t56 := borrow_field>.v($t55) + // $t56 := borrow_field<0x2::GlobalVectors::T>.v($t55) $t56 := $t55 // $t57 := 3 $t57 := 3 @@ -5352,7 +5352,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { // label L14 // $t63 := 0x42 $t63 := 0x42 - // $t64 := borrow_global>($t63) + // $t64 := borrow_global<0x2::GlobalVectors::T>($t63) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t63) if iszero($AlignedStorageLoad($base_offset)) { @@ -5360,7 +5360,7 @@ object "test_A2_GlobalVectors_test_push_back_global" { } $t64 := $MakePtr(true, add($base_offset, 
32)) } - // $t65 := borrow_field>.v($t64) + // $t65 := borrow_field<0x2::GlobalVectors::T>.v($t64) $t65 := $t64 // $t66 := 4 $t66 := 4 @@ -5688,7 +5688,7 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { $Abort($t36) } case 4 { - // $t1 := vector::empty() + // $t1 := vector::empty<0x2::GlobalVectors::S>() mstore($locals, A1_vector_empty$A2_GlobalVectors_S$()) // $t2 := borrow_local($t1) $t2 := $MakePtr(false, $locals) @@ -5696,14 +5696,14 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { $t3 := 10 // $t4 := 40 $t4 := 40 - // $t5 := pack GlobalVectors::S($t3, $t4) + // $t5 := pack 0x2::GlobalVectors::S($t3, $t4) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t3) $MemoryStoreU64(add($mem, 16), $t4) $t5 := $mem } - // vector::push_back($t2, $t5) + // vector::push_back<0x2::GlobalVectors::S>($t2, $t5) A1_vector_push_back$A2_GlobalVectors_S$($t2, $t5) // $t6 := borrow_local($t1) $t6 := $MakePtr(false, $locals) @@ -5711,14 +5711,14 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { $t7 := 11 // $t8 := 41 $t8 := 41 - // $t9 := pack GlobalVectors::S($t7, $t8) + // $t9 := pack 0x2::GlobalVectors::S($t7, $t8) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t7) $MemoryStoreU64(add($mem, 16), $t8) $t9 := $mem } - // vector::push_back($t6, $t9) + // vector::push_back<0x2::GlobalVectors::S>($t6, $t9) A1_vector_push_back$A2_GlobalVectors_S$($t6, $t9) // $t10 := borrow_local($t1) $t10 := $MakePtr(false, $locals) @@ -5726,14 +5726,14 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { $t11 := 12 // $t12 := 42 $t12 := 42 - // $t13 := pack GlobalVectors::S($t11, $t12) + // $t13 := pack 0x2::GlobalVectors::S($t11, $t12) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t11) $MemoryStoreU64(add($mem, 16), $t12) $t13 := $mem } - // vector::push_back($t10, $t13) + // vector::push_back<0x2::GlobalVectors::S>($t10, $t13) A1_vector_push_back$A2_GlobalVectors_S$($t10, $t13) // $t14 := 0x42 $t14 := 0x42 @@ 
-5743,13 +5743,13 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { $t15 := $MakePtr(false, add($locals, 32)) // $t16 := move($t1) $t16 := mload($locals) - // $t17 := pack GlobalVectors::T($t16) + // $t17 := pack 0x2::GlobalVectors::T<0x2::GlobalVectors::S>($t16) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t16) $t17 := $mem } - // move_to>($t17, $t15) + // move_to<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t17, $t15) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $LoadU256($t15)) if $AlignedStorageLoad($base_offset) { @@ -5784,7 +5784,7 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { } // $t18 := 0x42 $t18 := 0x42 - // $t19 := borrow_global>($t18) + // $t19 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t18) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t18) if iszero($AlignedStorageLoad($base_offset)) { @@ -5792,24 +5792,24 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { } $t19 := $MakePtr(true, add($base_offset, 32)) } - // $t20 := borrow_field>.v($t19) + // $t20 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t19) $t20 := $t19 // $t21 := 13 $t21 := 13 // $t22 := 43 $t22 := 43 - // $t23 := pack GlobalVectors::S($t21, $t22) + // $t23 := pack 0x2::GlobalVectors::S($t21, $t22) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t21) $MemoryStoreU64(add($mem, 16), $t22) $t23 := $mem } - // vector::push_back($t20, $t23) + // vector::push_back<0x2::GlobalVectors::S>($t20, $t23) A1_vector_push_back$A2_GlobalVectors_S$($t20, $t23) // $t24 := 0x42 $t24 := 0x42 - // $t25 := borrow_global>($t24) + // $t25 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t24) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t24) if iszero($AlignedStorageLoad($base_offset)) { @@ -5817,24 +5817,24 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { } $t25 := $MakePtr(true, add($base_offset, 32)) } - // $t26 := 
borrow_field>.v($t25) + // $t26 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t25) $t26 := $t25 // $t27 := 14 $t27 := 14 // $t28 := 44 $t28 := 44 - // $t29 := pack GlobalVectors::S($t27, $t28) + // $t29 := pack 0x2::GlobalVectors::S($t27, $t28) { let $mem := $Malloc(24) $MemoryStoreU128(add($mem, 0), $t27) $MemoryStoreU64(add($mem, 16), $t28) $t29 := $mem } - // vector::push_back($t26, $t29) + // vector::push_back<0x2::GlobalVectors::S>($t26, $t29) A1_vector_push_back$A2_GlobalVectors_S$($t26, $t29) // $t30 := 0x42 $t30 := 0x42 - // $t31 := borrow_global>($t30) + // $t31 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t30) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t30) if iszero($AlignedStorageLoad($base_offset)) { @@ -5842,9 +5842,9 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { } $t31 := $MakePtr(true, add($base_offset, 32)) } - // $t32 := borrow_field>.v($t31) + // $t32 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t31) $t32 := $t31 - // $t33 := vector::length($t32) + // $t33 := vector::length<0x2::GlobalVectors::S>($t32) $t33 := A1_vector_length$A2_GlobalVectors_S$($t32) // $t34 := 5 $t34 := 5 @@ -5859,7 +5859,7 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { // label L2 // $t37 := 0x42 $t37 := 0x42 - // $t38 := borrow_global>($t37) + // $t38 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t37) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t37) if iszero($AlignedStorageLoad($base_offset)) { @@ -5867,13 +5867,13 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { } $t38 := $MakePtr(true, add($base_offset, 32)) } - // $t39 := borrow_field>.v($t38) + // $t39 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t38) $t39 := $t38 // $t40 := 0 $t40 := 0 - // $t41 := vector::borrow($t39, $t40) + // $t41 := vector::borrow<0x2::GlobalVectors::S>($t39, $t40) $t41 := A1_vector_borrow$A2_GlobalVectors_S$($t39, $t40) 
- // $t42 := borrow_field.x($t41) + // $t42 := borrow_field<0x2::GlobalVectors::S>.x($t41) $t42 := $t41 // $t43 := read_ref($t42) $t43 := $LoadU128($t42) @@ -5902,7 +5902,7 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { // label L5 // $t47 := 0x42 $t47 := 0x42 - // $t48 := borrow_global>($t47) + // $t48 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t47) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t47) if iszero($AlignedStorageLoad($base_offset)) { @@ -5910,13 +5910,13 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { } $t48 := $MakePtr(true, add($base_offset, 32)) } - // $t49 := borrow_field>.v($t48) + // $t49 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t48) $t49 := $t48 // $t50 := 1 $t50 := 1 - // $t51 := vector::borrow($t49, $t50) + // $t51 := vector::borrow<0x2::GlobalVectors::S>($t49, $t50) $t51 := A1_vector_borrow$A2_GlobalVectors_S$($t49, $t50) - // $t52 := borrow_field.x($t51) + // $t52 := borrow_field<0x2::GlobalVectors::S>.x($t51) $t52 := $t51 // $t53 := read_ref($t52) $t53 := $LoadU128($t52) @@ -5945,7 +5945,7 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { // label L8 // $t57 := 0x42 $t57 := 0x42 - // $t58 := borrow_global>($t57) + // $t58 := borrow_global<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>($t57) { let $base_offset := $MakeTypeStorageBase(0, 0x27c9b6b7, $t57) if iszero($AlignedStorageLoad($base_offset)) { @@ -5953,13 +5953,13 @@ object "test_A2_GlobalVectors_test_push_back_struct_global" { } $t58 := $MakePtr(true, add($base_offset, 32)) } - // $t59 := borrow_field>.v($t58) + // $t59 := borrow_field<0x2::GlobalVectors::T<0x2::GlobalVectors::S>>.v($t58) $t59 := $t58 // $t60 := 2 $t60 := 2 - // $t61 := vector::borrow($t59, $t60) + // $t61 := vector::borrow<0x2::GlobalVectors::S>($t59, $t60) $t61 := A1_vector_borrow$A2_GlobalVectors_S$($t59, $t60) - // $t62 := borrow_field.x($t61) + // $t62 := borrow_field<0x2::GlobalVectors::S>.x($t61) $t62 := 
$t61 // $t63 := read_ref($t62) $t63 := $LoadU128($t62) @@ -6327,13 +6327,13 @@ object "test_A2_GlobalVectors_test_read_ref_copy" { $t6 := $MakePtr(false, add($locals, 32)) // $t7 := move($t1) $t7 := mload($locals) - // $t8 := pack GlobalVectors::T($t7) + // $t8 := pack 0x2::GlobalVectors::T($t7) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t7) $t8 := $mem } - // move_to>($t8, $t6) + // move_to<0x2::GlobalVectors::T>($t8, $t6) { let $base_offset := $MakeTypeStorageBase(0, 0x10b7746c, $LoadU256($t6)) if $AlignedStorageLoad($base_offset) { @@ -6362,7 +6362,7 @@ object "test_A2_GlobalVectors_test_read_ref_copy" { } // $t9 := 0x42 $t9 := 0x42 - // $t10 := borrow_global>($t9) + // $t10 := borrow_global<0x2::GlobalVectors::T>($t9) { let $base_offset := $MakeTypeStorageBase(0, 0x10b7746c, $t9) if iszero($AlignedStorageLoad($base_offset)) { @@ -6370,7 +6370,7 @@ object "test_A2_GlobalVectors_test_read_ref_copy" { } $t10 := $MakePtr(true, add($base_offset, 32)) } - // $t11 := borrow_field>.v($t10) + // $t11 := borrow_field<0x2::GlobalVectors::T>.v($t10) $t11 := $t10 // $t2 := read_ref($t11) mstore(add($locals, 64), $LoadU256($t11)) @@ -6802,13 +6802,13 @@ object "test_A2_GlobalVectors_test_swap_global" { $t9 := $MakePtr(false, add($locals, 32)) // $t10 := move($t1) $t10 := mload($locals) - // $t11 := pack GlobalVectors::T($t10) + // $t11 := pack 0x2::GlobalVectors::T($t10) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t10) $t11 := $mem } - // move_to>($t11, $t9) + // move_to<0x2::GlobalVectors::T>($t11, $t9) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $LoadU256($t9)) if $AlignedStorageLoad($base_offset) { @@ -6837,7 +6837,7 @@ object "test_A2_GlobalVectors_test_swap_global" { } // $t12 := 0x42 $t12 := 0x42 - // $t13 := borrow_global>($t12) + // $t13 := borrow_global<0x2::GlobalVectors::T>($t12) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t12) if iszero($AlignedStorageLoad($base_offset)) { @@ -6845,7 +6845,7 @@ object 
"test_A2_GlobalVectors_test_swap_global" { } $t13 := $MakePtr(true, add($base_offset, 32)) } - // $t14 := borrow_field>.v($t13) + // $t14 := borrow_field<0x2::GlobalVectors::T>.v($t13) $t14 := $t13 // $t15 := 0 $t15 := 0 @@ -6855,7 +6855,7 @@ object "test_A2_GlobalVectors_test_swap_global" { A1_vector_swap$u64$($t14, $t15, $t16) // $t17 := 0x42 $t17 := 0x42 - // $t18 := borrow_global>($t17) + // $t18 := borrow_global<0x2::GlobalVectors::T>($t17) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t17) if iszero($AlignedStorageLoad($base_offset)) { @@ -6863,7 +6863,7 @@ object "test_A2_GlobalVectors_test_swap_global" { } $t18 := $MakePtr(true, add($base_offset, 32)) } - // $t19 := borrow_field>.v($t18) + // $t19 := borrow_field<0x2::GlobalVectors::T>.v($t18) $t19 := $t18 // $t20 := vector::length($t19) $t20 := A1_vector_length$u64$($t19) @@ -6880,7 +6880,7 @@ object "test_A2_GlobalVectors_test_swap_global" { // label L2 // $t24 := 0x42 $t24 := 0x42 - // $t25 := borrow_global>($t24) + // $t25 := borrow_global<0x2::GlobalVectors::T>($t24) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t24) if iszero($AlignedStorageLoad($base_offset)) { @@ -6888,7 +6888,7 @@ object "test_A2_GlobalVectors_test_swap_global" { } $t25 := $MakePtr(true, add($base_offset, 32)) } - // $t26 := borrow_field>.v($t25) + // $t26 := borrow_field<0x2::GlobalVectors::T>.v($t25) $t26 := $t25 // $t27 := 0 $t27 := 0 @@ -6921,7 +6921,7 @@ object "test_A2_GlobalVectors_test_swap_global" { // label L5 // $t33 := 0x42 $t33 := 0x42 - // $t34 := borrow_global>($t33) + // $t34 := borrow_global<0x2::GlobalVectors::T>($t33) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t33) if iszero($AlignedStorageLoad($base_offset)) { @@ -6929,7 +6929,7 @@ object "test_A2_GlobalVectors_test_swap_global" { } $t34 := $MakePtr(true, add($base_offset, 32)) } - // $t35 := borrow_field>.v($t34) + // $t35 := borrow_field<0x2::GlobalVectors::T>.v($t34) $t35 := $t34 // $t36 := 1 $t36 := 1 @@ -6962,7 
+6962,7 @@ object "test_A2_GlobalVectors_test_swap_global" { // label L8 // $t42 := 0x42 $t42 := 0x42 - // $t43 := borrow_global>($t42) + // $t43 := borrow_global<0x2::GlobalVectors::T>($t42) { let $base_offset := $MakeTypeStorageBase(0, 0x7da2a540, $t42) if iszero($AlignedStorageLoad($base_offset)) { @@ -6970,7 +6970,7 @@ object "test_A2_GlobalVectors_test_swap_global" { } $t43 := $MakePtr(true, add($base_offset, 32)) } - // $t44 := borrow_field>.v($t43) + // $t44 := borrow_field<0x2::GlobalVectors::T>.v($t43) $t44 := $t43 // $t45 := 2 $t45 := 2 diff --git a/third_party/move/evm/move-to-yul/tests/Resources.exp b/third_party/move/evm/move-to-yul/tests/Resources.exp index e75268ee3b854..d28737892fc66 100644 --- a/third_party/move/evm/move-to-yul/tests/Resources.exp +++ b/third_party/move/evm/move-to-yul/tests/Resources.exp @@ -81,7 +81,7 @@ object "test_A2_M_test_increment_a" { A2_M_increment_a($t4) // $t5 := 0x3 $t5 := 0x3 - // $t6 := borrow_global($t5) + // $t6 := borrow_global<0x2::M::S>($t5) { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $t5) if iszero($AlignedStorageLoad($base_offset)) { @@ -89,7 +89,7 @@ object "test_A2_M_test_increment_a" { } $t6 := $MakePtr(true, add($base_offset, 32)) } - // $t7 := borrow_field.a($t6) + // $t7 := borrow_field<0x2::M::S>.a($t6) $t7 := $IndexPtr($t6, 32) // $t8 := read_ref($t7) $t8 := $LoadU64($t7) @@ -113,7 +113,7 @@ object "test_A2_M_test_increment_a" { function A2_M_increment_a(addr) { let r, $t2, $t3, $t4, $t5, $t6, $t7 - // $t2 := borrow_global($t0) + // $t2 := borrow_global<0x2::M::S>($t0) { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, addr) if iszero($AlignedStorageLoad($base_offset)) { @@ -121,7 +121,7 @@ object "test_A2_M_test_increment_a" { } $t2 := $MakePtr(true, add($base_offset, 32)) } - // $t3 := borrow_field.a($t2) + // $t3 := borrow_field<0x2::M::S>.a($t2) $t3 := $IndexPtr($t2, 32) // $t4 := read_ref($t3) $t4 := $LoadU64($t3) @@ -129,7 +129,7 @@ object "test_A2_M_test_increment_a" { $t5 
:= 1 // $t6 := +($t4, $t5) $t6 := $AddU64($t4, $t5) - // $t7 := borrow_field.a($t2) + // $t7 := borrow_field<0x2::M::S>.a($t2) $t7 := $IndexPtr($t2, 32) // write_ref($t7, $t6) $StoreU64($t7, $t6) @@ -148,13 +148,13 @@ object "test_A2_M_test_increment_a" { $t6 := $AddU64(a, a) // $t7 := (u128)($t6) $t7 := $CastU128($t6) - // $t8 := pack M::S2($t7) + // $t8 := pack 0x2::M::S2($t7) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), $t7) $t8 := $mem } - // $t9 := pack M::S($t1, $t5, $t8) + // $t9 := pack 0x2::M::S($t1, $t5, $t8) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -162,7 +162,7 @@ object "test_A2_M_test_increment_a" { $MemoryStoreU256(add($mem, 0), $t8) $t9 := $mem } - // move_to($t9, $t0) + // move_to<0x2::M::S>($t9, $t0) { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $LoadU256(sg)) if $AlignedStorageLoad($base_offset) { @@ -441,7 +441,7 @@ object "test_A2_M_test_publish" { A2_M_publish($t2, $t3) // $t4 := 0x3 $t4 := 0x3 - // $t5 := exists($t4) + // $t5 := exists<0x2::M::S>($t4) $t5 := $AlignedStorageLoad($MakeTypeStorageBase(0, 0x698265eb, $t4)) // if ($t5) goto L1 else goto L0 switch $t5 @@ -452,7 +452,7 @@ object "test_A2_M_test_publish" { // label L2 // $t7 := 0x3 $t7 := 0x3 - // $t8 := borrow_global($t7) + // $t8 := borrow_global<0x2::M::S>($t7) { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $t7) if iszero($AlignedStorageLoad($base_offset)) { @@ -460,7 +460,7 @@ object "test_A2_M_test_publish" { } $t8 := $MakePtr(true, add($base_offset, 32)) } - // $t9 := borrow_field.a($t8) + // $t9 := borrow_field<0x2::M::S>.a($t8) $t9 := $IndexPtr($t8, 32) // $t10 := read_ref($t9) $t10 := $LoadU64($t9) @@ -489,7 +489,7 @@ object "test_A2_M_test_publish" { // label L5 // $t14 := 0x3 $t14 := 0x3 - // $t15 := borrow_global($t14) + // $t15 := borrow_global<0x2::M::S>($t14) { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $t14) if iszero($AlignedStorageLoad($base_offset)) { @@ -497,7 +497,7 @@ object 
"test_A2_M_test_publish" { } $t15 := $MakePtr(true, add($base_offset, 32)) } - // $t16 := borrow_field.b($t15) + // $t16 := borrow_field<0x2::M::S>.b($t15) $t16 := $IndexPtr($t15, 40) // $t17 := read_ref($t16) $t17 := $LoadU8($t16) @@ -526,7 +526,7 @@ object "test_A2_M_test_publish" { // label L8 // $t21 := 0x3 $t21 := 0x3 - // $t22 := borrow_global($t21) + // $t22 := borrow_global<0x2::M::S>($t21) { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $t21) if iszero($AlignedStorageLoad($base_offset)) { @@ -534,11 +534,11 @@ object "test_A2_M_test_publish" { } $t22 := $MakePtr(true, add($base_offset, 32)) } - // $t23 := borrow_field.c($t22) + // $t23 := borrow_field<0x2::M::S>.c($t22) { $t23 := $MakePtr($IsStoragePtr($t22), $LoadU256($t22)) } - // $t24 := borrow_field.x($t23) + // $t24 := borrow_field<0x2::M::S2>.x($t23) $t24 := $t23 // $t25 := read_ref($t24) $t25 := $LoadU128($t24) @@ -584,13 +584,13 @@ object "test_A2_M_test_publish" { $t6 := $AddU64(a, a) // $t7 := (u128)($t6) $t7 := $CastU128($t6) - // $t8 := pack M::S2($t7) + // $t8 := pack 0x2::M::S2($t7) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), $t7) $t8 := $mem } - // $t9 := pack M::S($t1, $t5, $t8) + // $t9 := pack 0x2::M::S($t1, $t5, $t8) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -598,7 +598,7 @@ object "test_A2_M_test_publish" { $MemoryStoreU256(add($mem, 0), $t8) $t9 := $mem } - // move_to($t9, $t0) + // move_to<0x2::M::S>($t9, $t0) { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $LoadU256(sg)) if $AlignedStorageLoad($base_offset) { @@ -870,7 +870,7 @@ object "test_A2_M_test_publish_t" { A2_M_publish_t($t2, $t3) // $t4 := 0x3 $t4 := 0x3 - // $t5 := exists($t4) + // $t5 := exists<0x2::M::T>($t4) $t5 := $AlignedStorageLoad($MakeTypeStorageBase(0, 0x3948ca0a, $t4)) // if ($t5) goto L1 else goto L0 switch $t5 @@ -881,7 +881,7 @@ object "test_A2_M_test_publish_t" { // label L2 // $t7 := 0x3 $t7 := 0x3 - // $t8 := borrow_global($t7) + // $t8 := 
borrow_global<0x2::M::T>($t7) { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $t7) if iszero($AlignedStorageLoad($base_offset)) { @@ -889,11 +889,11 @@ object "test_A2_M_test_publish_t" { } $t8 := $MakePtr(true, add($base_offset, 32)) } - // $t9 := borrow_field.s($t8) + // $t9 := borrow_field<0x2::M::T>.s($t8) { $t9 := $MakePtr($IsStoragePtr($t8), $LoadU256($t8)) } - // $t10 := borrow_field.a($t9) + // $t10 := borrow_field<0x2::M::S>.a($t9) $t10 := $IndexPtr($t9, 32) // $t11 := read_ref($t10) $t11 := $LoadU64($t10) @@ -922,7 +922,7 @@ object "test_A2_M_test_publish_t" { // label L5 // $t15 := 0x3 $t15 := 0x3 - // $t16 := borrow_global($t15) + // $t16 := borrow_global<0x2::M::T>($t15) { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $t15) if iszero($AlignedStorageLoad($base_offset)) { @@ -930,11 +930,11 @@ object "test_A2_M_test_publish_t" { } $t16 := $MakePtr(true, add($base_offset, 32)) } - // $t17 := borrow_field.s($t16) + // $t17 := borrow_field<0x2::M::T>.s($t16) { $t17 := $MakePtr($IsStoragePtr($t16), $LoadU256($t16)) } - // $t18 := borrow_field.b($t17) + // $t18 := borrow_field<0x2::M::S>.b($t17) $t18 := $IndexPtr($t17, 40) // $t19 := read_ref($t18) $t19 := $LoadU8($t18) @@ -963,7 +963,7 @@ object "test_A2_M_test_publish_t" { // label L8 // $t23 := 0x3 $t23 := 0x3 - // $t24 := borrow_global($t23) + // $t24 := borrow_global<0x2::M::T>($t23) { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $t23) if iszero($AlignedStorageLoad($base_offset)) { @@ -971,15 +971,15 @@ object "test_A2_M_test_publish_t" { } $t24 := $MakePtr(true, add($base_offset, 32)) } - // $t25 := borrow_field.s($t24) + // $t25 := borrow_field<0x2::M::T>.s($t24) { $t25 := $MakePtr($IsStoragePtr($t24), $LoadU256($t24)) } - // $t26 := borrow_field.c($t25) + // $t26 := borrow_field<0x2::M::S>.c($t25) { $t26 := $MakePtr($IsStoragePtr($t25), $LoadU256($t25)) } - // $t27 := borrow_field.x($t26) + // $t27 := borrow_field<0x2::M::S2>.x($t26) $t27 := $t26 // $t28 := 
read_ref($t27) $t28 := $LoadU128($t27) @@ -1025,13 +1025,13 @@ object "test_A2_M_test_publish_t" { $t6 := $AddU64(a, a) // $t7 := (u128)($t6) $t7 := $CastU128($t6) - // $t8 := pack M::S2($t7) + // $t8 := pack 0x2::M::S2($t7) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), $t7) $t8 := $mem } - // $t9 := pack M::S($t1, $t5, $t8) + // $t9 := pack 0x2::M::S($t1, $t5, $t8) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -1039,13 +1039,13 @@ object "test_A2_M_test_publish_t" { $MemoryStoreU256(add($mem, 0), $t8) $t9 := $mem } - // $t10 := pack M::T($t9) + // $t10 := pack 0x2::M::T($t9) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t9) $t10 := $mem } - // move_to($t10, $t0) + // move_to<0x2::M::T>($t10, $t0) { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $LoadU256(sg)) if $AlignedStorageLoad($base_offset) { @@ -1325,12 +1325,12 @@ object "test_A2_M_test_unpublish" { $t6 := 0x3 // $t7 := M::unpublish($t6) $t7 := A2_M_unpublish($t6) - // ($t8, $t9, $t10) := unpack M::S($t7) + // ($t8, $t9, $t10) := unpack 0x2::M::S($t7) $t8 := $MemoryLoadU64(add($t7, 32)) $t9 := $MemoryLoadU8(add($t7, 40)) $t10 := $MemoryLoadU256(add($t7, 0)) $Free($t7, 41) - // $t11 := unpack M::S2($t10) + // $t11 := unpack 0x2::M::S2($t10) $t11 := $MemoryLoadU128(add($t10, 0)) $Free($t10, 16) // $t12 := 33 @@ -1399,7 +1399,7 @@ object "test_A2_M_test_unpublish" { function A2_M_unpublish(a) -> $result { let $t1 - // $t1 := move_from($t0) + // $t1 := move_from<0x2::M::S>($t0) { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, a) if iszero($AlignedStorageLoad($base_offset)) { @@ -1440,13 +1440,13 @@ object "test_A2_M_test_unpublish" { $t6 := $AddU64(a, a) // $t7 := (u128)($t6) $t7 := $CastU128($t6) - // $t8 := pack M::S2($t7) + // $t8 := pack 0x2::M::S2($t7) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), $t7) $t8 := $mem } - // $t9 := pack M::S($t1, $t5, $t8) + // $t9 := pack 0x2::M::S($t1, $t5, $t8) { let $mem := $Malloc(41) 
$MemoryStoreU64(add($mem, 32), a) @@ -1454,7 +1454,7 @@ object "test_A2_M_test_unpublish" { $MemoryStoreU256(add($mem, 0), $t8) $t9 := $mem } - // move_to($t9, $t0) + // move_to<0x2::M::S>($t9, $t0) { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $LoadU256(sg)) if $AlignedStorageLoad($base_offset) { diff --git a/third_party/move/evm/move-to-yul/tests/Resources.exp.capture-source-info b/third_party/move/evm/move-to-yul/tests/Resources.exp.capture-source-info index c04def6d750e5..9832954c64548 100644 --- a/third_party/move/evm/move-to-yul/tests/Resources.exp.capture-source-info +++ b/third_party/move/evm/move-to-yul/tests/Resources.exp.capture-source-info @@ -96,7 +96,7 @@ object "test_A2_M_test_increment_a" { // $t5 := 0x3 /// @src 1:1309:1311 $t5 := 0x3 - // $t6 := borrow_global($t5) + // $t6 := borrow_global<0x2::M::S>($t5) /// @src 1:1292:1305 { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $t5) @@ -105,7 +105,7 @@ object "test_A2_M_test_increment_a" { } $t6 := $MakePtr(true, add($base_offset, 32)) } - // $t7 := borrow_field.a($t6) + // $t7 := borrow_field<0x2::M::S>.a($t6) /// @src 1:1292:1314 $t7 := $IndexPtr($t6, 32) // $t8 := read_ref($t7) @@ -135,7 +135,7 @@ object "test_A2_M_test_increment_a" { function A2_M_increment_a(addr) { let r, $t2, $t3, $t4, $t5, $t6, $t7 - // $t2 := borrow_global($t0) + // $t2 := borrow_global<0x2::M::S>($t0) /// @src 1:1106:1123 { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, addr) @@ -144,7 +144,7 @@ object "test_A2_M_test_increment_a" { } $t2 := $MakePtr(true, add($base_offset, 32)) } - // $t3 := borrow_field.a($t2) + // $t3 := borrow_field<0x2::M::S>.a($t2) /// @src 1:1148:1151 $t3 := $IndexPtr($t2, 32) // $t4 := read_ref($t3) @@ -156,7 +156,7 @@ object "test_A2_M_test_increment_a" { // $t6 := +($t4, $t5) /// @src 1:1152:1153 $t6 := $AddU64($t4, $t5) - // $t7 := borrow_field.a($t2) + // $t7 := borrow_field<0x2::M::S>.a($t2) /// @src 1:1142:1145 $t7 := $IndexPtr($t2, 32) // write_ref($t7, $t6) @@ 
-183,14 +183,14 @@ object "test_A2_M_test_increment_a" { // $t7 := (u128)($t6) /// @src 1:405:422 $t7 := $CastU128($t6) - // $t8 := pack M::S2($t7) + // $t8 := pack 0x2::M::S2($t7) /// @src 1:399:423 { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), $t7) $t8 := $mem } - // $t9 := pack M::S($t1, $t5, $t8) + // $t9 := pack 0x2::M::S($t1, $t5, $t8) /// @src 1:371:424 { let $mem := $Malloc(41) @@ -199,7 +199,7 @@ object "test_A2_M_test_increment_a" { $MemoryStoreU256(add($mem, 0), $t8) $t9 := $mem } - // move_to($t9, $t0) + // move_to<0x2::M::S>($t9, $t0) /// @src 1:434:441 { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $LoadU256(sg)) @@ -492,7 +492,7 @@ object "test_A2_M_test_publish" { // $t4 := 0x3 /// @src 1:566:568 $t4 := 0x3 - // $t5 := exists($t4) + // $t5 := exists<0x2::M::S>($t4) /// @src 1:556:562 $t5 := $AlignedStorageLoad($MakeTypeStorageBase(0, 0x698265eb, $t4)) // if ($t5) goto L1 else goto L0 @@ -506,7 +506,7 @@ object "test_A2_M_test_publish" { // $t7 := 0x3 /// @src 1:609:611 $t7 := 0x3 - // $t8 := borrow_global($t7) + // $t8 := borrow_global<0x2::M::S>($t7) /// @src 1:592:605 { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $t7) @@ -515,7 +515,7 @@ object "test_A2_M_test_publish" { } $t8 := $MakePtr(true, add($base_offset, 32)) } - // $t9 := borrow_field.a($t8) + // $t9 := borrow_field<0x2::M::S>.a($t8) /// @src 1:592:614 $t9 := $IndexPtr($t8, 32) // $t10 := read_ref($t9) @@ -553,7 +553,7 @@ object "test_A2_M_test_publish" { // $t14 := 0x3 /// @src 1:660:662 $t14 := 0x3 - // $t15 := borrow_global($t14) + // $t15 := borrow_global<0x2::M::S>($t14) /// @src 1:643:656 { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $t14) @@ -562,7 +562,7 @@ object "test_A2_M_test_publish" { } $t15 := $MakePtr(true, add($base_offset, 32)) } - // $t16 := borrow_field.b($t15) + // $t16 := borrow_field<0x2::M::S>.b($t15) /// @src 1:643:665 $t16 := $IndexPtr($t15, 40) // $t17 := read_ref($t16) @@ -600,7 +600,7 @@ object 
"test_A2_M_test_publish" { // $t21 := 0x3 /// @src 1:711:713 $t21 := 0x3 - // $t22 := borrow_global($t21) + // $t22 := borrow_global<0x2::M::S>($t21) /// @src 1:694:707 { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $t21) @@ -609,12 +609,12 @@ object "test_A2_M_test_publish" { } $t22 := $MakePtr(true, add($base_offset, 32)) } - // $t23 := borrow_field.c($t22) + // $t23 := borrow_field<0x2::M::S>.c($t22) /// @src 1:694:716 { $t23 := $MakePtr($IsStoragePtr($t22), $LoadU256($t22)) } - // $t24 := borrow_field.x($t23) + // $t24 := borrow_field<0x2::M::S2>.x($t23) /// @src 1:694:718 $t24 := $t23 // $t25 := read_ref($t24) @@ -674,14 +674,14 @@ object "test_A2_M_test_publish" { // $t7 := (u128)($t6) /// @src 1:405:422 $t7 := $CastU128($t6) - // $t8 := pack M::S2($t7) + // $t8 := pack 0x2::M::S2($t7) /// @src 1:399:423 { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), $t7) $t8 := $mem } - // $t9 := pack M::S($t1, $t5, $t8) + // $t9 := pack 0x2::M::S($t1, $t5, $t8) /// @src 1:371:424 { let $mem := $Malloc(41) @@ -690,7 +690,7 @@ object "test_A2_M_test_publish" { $MemoryStoreU256(add($mem, 0), $t8) $t9 := $mem } - // move_to($t9, $t0) + // move_to<0x2::M::S>($t9, $t0) /// @src 1:434:441 { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, $LoadU256(sg)) @@ -976,7 +976,7 @@ object "test_A2_M_test_publish_t" { // $t4 := 0x3 /// @src 1:1602:1604 $t4 := 0x3 - // $t5 := exists($t4) + // $t5 := exists<0x2::M::T>($t4) /// @src 1:1592:1598 $t5 := $AlignedStorageLoad($MakeTypeStorageBase(0, 0x3948ca0a, $t4)) // if ($t5) goto L1 else goto L0 @@ -990,7 +990,7 @@ object "test_A2_M_test_publish_t" { // $t7 := 0x3 /// @src 1:1646:1648 $t7 := 0x3 - // $t8 := borrow_global($t7) + // $t8 := borrow_global<0x2::M::T>($t7) /// @src 1:1629:1642 { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $t7) @@ -999,12 +999,12 @@ object "test_A2_M_test_publish_t" { } $t8 := $MakePtr(true, add($base_offset, 32)) } - // $t9 := borrow_field.s($t8) + // $t9 := 
borrow_field<0x2::M::T>.s($t8) /// @src 1:1629:1651 { $t9 := $MakePtr($IsStoragePtr($t8), $LoadU256($t8)) } - // $t10 := borrow_field.a($t9) + // $t10 := borrow_field<0x2::M::S>.a($t9) /// @src 1:1629:1653 $t10 := $IndexPtr($t9, 32) // $t11 := read_ref($t10) @@ -1042,7 +1042,7 @@ object "test_A2_M_test_publish_t" { // $t15 := 0x3 /// @src 1:1700:1702 $t15 := 0x3 - // $t16 := borrow_global($t15) + // $t16 := borrow_global<0x2::M::T>($t15) /// @src 1:1683:1696 { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $t15) @@ -1051,12 +1051,12 @@ object "test_A2_M_test_publish_t" { } $t16 := $MakePtr(true, add($base_offset, 32)) } - // $t17 := borrow_field.s($t16) + // $t17 := borrow_field<0x2::M::T>.s($t16) /// @src 1:1683:1705 { $t17 := $MakePtr($IsStoragePtr($t16), $LoadU256($t16)) } - // $t18 := borrow_field.b($t17) + // $t18 := borrow_field<0x2::M::S>.b($t17) /// @src 1:1683:1707 $t18 := $IndexPtr($t17, 40) // $t19 := read_ref($t18) @@ -1094,7 +1094,7 @@ object "test_A2_M_test_publish_t" { // $t23 := 0x3 /// @src 1:1754:1756 $t23 := 0x3 - // $t24 := borrow_global($t23) + // $t24 := borrow_global<0x2::M::T>($t23) /// @src 1:1737:1750 { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $t23) @@ -1103,17 +1103,17 @@ object "test_A2_M_test_publish_t" { } $t24 := $MakePtr(true, add($base_offset, 32)) } - // $t25 := borrow_field.s($t24) + // $t25 := borrow_field<0x2::M::T>.s($t24) /// @src 1:1737:1759 { $t25 := $MakePtr($IsStoragePtr($t24), $LoadU256($t24)) } - // $t26 := borrow_field.c($t25) + // $t26 := borrow_field<0x2::M::S>.c($t25) /// @src 1:1737:1761 { $t26 := $MakePtr($IsStoragePtr($t25), $LoadU256($t25)) } - // $t27 := borrow_field.x($t26) + // $t27 := borrow_field<0x2::M::S2>.x($t26) /// @src 1:1737:1763 $t27 := $t26 // $t28 := read_ref($t27) @@ -1173,14 +1173,14 @@ object "test_A2_M_test_publish_t" { // $t7 := (u128)($t6) /// @src 1:1434:1451 $t7 := $CastU128($t6) - // $t8 := pack M::S2($t7) + // $t8 := pack 0x2::M::S2($t7) /// @src 1:1428:1452 { let 
$mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), $t7) $t8 := $mem } - // $t9 := pack M::S($t1, $t5, $t8) + // $t9 := pack 0x2::M::S($t1, $t5, $t8) /// @src 1:1400:1453 { let $mem := $Malloc(41) @@ -1189,14 +1189,14 @@ object "test_A2_M_test_publish_t" { $MemoryStoreU256(add($mem, 0), $t8) $t9 := $mem } - // $t10 := pack M::T($t9) + // $t10 := pack 0x2::M::T($t9) /// @src 1:1393:1454 { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t9) $t10 := $mem } - // move_to($t10, $t0) + // move_to<0x2::M::T>($t10, $t0) /// @src 1:1464:1471 { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $LoadU256(sg)) @@ -1491,13 +1491,13 @@ object "test_A2_M_test_unpublish" { // $t7 := M::unpublish($t6) /// @src 1:931:944 $t7 := A2_M_unpublish($t6) - // ($t8, $t9, $t10) := unpack M::S($t7) + // ($t8, $t9, $t10) := unpack 0x2::M::S($t7) /// @src 1:911:928 $t8 := $MemoryLoadU64(add($t7, 32)) $t9 := $MemoryLoadU8(add($t7, 40)) $t10 := $MemoryLoadU256(add($t7, 0)) $Free($t7, 41) - // $t11 := unpack M::S2($t10) + // $t11 := unpack 0x2::M::S2($t10) /// @src 1:922:927 $t11 := $MemoryLoadU128(add($t10, 0)) $Free($t10, 16) @@ -1583,7 +1583,7 @@ object "test_A2_M_test_unpublish" { function A2_M_unpublish(a) -> $result { let $t1 - // $t1 := move_from($t0) + // $t1 := move_from<0x2::M::S>($t0) /// @src 1:793:802 { let $base_offset := $MakeTypeStorageBase(0, 0x698265eb, a) @@ -1631,14 +1631,14 @@ object "test_A2_M_test_unpublish" { // $t7 := (u128)($t6) /// @src 1:405:422 $t7 := $CastU128($t6) - // $t8 := pack M::S2($t7) + // $t8 := pack 0x2::M::S2($t7) /// @src 1:399:423 { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), $t7) $t8 := $mem } - // $t9 := pack M::S($t1, $t5, $t8) + // $t9 := pack 0x2::M::S($t1, $t5, $t8) /// @src 1:371:424 { let $mem := $Malloc(41) @@ -1647,7 +1647,7 @@ object "test_A2_M_test_unpublish" { $MemoryStoreU256(add($mem, 0), $t8) $t9 := $mem } - // move_to($t9, $t0) + // move_to<0x2::M::S>($t9, $t0) /// @src 1:434:441 { let $base_offset := 
$MakeTypeStorageBase(0, 0x698265eb, $LoadU256(sg)) diff --git a/third_party/move/evm/move-to-yul/tests/Structs.exp b/third_party/move/evm/move-to-yul/tests/Structs.exp index d79871424ed16..078aae2d530ec 100644 --- a/third_party/move/evm/move-to-yul/tests/Structs.exp +++ b/third_party/move/evm/move-to-yul/tests/Structs.exp @@ -54,7 +54,7 @@ object "test_A2_M_test_drop" { $t1 := false // $t2 := M::pack_S($t0, $t1) $t2 := A2_M_pack_S($t0, $t1) - // $t3 := pack M::S3($t2) + // $t3 := pack 0x2::M::S3($t2) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t2) @@ -79,7 +79,7 @@ object "test_A2_M_test_drop" { $t2 := $CastU128(a) // $t3 := M::pack_S2($t2) $t3 := A2_M_pack_S2($t2) - // $t4 := pack M::S($t0, $t1, $t3) + // $t4 := pack 0x2::M::S($t0, $t1, $t3) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -93,7 +93,7 @@ object "test_A2_M_test_drop" { function A2_M_pack_S2(x) -> $result { let $t1 - // $t1 := pack M::S2($t0) + // $t1 := pack 0x2::M::S2($t0) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), x) @@ -330,7 +330,7 @@ object "test_A2_M_test_equality" { $t2 := $CastU128(a) // $t3 := M::pack_S2($t2) $t3 := A2_M_pack_S2($t2) - // $t4 := pack M::S($t0, $t1, $t3) + // $t4 := pack 0x2::M::S($t0, $t1, $t3) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -344,7 +344,7 @@ object "test_A2_M_test_equality" { function A2_M_pack_S2(x) -> $result { let $t1 - // $t1 := pack M::S2($t0) + // $t1 := pack 0x2::M::S2($t0) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), x) @@ -460,7 +460,7 @@ object "test_A2_M_test_pack_S" { s := A2_M_pack_S($t1, $t2) // $t3 := borrow_local($t0) $t3 := $MakePtr(false, s) - // $t4 := borrow_field.a($t3) + // $t4 := borrow_field<0x2::M::S>.a($t3) $t4 := $IndexPtr($t3, 32) // $t5 := read_ref($t4) $t5 := $LoadU64($t4) @@ -477,7 +477,7 @@ object "test_A2_M_test_pack_S" { // label L2 // $t9 := borrow_local($t0) $t9 := $MakePtr(false, s) - // $t10 := borrow_field.b($t9) + // $t10 := 
borrow_field<0x2::M::S>.b($t9) $t10 := $IndexPtr($t9, 40) // $t11 := read_ref($t10) $t11 := $LoadU8($t10) @@ -506,11 +506,11 @@ object "test_A2_M_test_pack_S" { // label L5 // $t15 := borrow_local($t0) $t15 := $MakePtr(false, s) - // $t16 := borrow_field.c($t15) + // $t16 := borrow_field<0x2::M::S>.c($t15) { $t16 := $MakePtr($IsStoragePtr($t15), $LoadU256($t15)) } - // $t17 := borrow_field.x($t16) + // $t17 := borrow_field<0x2::M::S2>.x($t16) $t17 := $t16 // $t18 := read_ref($t17) $t18 := $LoadU128($t17) @@ -549,7 +549,7 @@ object "test_A2_M_test_pack_S" { $t2 := $CastU128(a) // $t3 := M::pack_S2($t2) $t3 := A2_M_pack_S2($t2) - // $t4 := pack M::S($t0, $t1, $t3) + // $t4 := pack 0x2::M::S($t0, $t1, $t3) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -563,7 +563,7 @@ object "test_A2_M_test_pack_S" { function A2_M_pack_S2(x) -> $result { let $t1 - // $t1 := pack M::S2($t0) + // $t1 := pack 0x2::M::S2($t0) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), x) @@ -780,7 +780,7 @@ object "test_A2_M_test_pack_S2" { s := A2_M_pack_S2($t1) // $t2 := borrow_local($t0) $t2 := $MakePtr(false, s) - // $t3 := borrow_field.x($t2) + // $t3 := borrow_field<0x2::M::S2>.x($t2) $t3 := $t2 // $t4 := read_ref($t3) $t4 := $LoadU128($t3) @@ -803,7 +803,7 @@ object "test_A2_M_test_pack_S2" { function A2_M_pack_S2(x) -> $result { let $t1 - // $t1 := pack M::S2($t0) + // $t1 := pack 0x2::M::S2($t0) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), x) @@ -952,7 +952,7 @@ object "test_A2_M_test_pack_S2_fail" { s := A2_M_pack_S2($t1) // $t2 := borrow_local($t0) $t2 := $MakePtr(false, s) - // $t3 := borrow_field.x($t2) + // $t3 := borrow_field<0x2::M::S2>.x($t2) $t3 := $t2 // $t4 := read_ref($t3) $t4 := $LoadU128($t3) @@ -975,7 +975,7 @@ object "test_A2_M_test_pack_S2_fail" { function A2_M_pack_S2(x) -> $result { let $t1 - // $t1 := pack M::S2($t0) + // $t1 := pack 0x2::M::S2($t0) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), x) @@ -1147,15 +1147,15 @@ 
object "test_A2_M_test_read_S" { function A2_M_read_S(s) -> $result { let $t1, $t2, $t3, $t4, $t5, $t6, $t7 - // $t1 := borrow_field.a($t0) + // $t1 := borrow_field<0x2::M::S>.a($t0) $t1 := $IndexPtr(s, 32) // $t2 := read_ref($t1) $t2 := $LoadU64($t1) - // $t3 := borrow_field.c($t0) + // $t3 := borrow_field<0x2::M::S>.c($t0) { $t3 := $MakePtr($IsStoragePtr(s), $LoadU256(s)) } - // $t4 := borrow_field.x($t3) + // $t4 := borrow_field<0x2::M::S2>.x($t3) $t4 := $t3 // $t5 := read_ref($t4) $t5 := $LoadU128($t4) @@ -1173,7 +1173,7 @@ object "test_A2_M_test_read_S" { $t2 := $CastU128(a) // $t3 := M::pack_S2($t2) $t3 := A2_M_pack_S2($t2) - // $t4 := pack M::S($t0, $t1, $t3) + // $t4 := pack 0x2::M::S($t0, $t1, $t3) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -1187,7 +1187,7 @@ object "test_A2_M_test_read_S" { function A2_M_pack_S2(x) -> $result { let $t1 - // $t1 := pack M::S2($t0) + // $t1 := pack 0x2::M::S2($t0) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), x) @@ -1394,7 +1394,7 @@ object "test_A2_M_test_read_and_write_S" { s := A2_M_read_and_write_S() // $t1 := borrow_local($t0) $t1 := $MakePtr(false, s) - // $t2 := borrow_field.a($t1) + // $t2 := borrow_field<0x2::M::S>.a($t1) $t2 := $IndexPtr($t1, 32) // $t3 := read_ref($t2) $t3 := $LoadU64($t2) @@ -1411,11 +1411,11 @@ object "test_A2_M_test_read_and_write_S" { // label L2 // $t7 := borrow_local($t0) $t7 := $MakePtr(false, s) - // $t8 := borrow_field.c($t7) + // $t8 := borrow_field<0x2::M::S>.c($t7) { $t8 := $MakePtr($IsStoragePtr($t7), $LoadU256($t7)) } - // $t9 := borrow_field.x($t8) + // $t9 := borrow_field<0x2::M::S2>.x($t8) $t9 := $t8 // $t10 := read_ref($t9) $t10 := $LoadU128($t9) @@ -1472,21 +1472,21 @@ object "test_A2_M_test_read_and_write_S" { function A2_M_write_S(s, v) { let $t2, $t3, $t4, $t5, $t6, $t7 - // $t2 := borrow_field.a($t0) + // $t2 := borrow_field<0x2::M::S>.a($t0) $t2 := $IndexPtr(s, 32) // write_ref($t2, $t1) $StoreU64($t2, v) - // $t3 := borrow_field.a($t0) + // 
$t3 := borrow_field<0x2::M::S>.a($t0) $t3 := $IndexPtr(s, 32) // $t4 := read_ref($t3) $t4 := $LoadU64($t3) // $t5 := (u128)($t4) $t5 := $CastU128($t4) - // $t6 := borrow_field.c($t0) + // $t6 := borrow_field<0x2::M::S>.c($t0) { $t6 := $MakePtr($IsStoragePtr(s), $LoadU256(s)) } - // $t7 := borrow_field.x($t6) + // $t7 := borrow_field<0x2::M::S2>.x($t6) $t7 := $t6 // write_ref($t7, $t5) $StoreU128($t7, $t5) @@ -1495,15 +1495,15 @@ object "test_A2_M_test_read_and_write_S" { function A2_M_read_S(s) -> $result { let $t1, $t2, $t3, $t4, $t5, $t6, $t7 - // $t1 := borrow_field.a($t0) + // $t1 := borrow_field<0x2::M::S>.a($t0) $t1 := $IndexPtr(s, 32) // $t2 := read_ref($t1) $t2 := $LoadU64($t1) - // $t3 := borrow_field.c($t0) + // $t3 := borrow_field<0x2::M::S>.c($t0) { $t3 := $MakePtr($IsStoragePtr(s), $LoadU256(s)) } - // $t4 := borrow_field.x($t3) + // $t4 := borrow_field<0x2::M::S2>.x($t3) $t4 := $t3 // $t5 := read_ref($t4) $t5 := $LoadU128($t4) @@ -1521,7 +1521,7 @@ object "test_A2_M_test_read_and_write_S" { $t2 := $CastU128(a) // $t3 := M::pack_S2($t2) $t3 := A2_M_pack_S2($t2) - // $t4 := pack M::S($t0, $t1, $t3) + // $t4 := pack 0x2::M::S($t0, $t1, $t3) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -1535,7 +1535,7 @@ object "test_A2_M_test_read_and_write_S" { function A2_M_pack_S2(x) -> $result { let $t1 - // $t1 := pack M::S2($t0) + // $t1 := pack 0x2::M::S2($t0) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), x) @@ -1800,7 +1800,7 @@ object "test_A2_M_test_unpack" { s1 := A2_M_unpack($t3) // $t4 := borrow_local($t0) $t4 := $MakePtr(false, s1) - // $t5 := borrow_field.x($t4) + // $t5 := borrow_field<0x2::M::S2>.x($t4) $t5 := $t4 // $t6 := read_ref($t5) $t6 := $LoadU128($t5) @@ -1823,7 +1823,7 @@ object "test_A2_M_test_unpack" { function A2_M_unpack(s) -> $result { let c, $t2, $t3, $t4 - // ($t2, $t3, $t4) := unpack M::S($t0) + // ($t2, $t3, $t4) := unpack 0x2::M::S($t0) $t2 := $MemoryLoadU64(add(s, 32)) $t3 := $MemoryLoadU8(add(s, 40)) $t4 
:= $MemoryLoadU256(add(s, 0)) @@ -1840,7 +1840,7 @@ object "test_A2_M_test_unpack" { $t2 := $CastU128(a) // $t3 := M::pack_S2($t2) $t3 := A2_M_pack_S2($t2) - // $t4 := pack M::S($t0, $t1, $t3) + // $t4 := pack 0x2::M::S($t0, $t1, $t3) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -1854,7 +1854,7 @@ object "test_A2_M_test_unpack" { function A2_M_pack_S2(x) -> $result { let $t1 - // $t1 := pack M::S2($t0) + // $t1 := pack 0x2::M::S2($t0) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), x) @@ -2039,7 +2039,7 @@ object "test_A2_M_test_write_S" { A2_M_write_S($t3, $t4) // $t5 := borrow_local($t0) $t5 := $MakePtr(false, s) - // $t6 := borrow_field.a($t5) + // $t6 := borrow_field<0x2::M::S>.a($t5) $t6 := $IndexPtr($t5, 32) // $t7 := read_ref($t6) $t7 := $LoadU64($t6) @@ -2056,11 +2056,11 @@ object "test_A2_M_test_write_S" { // label L2 // $t11 := borrow_local($t0) $t11 := $MakePtr(false, s) - // $t12 := borrow_field.c($t11) + // $t12 := borrow_field<0x2::M::S>.c($t11) { $t12 := $MakePtr($IsStoragePtr($t11), $LoadU256($t11)) } - // $t13 := borrow_field.x($t12) + // $t13 := borrow_field<0x2::M::S2>.x($t12) $t13 := $t12 // $t14 := read_ref($t13) $t14 := $LoadU128($t13) @@ -2095,21 +2095,21 @@ object "test_A2_M_test_write_S" { function A2_M_write_S(s, v) { let $t2, $t3, $t4, $t5, $t6, $t7 - // $t2 := borrow_field.a($t0) + // $t2 := borrow_field<0x2::M::S>.a($t0) $t2 := $IndexPtr(s, 32) // write_ref($t2, $t1) $StoreU64($t2, v) - // $t3 := borrow_field.a($t0) + // $t3 := borrow_field<0x2::M::S>.a($t0) $t3 := $IndexPtr(s, 32) // $t4 := read_ref($t3) $t4 := $LoadU64($t3) // $t5 := (u128)($t4) $t5 := $CastU128($t4) - // $t6 := borrow_field.c($t0) + // $t6 := borrow_field<0x2::M::S>.c($t0) { $t6 := $MakePtr($IsStoragePtr(s), $LoadU256(s)) } - // $t7 := borrow_field.x($t6) + // $t7 := borrow_field<0x2::M::S2>.x($t6) $t7 := $t6 // write_ref($t7, $t5) $StoreU128($t7, $t5) @@ -2122,7 +2122,7 @@ object "test_A2_M_test_write_S" { $t2 := $CastU128(a) // $t3 := 
M::pack_S2($t2) $t3 := A2_M_pack_S2($t2) - // $t4 := pack M::S($t0, $t1, $t3) + // $t4 := pack 0x2::M::S($t0, $t1, $t3) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -2136,7 +2136,7 @@ object "test_A2_M_test_write_S" { function A2_M_pack_S2(x) -> $result { let $t1 - // $t1 := pack M::S2($t0) + // $t1 := pack 0x2::M::S2($t0) { let $mem := $Malloc(16) $MemoryStoreU128(add($mem, 0), x) diff --git a/third_party/move/evm/move-to-yul/tests/Tables.exp b/third_party/move/evm/move-to-yul/tests/Tables.exp index f6ce37ffea2a9..04a2ed965d8c1 100644 --- a/third_party/move/evm/move-to-yul/tests/Tables.exp +++ b/third_party/move/evm/move-to-yul/tests/Tables.exp @@ -108,13 +108,13 @@ object "test_A2_Tables_test_borrow_fail" { $t19 := $MakePtr(false, add($locals, 96)) // $t20 := move($t6) $t20 := mload($locals) - // $t21 := pack Tables::S($t20) + // $t21 := pack 0x2::Tables::S($t20) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t20) $t21 := $mem } - // move_to>($t21, $t19) + // move_to<0x2::Tables::S>($t21, $t19) { let $base_offset := $MakeTypeStorageBase(0, 0x8a475b1c, $LoadU256($t19)) if $AlignedStorageLoad($base_offset) { @@ -456,13 +456,13 @@ object "test_A2_Tables_test_insert_fail" { $t30 := $MakePtr(false, add($locals, 160)) // $t31 := move($t9) $t31 := mload($locals) - // $t32 := pack Tables::S($t31) + // $t32 := pack 0x2::Tables::S($t31) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t31) $t32 := $mem } - // move_to>($t32, $t30) + // move_to<0x2::Tables::S>($t32, $t30) { let $base_offset := $MakeTypeStorageBase(0, 0x8a475b1c, $LoadU256($t30)) if $AlignedStorageLoad($base_offset) { @@ -1022,13 +1022,13 @@ object "test_A2_Tables_test_primitive" { $t98 := $MakePtr(false, add($locals, 384)) // $t99 := move($t31) $t99 := mload($locals) - // $t100 := pack Tables::S($t99) + // $t100 := pack 0x2::Tables::S($t99) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t99) $t100 := $mem } - // move_to>($t100, $t98) + // 
move_to<0x2::Tables::S>($t100, $t98) { let $base_offset := $MakeTypeStorageBase(0, 0x8a475b1c, $LoadU256($t98)) if $AlignedStorageLoad($base_offset) { @@ -1044,7 +1044,7 @@ object "test_A2_Tables_test_primitive" { } // $t101 := 0x42 $t101 := 0x42 - // $t102 := borrow_global>($t101) + // $t102 := borrow_global<0x2::Tables::S>($t101) { let $base_offset := $MakeTypeStorageBase(0, 0x8a475b1c, $t101) if iszero($AlignedStorageLoad($base_offset)) { @@ -1052,7 +1052,7 @@ object "test_A2_Tables_test_primitive" { } $t102 := $MakePtr(true, add($base_offset, 32)) } - // $t103 := borrow_field>.t($t102) + // $t103 := borrow_field<0x2::Tables::S>.t($t102) $t103 := $t102 // $t104 := 42 $t104 := 42 @@ -1119,7 +1119,7 @@ object "test_A2_Tables_test_primitive" { // label L32 // $t116 := 0x42 $t116 := 0x42 - // $t117 := move_from>($t116) + // $t117 := move_from<0x2::Tables::S>($t116) { let $base_offset := $MakeTypeStorageBase(0, 0x8a475b1c, $t116) if iszero($AlignedStorageLoad($base_offset)) { @@ -1135,7 +1135,7 @@ object "test_A2_Tables_test_primitive" { $t117 := $dst } } - // $t30 := unpack Tables::S($t117) + // $t30 := unpack 0x2::Tables::S($t117) mstore(add($locals, 480), $MemoryLoadU256(add($t117, 0))) $Free($t117, 32) // $t118 := borrow_local($t30) @@ -1181,13 +1181,13 @@ object "test_A2_Tables_test_primitive" { $t127 := $MakePtr(false, add($locals, 544)) // $t128 := move($t30) $t128 := mload(add($locals, 480)) - // $t129 := pack Tables::S($t128) + // $t129 := pack 0x2::Tables::S($t128) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t128) $t129 := $mem } - // move_to>($t129, $t127) + // move_to<0x2::Tables::S>($t129, $t127) { let $base_offset := $MakeTypeStorageBase(0, 0x8a475b1c, $LoadU256($t127)) if $AlignedStorageLoad($base_offset) { @@ -1483,7 +1483,7 @@ object "test_A2_Tables_test_remove_fail" { $Abort($t11) } case 4 { - // $t3 := Table::empty() + // $t3 := Table::empty() mstore($locals, A2_Table_empty$u64_A2_Tables_Balance$()) // $t4 := borrow_local($t3) $t4 := 
$MakePtr(false, $locals) @@ -1493,9 +1493,9 @@ object "test_A2_Tables_test_remove_fail" { mstore(add($locals, 32), $t5) // $t6 := borrow_local($t0) $t6 := $MakePtr(false, add($locals, 56)) - // $t7 := Table::remove($t4, $t6) + // $t7 := Table::remove($t4, $t6) $t7 := A2_Table_remove$u64_A2_Tables_Balance$($t4, $t6) - // $t8 := unpack Tables::Balance($t7) + // $t8 := unpack 0x2::Tables::Balance($t7) $t8 := $MemoryLoadU256(add($t7, 0)) $Free($t7, 32) // $t9 := U256::zero() @@ -1517,13 +1517,13 @@ object "test_A2_Tables_test_remove_fail" { $t13 := $MakePtr(false, add($locals, 64)) // $t14 := move($t3) $t14 := mload($locals) - // $t15 := pack Tables::S($t14) + // $t15 := pack 0x2::Tables::S($t14) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t14) $t15 := $mem } - // move_to>($t15, $t13) + // move_to<0x2::Tables::S>($t15, $t13) { let $base_offset := $MakeTypeStorageBase(0, 0x6f98bffd, $LoadU256($t13)) if $AlignedStorageLoad($base_offset) { @@ -1749,7 +1749,7 @@ object "test_A2_Tables_test_struct" { $Abort($t40) } case 4 { - // $t27 := Table::empty() + // $t27 := Table::empty() mstore($locals, A2_Table_empty$address_A2_Tables_Balance$()) // $t30 := 3743106036130323098097120681749450326028 $t30 := 3743106036130323098097120681749450326028 @@ -1763,13 +1763,13 @@ object "test_A2_Tables_test_struct" { mstore(add($locals, 32), $t33) // $t34 := borrow_local($t0) $t34 := $MakePtr(false, add($locals, 32)) - // $t35 := pack Tables::Balance($t30) + // $t35 := pack 0x2::Tables::Balance($t30) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t30) $t35 := $mem } - // Table::insert($t32, $t34, $t35) + // Table::insert($t32, $t34, $t35) A2_Table_insert$address_A2_Tables_Balance$($t32, $t34, $t35) // $t36 := borrow_local($t27) $t36 := $MakePtr(false, $locals) @@ -1779,7 +1779,7 @@ object "test_A2_Tables_test_struct" { mstore(add($locals, 64), $t37) // $t38 := borrow_local($t18) $t38 := $MakePtr(false, add($locals, 64)) - // $t39 := Table::contains($t36, $t38) + // 
$t39 := Table::contains($t36, $t38) $t39 := A2_Table_contains$address_A2_Tables_Balance$($t36, $t38) // if ($t39) goto L1 else goto L0 switch $t39 @@ -1796,9 +1796,9 @@ object "test_A2_Tables_test_struct" { mstore(add($locals, 96), $t42) // $t43 := borrow_local($t20) $t43 := $MakePtr(false, add($locals, 96)) - // $t44 := Table::borrow($t41, $t43) + // $t44 := Table::borrow($t41, $t43) $t44 := A2_Table_borrow$address_A2_Tables_Balance$($t41, $t43) - // $t45 := borrow_field.value($t44) + // $t45 := borrow_field<0x2::Tables::Balance>.value($t44) $t45 := $t44 // $t46 := read_ref($t45) $t46 := $LoadU256($t45) @@ -1831,13 +1831,13 @@ object "test_A2_Tables_test_struct" { $t50 := $MakePtr(false, add($locals, 128)) // $t51 := move($t27) $t51 := mload($locals) - // $t52 := pack Tables::S($t51) + // $t52 := pack 0x2::Tables::S($t51) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t51) $t52 := $mem } - // move_to>($t52, $t50) + // move_to<0x2::Tables::S>($t52, $t50) { let $base_offset := $MakeTypeStorageBase(0, 0x7dd56ec3, $LoadU256($t50)) if $AlignedStorageLoad($base_offset) { @@ -1853,7 +1853,7 @@ object "test_A2_Tables_test_struct" { } // $t53 := 0x42 $t53 := 0x42 - // $t54 := borrow_global>($t53) + // $t54 := borrow_global<0x2::Tables::S>($t53) { let $base_offset := $MakeTypeStorageBase(0, 0x7dd56ec3, $t53) if iszero($AlignedStorageLoad($base_offset)) { @@ -1861,7 +1861,7 @@ object "test_A2_Tables_test_struct" { } $t54 := $MakePtr(true, add($base_offset, 32)) } - // $t55 := borrow_field>.t($t54) + // $t55 := borrow_field<0x2::Tables::S>.t($t54) $t55 := $t54 // $t56 := 0xcd $t56 := 0xcd @@ -1869,13 +1869,13 @@ object "test_A2_Tables_test_struct" { mstore(add($locals, 160), $t56) // $t57 := borrow_local($t23) $t57 := $MakePtr(false, add($locals, 160)) - // $t58 := pack Tables::Balance($t31) + // $t58 := pack 0x2::Tables::Balance($t31) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t31) $t58 := $mem } - // Table::insert($t55, $t57, $t58) + // 
Table::insert($t55, $t57, $t58) A2_Table_insert$address_A2_Tables_Balance$($t55, $t57, $t58) // $t59 := 0xab $t59 := 0xab @@ -1885,9 +1885,9 @@ object "test_A2_Tables_test_struct" { $t60 := $MakePtr(false, add($locals, 192)) // $t61 := freeze_ref($t55) $t61 := $t55 - // $t62 := Table::borrow($t61, $t60) + // $t62 := Table::borrow($t61, $t60) $t62 := A2_Table_borrow$address_A2_Tables_Balance$($t61, $t60) - // $t63 := borrow_field.value($t62) + // $t63 := borrow_field<0x2::Tables::Balance>.value($t62) $t63 := $t62 // $t64 := read_ref($t63) $t64 := $LoadU256($t63) @@ -1921,9 +1921,9 @@ object "test_A2_Tables_test_struct" { $t68 := $MakePtr(false, add($locals, 224)) // $t69 := freeze_ref($t55) $t69 := $t55 - // $t70 := Table::borrow($t69, $t68) + // $t70 := Table::borrow($t69, $t68) $t70 := A2_Table_borrow$address_A2_Tables_Balance$($t69, $t68) - // $t71 := borrow_field.value($t70) + // $t71 := borrow_field<0x2::Tables::Balance>.value($t70) $t71 := $t70 // $t72 := read_ref($t71) $t72 := $LoadU256($t71) @@ -1955,9 +1955,9 @@ object "test_A2_Tables_test_struct" { mstore(add($locals, 256), $t75) // $t76 := borrow_local($t7) $t76 := $MakePtr(false, add($locals, 256)) - // $t77 := Table::borrow_mut($t55, $t76) + // $t77 := Table::borrow_mut($t55, $t76) $t77 := A2_Table_borrow_mut$address_A2_Tables_Balance$($t55, $t76) - // $t78 := borrow_field.value($t77) + // $t78 := borrow_field<0x2::Tables::Balance>.value($t77) $t78 := $t77 // $t79 := read_ref($t78) $t79 := $LoadU256($t78) @@ -1965,7 +1965,7 @@ object "test_A2_Tables_test_struct" { $t80 := A2_U256_one() // $t81 := -($t79, $t80) $t81 := $Sub($t79, $t80) - // $t82 := borrow_field.value($t77) + // $t82 := borrow_field<0x2::Tables::Balance>.value($t77) $t82 := $t77 // write_ref($t82, $t81) $StoreU256($t82, $t81) @@ -1977,9 +1977,9 @@ object "test_A2_Tables_test_struct" { $t84 := $MakePtr(false, add($locals, 288)) // $t85 := freeze_ref($t55) $t85 := $t55 - // $t86 := Table::borrow($t85, $t84) + // $t86 := Table::borrow($t85, 
$t84) $t86 := A2_Table_borrow$address_A2_Tables_Balance$($t85, $t84) - // $t87 := borrow_field.value($t86) + // $t87 := borrow_field<0x2::Tables::Balance>.value($t86) $t87 := $t86 // $t88 := read_ref($t87) $t88 := $LoadU256($t87) @@ -2013,9 +2013,9 @@ object "test_A2_Tables_test_struct" { mstore(add($locals, 320), $t92) // $t93 := borrow_local($t13) $t93 := $MakePtr(false, add($locals, 320)) - // $t94 := Table::remove($t55, $t93) + // $t94 := Table::remove($t55, $t93) $t94 := A2_Table_remove$address_A2_Tables_Balance$($t55, $t93) - // $t95 := unpack Tables::Balance($t94) + // $t95 := unpack 0x2::Tables::Balance($t94) $t95 := $MemoryLoadU256(add($t94, 0)) $Free($t94, 32) // $t96 := ==($t95, $t30) @@ -2048,7 +2048,7 @@ object "test_A2_Tables_test_struct" { $t99 := $MakePtr(false, add($locals, 352)) // $t100 := freeze_ref($t55) $t100 := $t55 - // $t101 := Table::contains($t100, $t99) + // $t101 := Table::contains($t100, $t99) $t101 := A2_Table_contains$address_A2_Tables_Balance$($t100, $t99) // $t102 := !($t101) $t102 := $LogicalNot($t101) @@ -2356,7 +2356,7 @@ object "test_A2_Tables_test_table_of_tables" { $Abort($t75) } case 4 { - // $t45 := Table::empty>() + // $t45 := Table::empty>() mstore(add($locals, 128), A2_Table_empty$address_A2_Table_Table$address_A2_U256_U256$$()) // $t51 := 3743106036130323098097120681749450326028 $t51 := 3743106036130323098097120681749450326028 @@ -2364,7 +2364,7 @@ object "test_A2_Tables_test_table_of_tables" { $t52 := 15312706511442230855851857334429569515566 // $t53 := 26542024619833200150143219379677920493647 $t53 := 26542024619833200150143219379677920493647 - // $t46 := Table::empty() + // $t46 := Table::empty() mstore($locals, A2_Table_empty$address_A2_U256_U256$()) // $t54 := borrow_local($t46) $t54 := $MakePtr(false, $locals) @@ -2374,9 +2374,9 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 32), $t55) // $t56 := borrow_local($t0) $t56 := $MakePtr(false, add($locals, 32)) - // Table::insert($t54, $t56, $t51) 
+ // Table::insert($t54, $t56, $t51) A2_Table_insert$address_A2_U256_U256$($t54, $t56, $t51) - // $t47 := Table::empty() + // $t47 := Table::empty() mstore(add($locals, 64), A2_Table_empty$address_A2_U256_U256$()) // $t57 := borrow_local($t47) $t57 := $MakePtr(false, add($locals, 64)) @@ -2386,7 +2386,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 96), $t58) // $t59 := borrow_local($t22) $t59 := $MakePtr(false, add($locals, 96)) - // Table::insert($t57, $t59, $t52) + // Table::insert($t57, $t59, $t52) A2_Table_insert$address_A2_U256_U256$($t57, $t59, $t52) // $t60 := borrow_local($t45) $t60 := $MakePtr(false, add($locals, 128)) @@ -2398,7 +2398,7 @@ object "test_A2_Tables_test_table_of_tables" { $t62 := $MakePtr(false, add($locals, 160)) // $t63 := move($t46) $t63 := mload($locals) - // Table::insert>($t60, $t62, $t63) + // Table::insert>($t60, $t62, $t63) A2_Table_insert$address_A2_Table_Table$address_A2_U256_U256$$($t60, $t62, $t63) // $t64 := borrow_local($t45) $t64 := $MakePtr(false, add($locals, 128)) @@ -2410,7 +2410,7 @@ object "test_A2_Tables_test_table_of_tables" { $t66 := $MakePtr(false, add($locals, 192)) // $t67 := move($t47) $t67 := mload(add($locals, 64)) - // Table::insert>($t64, $t66, $t67) + // Table::insert>($t64, $t66, $t67) A2_Table_insert$address_A2_Table_Table$address_A2_U256_U256$$($t64, $t66, $t67) // $t68 := borrow_local($t45) $t68 := $MakePtr(false, add($locals, 128)) @@ -2420,7 +2420,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 224), $t69) // $t70 := borrow_local($t1) $t70 := $MakePtr(false, add($locals, 224)) - // $t71 := Table::borrow>($t68, $t70) + // $t71 := Table::borrow>($t68, $t70) $t71 := A2_Table_borrow$address_A2_Table_Table$address_A2_U256_U256$$($t68, $t70) // $t72 := 0xab $t72 := 0xab @@ -2428,7 +2428,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 256), $t72) // $t73 := borrow_local($t44) $t73 := $MakePtr(false, add($locals, 256)) - // $t74 := 
Table::contains($t71, $t73) + // $t74 := Table::contains($t71, $t73) $t74 := A2_Table_contains$address_A2_U256_U256$($t71, $t73) // if ($t74) goto L1 else goto L0 switch $t74 @@ -2445,7 +2445,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 288), $t77) // $t78 := borrow_local($t5) $t78 := $MakePtr(false, add($locals, 288)) - // $t79 := Table::borrow>($t76, $t78) + // $t79 := Table::borrow>($t76, $t78) $t79 := A2_Table_borrow$address_A2_Table_Table$address_A2_U256_U256$$($t76, $t78) // $t80 := 0xcd $t80 := 0xcd @@ -2453,7 +2453,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 320), $t80) // $t81 := borrow_local($t4) $t81 := $MakePtr(false, add($locals, 320)) - // $t82 := Table::contains($t79, $t81) + // $t82 := Table::contains($t79, $t81) $t82 := A2_Table_contains$address_A2_U256_U256$($t79, $t81) // if ($t82) goto L4 else goto L3 switch $t82 @@ -2482,7 +2482,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 352), $t85) // $t86 := borrow_local($t9) $t86 := $MakePtr(false, add($locals, 352)) - // $t87 := Table::borrow>($t84, $t86) + // $t87 := Table::borrow>($t84, $t86) $t87 := A2_Table_borrow$address_A2_Table_Table$address_A2_U256_U256$$($t84, $t86) // $t88 := 0xab $t88 := 0xab @@ -2490,7 +2490,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 384), $t88) // $t89 := borrow_local($t8) $t89 := $MakePtr(false, add($locals, 384)) - // $t90 := Table::borrow($t87, $t89) + // $t90 := Table::borrow($t87, $t89) $t90 := A2_Table_borrow$address_A2_U256_U256$($t87, $t89) // $t91 := read_ref($t90) $t91 := $LoadU256($t90) @@ -2523,7 +2523,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 416), $t95) // $t96 := borrow_local($t14) $t96 := $MakePtr(false, add($locals, 416)) - // $t97 := Table::borrow>($t94, $t96) + // $t97 := Table::borrow>($t94, $t96) $t97 := A2_Table_borrow$address_A2_Table_Table$address_A2_U256_U256$$($t94, $t96) // $t98 := 0xcd $t98 := 0xcd @@ -2531,7 
+2531,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 448), $t98) // $t99 := borrow_local($t13) $t99 := $MakePtr(false, add($locals, 448)) - // $t100 := Table::borrow($t97, $t99) + // $t100 := Table::borrow($t97, $t99) $t100 := A2_Table_borrow$address_A2_U256_U256$($t97, $t99) // $t101 := read_ref($t100) $t101 := $LoadU256($t100) @@ -2564,7 +2564,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 480), $t105) // $t106 := borrow_local($t18) $t106 := $MakePtr(false, add($locals, 480)) - // $t107 := Table::borrow_mut>($t104, $t106) + // $t107 := Table::borrow_mut>($t104, $t106) $t107 := A2_Table_borrow_mut$address_A2_Table_Table$address_A2_U256_U256$$($t104, $t106) // $t108 := 0xef $t108 := 0xef @@ -2572,7 +2572,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 512), $t108) // $t109 := borrow_local($t17) $t109 := $MakePtr(false, add($locals, 512)) - // Table::insert($t107, $t109, $t53) + // Table::insert($t107, $t109, $t53) A2_Table_insert$address_A2_U256_U256$($t107, $t109, $t53) // $t110 := borrow_local($t45) $t110 := $MakePtr(false, add($locals, 128)) @@ -2582,7 +2582,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 544), $t111) // $t112 := borrow_local($t23) $t112 := $MakePtr(false, add($locals, 544)) - // $t113 := Table::borrow>($t110, $t112) + // $t113 := Table::borrow>($t110, $t112) $t113 := A2_Table_borrow$address_A2_Table_Table$address_A2_U256_U256$$($t110, $t112) // $t114 := 0xef $t114 := 0xef @@ -2590,7 +2590,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 576), $t114) // $t115 := borrow_local($t21) $t115 := $MakePtr(false, add($locals, 576)) - // $t116 := Table::borrow($t113, $t115) + // $t116 := Table::borrow($t113, $t115) $t116 := A2_Table_borrow$address_A2_U256_U256$($t113, $t115) // $t117 := read_ref($t116) $t117 := $LoadU256($t116) @@ -2623,7 +2623,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 608), $t121) // 
$t122 := borrow_local($t27) $t122 := $MakePtr(false, add($locals, 608)) - // $t123 := Table::borrow>($t120, $t122) + // $t123 := Table::borrow>($t120, $t122) $t123 := A2_Table_borrow$address_A2_Table_Table$address_A2_U256_U256$$($t120, $t122) // $t124 := 0xab $t124 := 0xab @@ -2631,7 +2631,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 640), $t124) // $t125 := borrow_local($t26) $t125 := $MakePtr(false, add($locals, 640)) - // $t126 := Table::borrow($t123, $t125) + // $t126 := Table::borrow($t123, $t125) $t126 := A2_Table_borrow$address_A2_U256_U256$($t123, $t125) // $t127 := read_ref($t126) $t127 := $LoadU256($t126) @@ -2664,7 +2664,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 672), $t131) // $t132 := borrow_local($t31) $t132 := $MakePtr(false, add($locals, 672)) - // $t133 := Table::borrow_mut>($t130, $t132) + // $t133 := Table::borrow_mut>($t130, $t132) $t133 := A2_Table_borrow_mut$address_A2_Table_Table$address_A2_U256_U256$$($t130, $t132) // $t134 := 0xcd $t134 := 0xcd @@ -2672,7 +2672,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 704), $t134) // $t135 := borrow_local($t30) $t135 := $MakePtr(false, add($locals, 704)) - // $t136 := Table::remove($t133, $t135) + // $t136 := Table::remove($t133, $t135) $t136 := A2_Table_remove$address_A2_U256_U256$($t133, $t135) // $t137 := ==($t136, $t52) $t137 := $Eq($t136, $t52) @@ -2703,7 +2703,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 736), $t140) // $t141 := borrow_local($t36) $t141 := $MakePtr(false, add($locals, 736)) - // $t142 := Table::borrow>($t139, $t141) + // $t142 := Table::borrow>($t139, $t141) $t142 := A2_Table_borrow$address_A2_Table_Table$address_A2_U256_U256$$($t139, $t141) // $t143 := 0xcd $t143 := 0xcd @@ -2711,7 +2711,7 @@ object "test_A2_Tables_test_table_of_tables" { mstore(add($locals, 768), $t143) // $t144 := borrow_local($t35) $t144 := $MakePtr(false, add($locals, 768)) - // $t145 := 
Table::contains($t142, $t144) + // $t145 := Table::contains($t142, $t144) $t145 := A2_Table_contains$address_A2_U256_U256$($t142, $t144) // $t146 := !($t145) $t146 := $LogicalNot($t145) @@ -2742,13 +2742,13 @@ object "test_A2_Tables_test_table_of_tables" { $t149 := $MakePtr(false, add($locals, 800)) // $t150 := move($t45) $t150 := mload(add($locals, 128)) - // $t151 := pack Tables::S>($t150) + // $t151 := pack 0x2::Tables::S>($t150) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t150) $t151 := $mem } - // move_to>>($t151, $t149) + // move_to<0x2::Tables::S>>($t151, $t149) { let $base_offset := $MakeTypeStorageBase(0, 0xc5110c9a, $LoadU256($t149)) if $AlignedStorageLoad($base_offset) { @@ -3047,7 +3047,7 @@ object "test_A2_Tables_test_u256" { $Abort($t14) } case 4 { - // $t3 := Table::empty() + // $t3 := Table::empty<0x2::U256::U256, 0x2::U256::U256>() mstore($locals, A2_Table_empty$A2_U256_U256_A2_U256_U256$()) // $t6 := 26542024619833200150143219379677920493647 $t6 := 26542024619833200150143219379677920493647 @@ -3061,13 +3061,13 @@ object "test_A2_Tables_test_u256" { $t9 := $MakePtr(false, $locals) // $t10 := borrow_local($t2) $t10 := $MakePtr(false, add($locals, 32)) - // Table::insert($t9, $t10, $t7) + // Table::insert<0x2::U256::U256, 0x2::U256::U256>($t9, $t10, $t7) A2_Table_insert$A2_U256_U256_A2_U256_U256$($t9, $t10, $t7) // $t11 := borrow_local($t3) $t11 := $MakePtr(false, $locals) // $t12 := borrow_local($t2) $t12 := $MakePtr(false, add($locals, 32)) - // $t13 := Table::contains($t11, $t12) + // $t13 := Table::contains<0x2::U256::U256, 0x2::U256::U256>($t11, $t12) $t13 := A2_Table_contains$A2_U256_U256_A2_U256_U256$($t11, $t12) // if ($t13) goto L1 else goto L0 switch $t13 @@ -3080,7 +3080,7 @@ object "test_A2_Tables_test_u256" { $t15 := $MakePtr(false, $locals) // $t16 := borrow_local($t2) $t16 := $MakePtr(false, add($locals, 32)) - // $t17 := Table::borrow($t15, $t16) + // $t17 := Table::borrow<0x2::U256::U256, 0x2::U256::U256>($t15, $t16) 
$t17 := A2_Table_borrow$A2_U256_U256_A2_U256_U256$($t15, $t16) // $t18 := read_ref($t17) $t18 := $LoadU256($t17) @@ -3111,7 +3111,7 @@ object "test_A2_Tables_test_u256" { $t22 := $MakePtr(false, $locals) // $t23 := borrow_local($t2) $t23 := $MakePtr(false, add($locals, 32)) - // $t24 := Table::borrow_mut($t22, $t23) + // $t24 := Table::borrow_mut<0x2::U256::U256, 0x2::U256::U256>($t22, $t23) $t24 := A2_Table_borrow_mut$A2_U256_U256_A2_U256_U256$($t22, $t23) // write_ref($t24, $t8) $StoreU256($t24, $t8) @@ -3119,7 +3119,7 @@ object "test_A2_Tables_test_u256" { $t25 := $MakePtr(false, $locals) // $t26 := borrow_local($t2) $t26 := $MakePtr(false, add($locals, 32)) - // $t27 := Table::borrow($t25, $t26) + // $t27 := Table::borrow<0x2::U256::U256, 0x2::U256::U256>($t25, $t26) $t27 := A2_Table_borrow$A2_U256_U256_A2_U256_U256$($t25, $t26) // $t28 := read_ref($t27) $t28 := $LoadU256($t27) @@ -3152,13 +3152,13 @@ object "test_A2_Tables_test_u256" { $t32 := $MakePtr(false, add($locals, 64)) // $t33 := move($t3) $t33 := mload($locals) - // $t34 := pack Tables::S($t33) + // $t34 := pack 0x2::Tables::S<0x2::U256::U256, 0x2::U256::U256>($t33) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t33) $t34 := $mem } - // move_to>($t34, $t32) + // move_to<0x2::Tables::S<0x2::U256::U256, 0x2::U256::U256>>($t34, $t32) { let $base_offset := $MakeTypeStorageBase(0, 0xa20c17a3, $LoadU256($t32)) if $AlignedStorageLoad($base_offset) { @@ -3576,13 +3576,13 @@ object "test_A2_Tables_test_vector" { $t59 := $MakePtr(false, add($locals, 192)) // $t60 := move($t22) $t60 := mload($locals) - // $t61 := pack Tables::S>($t60) + // $t61 := pack 0x2::Tables::S>($t60) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t60) $t61 := $mem } - // move_to>>($t61, $t59) + // move_to<0x2::Tables::S>>($t61, $t59) { let $base_offset := $MakeTypeStorageBase(0, 0xa0362133, $LoadU256($t59)) if $AlignedStorageLoad($base_offset) { @@ -3598,7 +3598,7 @@ object "test_A2_Tables_test_vector" { } // $t62 := 
0x42 $t62 := 0x42 - // $t63 := borrow_global>>($t62) + // $t63 := borrow_global<0x2::Tables::S>>($t62) { let $base_offset := $MakeTypeStorageBase(0, 0xa0362133, $t62) if iszero($AlignedStorageLoad($base_offset)) { @@ -3606,7 +3606,7 @@ object "test_A2_Tables_test_vector" { } $t63 := $MakePtr(true, add($base_offset, 32)) } - // $t64 := borrow_field>>.t($t63) + // $t64 := borrow_field<0x2::Tables::S>>.t($t63) $t64 := $t63 // $t65 := 42 $t65 := 42 @@ -3620,7 +3620,7 @@ object "test_A2_Tables_test_vector" { $t68 := 0x1013 // vector::push_back

($t67, $t68) A1_vector_push_back$address$($t67, $t68) - // $t69 := borrow_field>>.t($t63) + // $t69 := borrow_field<0x2::Tables::S>>.t($t63) $t69 := $t63 // $t70 := 42 $t70 := 42 @@ -3656,7 +3656,7 @@ object "test_A2_Tables_test_vector" { } case 17 { // label L14 - // $t77 := borrow_field>>.t($t63) + // $t77 := borrow_field<0x2::Tables::S>>.t($t63) $t77 := $t63 // $t78 := 42 $t78 := 42 @@ -3696,7 +3696,7 @@ object "test_A2_Tables_test_vector" { } case 20 { // label L17 - // $t87 := borrow_field>>.t($t63) + // $t87 := borrow_field<0x2::Tables::S>>.t($t63) $t87 := $t63 // $t88 := 42 $t88 := 42 @@ -3798,7 +3798,7 @@ object "test_A2_Tables_test_vector" { } case 29 { // label L26 - // $t109 := borrow_field>>.t($t63) + // $t109 := borrow_field<0x2::Tables::S>>.t($t63) $t109 := $t63 // $t110 := 42 $t110 := 42 diff --git a/third_party/move/evm/move-to-yul/tests/TestABIStructs.exp b/third_party/move/evm/move-to-yul/tests/TestABIStructs.exp index 49a3eb0ea4782..06d766ee22495 100644 --- a/third_party/move/evm/move-to-yul/tests/TestABIStructs.exp +++ b/third_party/move/evm/move-to-yul/tests/TestABIStructs.exp @@ -101,13 +101,13 @@ object "A2_M" { $t1 := true // $t2 := M::pack_S($t0, $t1) $t2 := A2_M_pack_S($t0, $t1) - // $t3 := pack M::Event_S($t2) + // $t3 := pack 0x2::M::Event_S($t2) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t2) $t3 := $mem } - // Evm::emit($t3) + // Evm::emit<0x2::M::Event_S>($t3) A2_Evm_emit$A2_M_Event_S$($t3) // return () } @@ -130,7 +130,7 @@ object "A2_M" { $t1 := $MakePtr(false, $locals) // $t2 := 0 $t2 := 0 - // $t3 := vector::borrow>($t1, $t2) + // $t3 := vector::borrow>($t1, $t2) $t3 := A1_vector_borrow$vec$A2_M_S$$($t1, $t2) // $t4 := read_ref($t3) $t4 := $LoadU256($t3) @@ -244,7 +244,7 @@ object "A2_M" { function A2_M_pack_S(a, b) -> $result { let s2, $t4, $t5, $t6, $t7, $t8 let $locals := $Malloc(32) - // $t3 := vector::empty() + // $t3 := vector::empty<0x2::M::S2>() mstore($locals, A1_vector_empty$A2_M_S2$()) // $t4 := 
(u128)($t0) $t4 := $CastU128(a) @@ -252,11 +252,11 @@ object "A2_M" { $t5 := A2_M_pack_S2($t4) // $t6 := borrow_local($t3) $t6 := $MakePtr(false, $locals) - // vector::push_back($t6, $t5) + // vector::push_back<0x2::M::S2>($t6, $t5) A1_vector_push_back$A2_M_S2$($t6, $t5) // $t7 := move($t3) $t7 := mload($locals) - // $t8 := pack M::S($t0, $t1, $t7) + // $t8 := pack 0x2::M::S($t0, $t1, $t7) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -314,7 +314,7 @@ object "A2_M" { A1_vector_push_back$u128$($t2, x) // $t3 := move($t1) $t3 := mload($locals) - // $t4 := pack M::S2($t3) + // $t4 := pack 0x2::M::S2($t3) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t3) @@ -1068,13 +1068,13 @@ object "test_A2_M_test_abi_S" { _s := A2_M_decode_S($t7) // $t8 := borrow_local($t2) $t8 := $MakePtr(false, s) - // $t9 := borrow_field.a($t8) + // $t9 := borrow_field<0x2::M::S>.a($t8) $t9 := $IndexPtr($t8, 32) // $t10 := read_ref($t9) $t10 := $LoadU64($t9) // $t11 := borrow_local($t0) $t11 := $MakePtr(false, _s) - // $t12 := borrow_field.a($t11) + // $t12 := borrow_field<0x2::M::S>.a($t11) $t12 := $IndexPtr($t11, 32) // $t13 := read_ref($t12) $t13 := $LoadU64($t12) @@ -1089,7 +1089,7 @@ object "test_A2_M_test_abi_S" { // label L2 // $t16 := borrow_local($t0) $t16 := $MakePtr(false, _s) - // $t17 := borrow_field.a($t16) + // $t17 := borrow_field<0x2::M::S>.a($t16) $t17 := $IndexPtr($t16, 32) // $t18 := read_ref($t17) $t18 := $LoadU64($t17) @@ -1118,13 +1118,13 @@ object "test_A2_M_test_abi_S" { // label L5 // $t22 := borrow_local($t2) $t22 := $MakePtr(false, s) - // $t23 := borrow_field.b($t22) + // $t23 := borrow_field<0x2::M::S>.b($t22) $t23 := $IndexPtr($t22, 40) // $t24 := read_ref($t23) $t24 := $LoadU8($t23) // $t25 := borrow_local($t0) $t25 := $MakePtr(false, _s) - // $t26 := borrow_field.b($t25) + // $t26 := borrow_field<0x2::M::S>.b($t25) $t26 := $IndexPtr($t25, 40) // $t27 := read_ref($t26) $t27 := $LoadU8($t26) @@ -1151,7 +1151,7 @@ object 
"test_A2_M_test_abi_S" { // label L8 // $t30 := borrow_local($t0) $t30 := $MakePtr(false, _s) - // $t31 := borrow_field.b($t30) + // $t31 := borrow_field<0x2::M::S>.b($t30) $t31 := $IndexPtr($t30, 40) // $t32 := read_ref($t31) $t32 := $LoadU8($t31) @@ -1180,7 +1180,7 @@ object "test_A2_M_test_abi_S" { // label L11 // $t36 := borrow_local($t2) $t36 := $MakePtr(false, s) - // $t37 := borrow_field.c($t36) + // $t37 := borrow_field<0x2::M::S>.c($t36) $t37 := $t36 // $t3 := read_ref($t37) mstore($locals, $LoadU256($t37)) @@ -1222,7 +1222,7 @@ object "test_A2_M_test_abi_S" { } // $t38 := borrow_local($t0) $t38 := $MakePtr(false, _s) - // $t39 := borrow_field.c($t38) + // $t39 := borrow_field<0x2::M::S>.c($t38) $t39 := $t38 // $t1 := read_ref($t39) mstore(add($locals, 32), $LoadU256($t39)) @@ -1264,7 +1264,7 @@ object "test_A2_M_test_abi_S" { } // $t40 := borrow_local($t3) $t40 := $MakePtr(false, $locals) - // $t41 := vector::length($t40) + // $t41 := vector::length<0x2::M::S2>($t40) $t41 := A1_vector_length$A2_M_S2$($t40) // $t42 := 1 $t42 := 1 @@ -1291,7 +1291,7 @@ object "test_A2_M_test_abi_S" { // label L14 // $t45 := borrow_local($t1) $t45 := $MakePtr(false, add($locals, 32)) - // $t46 := vector::length($t45) + // $t46 := vector::length<0x2::M::S2>($t45) $t46 := A1_vector_length$A2_M_S2$($t45) // $t47 := 1 $t47 := 1 @@ -1320,9 +1320,9 @@ object "test_A2_M_test_abi_S" { $t50 := $MakePtr(false, add($locals, 32)) // $t51 := 0 $t51 := 0 - // $t52 := vector::borrow($t50, $t51) + // $t52 := vector::borrow<0x2::M::S2>($t50, $t51) $t52 := A1_vector_borrow$A2_M_S2$($t50, $t51) - // $t53 := borrow_field.x($t52) + // $t53 := borrow_field<0x2::M::S2>.x($t52) $t53 := $t52 // $t54 := 0 $t54 := 0 @@ -1404,7 +1404,7 @@ object "test_A2_M_test_abi_S" { function A2_M_pack_S(a, b) -> $result { let s2, $t4, $t5, $t6, $t7, $t8 let $locals := $Malloc(32) - // $t3 := vector::empty() + // $t3 := vector::empty<0x2::M::S2>() mstore($locals, A1_vector_empty$A2_M_S2$()) // $t4 := (u128)($t0) $t4 
:= $CastU128(a) @@ -1412,11 +1412,11 @@ object "test_A2_M_test_abi_S" { $t5 := A2_M_pack_S2($t4) // $t6 := borrow_local($t3) $t6 := $MakePtr(false, $locals) - // vector::push_back($t6, $t5) + // vector::push_back<0x2::M::S2>($t6, $t5) A1_vector_push_back$A2_M_S2$($t6, $t5) // $t7 := move($t3) $t7 := mload($locals) - // $t8 := pack M::S($t0, $t1, $t7) + // $t8 := pack 0x2::M::S($t0, $t1, $t7) { let $mem := $Malloc(41) $MemoryStoreU64(add($mem, 32), a) @@ -1474,7 +1474,7 @@ object "test_A2_M_test_abi_S" { A1_vector_push_back$u128$($t2, x) // $t3 := move($t1) $t3 := mload($locals) - // $t4 := pack M::S2($t3) + // $t4 := pack 0x2::M::S2($t3) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t3) @@ -2216,7 +2216,7 @@ object "test_A2_M_test_abi_String" { } function A1_ascii_into_bytes(string) -> $result { let $t1 - // $t1 := unpack ascii::String($t0) + // $t1 := unpack 0x1::ascii::String($t0) $t1 := $MemoryLoadU256(add(string, 0)) $Free(string, 32) // return $t1 @@ -2249,7 +2249,7 @@ object "test_A2_M_test_abi_String" { function A1_ascii_as_bytes(string) -> $result { let $t1 - // $t1 := borrow_field.bytes($t0) + // $t1 := borrow_field<0x1::ascii::String>.bytes($t0) $t1 := string // return $t1 $result := $t1 @@ -2297,7 +2297,7 @@ object "test_A2_M_test_abi_String" { x := A1_ascii_try_string(bytes) // $t2 := borrow_local($t1) $t2 := $MakePtr(false, x) - // $t3 := option::is_some($t2) + // $t3 := option::is_some<0x1::ascii::String>($t2) $t3 := A1_option_is_some$A1_ascii_String$($t2) // if ($t3) goto L1 else goto L0 switch $t3 @@ -2308,7 +2308,7 @@ object "test_A2_M_test_abi_String" { // label L2 // $t5 := move($t1) $t5 := x - // $t6 := option::destroy_some($t5) + // $t6 := option::destroy_some<0x1::ascii::String>($t5) $t6 := A1_option_destroy_some$A1_ascii_String$($t5) // return $t6 $result := $t6 @@ -2349,7 +2349,7 @@ object "test_A2_M_test_abi_String" { // label L2 // $t6 := move($t0) $t6 := t - // $t2 := unpack option::Option<#0>($t6) + // $t2 := unpack 
0x1::option::Option<#0>($t6) mstore($locals, $MemoryLoadU256(add($t6, 0))) $Free($t6, 32) // $t7 := borrow_local($t2) @@ -2409,7 +2409,7 @@ object "test_A2_M_test_abi_String" { } function A1_option_is_some$A1_ascii_String$(t) -> $result { let $t1, $t2, $t3 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$A1_ascii_String$($t1) @@ -2477,13 +2477,13 @@ object "test_A2_M_test_abi_String" { // assert forall j: num: Range(0, $t4): ascii::is_valid_char(Index($t0, j)) // $t14 := move($t0) $t14 := mload($locals) - // $t15 := pack ascii::String($t14) + // $t15 := pack 0x1::ascii::String($t14) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t14) $t15 := $mem } - // $t16 := option::some($t15) + // $t16 := option::some<0x1::ascii::String>($t15) $t16 := A1_option_some$A1_ascii_String$($t15) // return $t16 $result := $t16 @@ -2509,7 +2509,7 @@ object "test_A2_M_test_abi_String" { } case 7 { // label L4 - // $t12 := option::none() + // $t12 := option::none<0x1::ascii::String>() $t12 := A1_option_none$A1_ascii_String$() // return $t12 $result := $t12 @@ -2532,7 +2532,7 @@ object "test_A2_M_test_abi_String" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$A1_ascii_String$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -2550,7 +2550,7 @@ object "test_A2_M_test_abi_String" { let $t1, $t2 // $t1 := vector::singleton<#0>($t0) $t1 := A1_vector_singleton$A1_ascii_String$(e) - // $t2 := pack option::Option<#0>($t1) + // $t2 := pack 0x1::option::Option<#0>($t1) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t1) diff --git a/third_party/move/evm/move-to-yul/tests/TestExternalResult.exp b/third_party/move/evm/move-to-yul/tests/TestExternalResult.exp index 0efdfd006698d..53d43f5dcd892 100644 --- 
a/third_party/move/evm/move-to-yul/tests/TestExternalResult.exp +++ b/third_party/move/evm/move-to-yul/tests/TestExternalResult.exp @@ -171,7 +171,7 @@ object "test_A2_M_extract_err_data" { } function A2_ExternalResult_unwrap_err_data$u64$(result) -> $result { let err_data, err_reason, panic_code, $t4, $t5, $t6, $t7, $t8 - // ($t4, $t5, $t6, $t7) := unpack ExternalResult::ExternalResult<#0>($t0) + // ($t4, $t5, $t6, $t7) := unpack 0x2::ExternalResult::ExternalResult<#0>($t0) $t4 := $MemoryLoadU256(add(result, 0)) $t5 := $MemoryLoadU256(add(result, 32)) $t6 := $MemoryLoadU256(add(result, 64)) @@ -181,7 +181,7 @@ object "test_A2_M_extract_err_data" { A1_option_destroy_none$u64$($t4) // option::destroy_none>($t6) A1_option_destroy_none$vec$u8$$($t6) - // option::destroy_none($t7) + // option::destroy_none<0x2::U256::U256>($t7) A1_option_destroy_none$A2_U256_U256$($t7) // $t8 := option::destroy_some>($t5) $t8 := A1_option_destroy_some$vec$u8$$($t5) @@ -221,7 +221,7 @@ object "test_A2_M_extract_err_data" { // label L2 // $t6 := move($t0) $t6 := t - // $t2 := unpack option::Option<#0>($t6) + // $t2 := unpack 0x1::option::Option<#0>($t6) mstore($locals, $MemoryLoadU256(add($t6, 0))) $Free($t6, 32) // $t7 := borrow_local($t2) @@ -274,7 +274,7 @@ object "test_A2_M_extract_err_data" { } function A1_option_is_some$vec$u8$$(t) -> $result { let $t1, $t2, $t3 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$vec$u8$$($t1) @@ -332,7 +332,7 @@ object "test_A2_M_extract_err_data" { // label L2 // $t4 := move($t0) $t4 := t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -351,7 +351,7 @@ object "test_A2_M_extract_err_data" { } function A1_option_is_none$A2_U256_U256$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := 
borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$A2_U256_U256$($t1) @@ -407,7 +407,7 @@ object "test_A2_M_extract_err_data" { // label L2 // $t4 := move($t0) $t4 := t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -420,7 +420,7 @@ object "test_A2_M_extract_err_data" { function A1_option_is_none$vec$u8$$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$vec$u8$$($t1) @@ -459,7 +459,7 @@ object "test_A2_M_extract_err_data" { // label L2 // $t4 := move($t0) $t4 := t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -478,7 +478,7 @@ object "test_A2_M_extract_err_data" { } function A1_option_is_none$u64$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$u64$($t1) @@ -505,7 +505,7 @@ object "test_A2_M_extract_err_data" { } function A2_ExternalResult_is_err_data$u64$(result) -> $result { let $t1, $t2 - // $t1 := borrow_field>.err_data($t0) + // $t1 := borrow_field<0x2::ExternalResult::ExternalResult<#0>>.err_data($t0) { let $field_ptr := $IndexPtr(result, 32) $t1 := $MakePtr($IsStoragePtr($field_ptr), $LoadU256($field_ptr)) @@ -532,9 +532,9 @@ object "test_A2_M_extract_err_data" { $t2 := A1_option_some$vec$u8$$(error) // $t3 := option::none>() $t3 := A1_option_none$vec$u8$$() - // $t4 := option::none() + // $t4 := option::none<0x2::U256::U256>() $t4 := A1_option_none$A2_U256_U256$() - // $t5 := pack ExternalResult::ExternalResult<#0>($t1, $t2, $t3, $t4) + // $t5 := 
pack 0x2::ExternalResult::ExternalResult<#0>($t1, $t2, $t3, $t4) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t1) @@ -551,7 +551,7 @@ object "test_A2_M_extract_err_data" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$A2_U256_U256$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -569,7 +569,7 @@ object "test_A2_M_extract_err_data" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$vec$u8$$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -587,7 +587,7 @@ object "test_A2_M_extract_err_data" { let $t1, $t2 // $t1 := vector::singleton<#0>($t0) $t1 := A1_vector_singleton$vec$u8$$(e) - // $t2 := pack option::Option<#0>($t1) + // $t2 := pack 0x1::option::Option<#0>($t1) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t1) @@ -645,7 +645,7 @@ object "test_A2_M_extract_err_data" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$u64$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -1082,7 +1082,7 @@ object "test_A2_M_extract_err_reason" { } function A2_ExternalResult_unwrap_err_reason$u64$(result) -> $result { let err_data, err_reason, panic_code, $t4, $t5, $t6, $t7, $t8 - // ($t4, $t5, $t6, $t7) := unpack ExternalResult::ExternalResult<#0>($t0) + // ($t4, $t5, $t6, $t7) := unpack 0x2::ExternalResult::ExternalResult<#0>($t0) $t4 := $MemoryLoadU256(add(result, 0)) $t5 := $MemoryLoadU256(add(result, 32)) $t6 := $MemoryLoadU256(add(result, 64)) @@ -1092,7 +1092,7 @@ object "test_A2_M_extract_err_reason" { A1_option_destroy_none$u64$($t4) // option::destroy_none>($t5) A1_option_destroy_none$vec$u8$$($t5) - // option::destroy_none($t7) + // option::destroy_none<0x2::U256::U256>($t7) 
A1_option_destroy_none$A2_U256_U256$($t7) // $t8 := option::destroy_some>($t6) $t8 := A1_option_destroy_some$vec$u8$$($t6) @@ -1132,7 +1132,7 @@ object "test_A2_M_extract_err_reason" { // label L2 // $t6 := move($t0) $t6 := t - // $t2 := unpack option::Option<#0>($t6) + // $t2 := unpack 0x1::option::Option<#0>($t6) mstore($locals, $MemoryLoadU256(add($t6, 0))) $Free($t6, 32) // $t7 := borrow_local($t2) @@ -1185,7 +1185,7 @@ object "test_A2_M_extract_err_reason" { } function A1_option_is_some$vec$u8$$(t) -> $result { let $t1, $t2, $t3 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$vec$u8$$($t1) @@ -1243,7 +1243,7 @@ object "test_A2_M_extract_err_reason" { // label L2 // $t4 := move($t0) $t4 := t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -1262,7 +1262,7 @@ object "test_A2_M_extract_err_reason" { } function A1_option_is_none$A2_U256_U256$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$A2_U256_U256$($t1) @@ -1318,7 +1318,7 @@ object "test_A2_M_extract_err_reason" { // label L2 // $t4 := move($t0) $t4 := t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -1331,7 +1331,7 @@ object "test_A2_M_extract_err_reason" { function A1_option_is_none$vec$u8$$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$vec$u8$$($t1) @@ -1370,7 +1370,7 @@ object "test_A2_M_extract_err_reason" { // label L2 // $t4 := move($t0) $t4 
:= t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -1389,7 +1389,7 @@ object "test_A2_M_extract_err_reason" { } function A1_option_is_none$u64$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$u64$($t1) @@ -1416,7 +1416,7 @@ object "test_A2_M_extract_err_reason" { } function A2_ExternalResult_is_err_reason$u64$(result) -> $result { let $t1, $t2 - // $t1 := borrow_field>.err_reason($t0) + // $t1 := borrow_field<0x2::ExternalResult::ExternalResult<#0>>.err_reason($t0) { let $field_ptr := $IndexPtr(result, 64) $t1 := $MakePtr($IsStoragePtr($field_ptr), $LoadU256($field_ptr)) @@ -1443,9 +1443,9 @@ object "test_A2_M_extract_err_reason" { $t6 := A1_option_some$vec$u8$$(error) // $t7 := option::none>() $t7 := A1_option_none$vec$u8$$() - // $t8 := option::none() + // $t8 := option::none<0x2::U256::U256>() $t8 := A1_option_none$A2_U256_U256$() - // $t9 := pack ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) + // $t9 := pack 0x2::ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t5) @@ -1462,7 +1462,7 @@ object "test_A2_M_extract_err_reason" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$A2_U256_U256$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -1480,7 +1480,7 @@ object "test_A2_M_extract_err_reason" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$vec$u8$$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -1498,7 +1498,7 @@ object "test_A2_M_extract_err_reason" { let $t1, $t2 // $t1 
:= vector::singleton<#0>($t0) $t1 := A1_vector_singleton$vec$u8$$(e) - // $t2 := pack option::Option<#0>($t1) + // $t2 := pack 0x1::option::Option<#0>($t1) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t1) @@ -1556,7 +1556,7 @@ object "test_A2_M_extract_err_reason" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$u64$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -1950,7 +1950,7 @@ object "test_A2_M_extract_panic_code" { function A2_ExternalResult_unwrap_panic$u64$(result) -> $result { let err_data, err_reason, panic_code, $t4, $t5, $t6, $t7, $t8 - // ($t4, $t5, $t6, $t7) := unpack ExternalResult::ExternalResult<#0>($t0) + // ($t4, $t5, $t6, $t7) := unpack 0x2::ExternalResult::ExternalResult<#0>($t0) $t4 := $MemoryLoadU256(add(result, 0)) $t5 := $MemoryLoadU256(add(result, 32)) $t6 := $MemoryLoadU256(add(result, 64)) @@ -1962,7 +1962,7 @@ object "test_A2_M_extract_panic_code" { A1_option_destroy_none$vec$u8$$($t6) // option::destroy_none>($t5) A1_option_destroy_none$vec$u8$$($t5) - // $t8 := option::destroy_some($t7) + // $t8 := option::destroy_some<0x2::U256::U256>($t7) $t8 := A1_option_destroy_some$A2_U256_U256$($t7) // return $t8 $result := $t8 @@ -2000,7 +2000,7 @@ object "test_A2_M_extract_panic_code" { // label L2 // $t6 := move($t0) $t6 := t - // $t2 := unpack option::Option<#0>($t6) + // $t2 := unpack 0x1::option::Option<#0>($t6) mstore($locals, $MemoryLoadU256(add($t6, 0))) $Free($t6, 32) // $t7 := borrow_local($t2) @@ -2036,7 +2036,7 @@ object "test_A2_M_extract_panic_code" { } function A1_option_is_some$A2_U256_U256$(t) -> $result { let $t1, $t2, $t3 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$A2_U256_U256$($t1) @@ -2094,7 +2094,7 @@ object "test_A2_M_extract_panic_code" { // label L2 // $t4 := 
move($t0) $t4 := t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -2113,7 +2113,7 @@ object "test_A2_M_extract_panic_code" { } function A1_option_is_none$vec$u8$$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$vec$u8$$($t1) @@ -2169,7 +2169,7 @@ object "test_A2_M_extract_panic_code" { // label L2 // $t4 := move($t0) $t4 := t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -2188,7 +2188,7 @@ object "test_A2_M_extract_panic_code" { } function A1_option_is_none$u64$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$u64$($t1) @@ -2215,12 +2215,12 @@ object "test_A2_M_extract_panic_code" { } function A2_ExternalResult_is_panic$u64$(result) -> $result { let $t1, $t2 - // $t1 := borrow_field>.panic_code($t0) + // $t1 := borrow_field<0x2::ExternalResult::ExternalResult<#0>>.panic_code($t0) { let $field_ptr := $IndexPtr(result, 96) $t1 := $MakePtr($IsStoragePtr($field_ptr), $LoadU256($field_ptr)) } - // $t2 := option::is_some($t1) + // $t2 := option::is_some<0x2::U256::U256>($t1) $t2 := A1_option_is_some$A2_U256_U256$($t1) // return $t2 $result := $t2 @@ -2242,9 +2242,9 @@ object "test_A2_M_extract_panic_code" { $t6 := A1_option_none$vec$u8$$() // $t7 := option::none>() $t7 := A1_option_none$vec$u8$$() - // $t8 := option::some($t0) + // $t8 := option::some<0x2::U256::U256>($t0) $t8 := A1_option_some$A2_U256_U256$(panic_code) - // $t9 := pack ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) + // $t9 := pack 
0x2::ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t5) @@ -2261,7 +2261,7 @@ object "test_A2_M_extract_panic_code" { let $t1, $t2 // $t1 := vector::singleton<#0>($t0) $t1 := A1_vector_singleton$A2_U256_U256$(e) - // $t2 := pack option::Option<#0>($t1) + // $t2 := pack 0x1::option::Option<#0>($t1) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t1) @@ -2309,7 +2309,7 @@ object "test_A2_M_extract_panic_code" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$vec$u8$$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -2327,7 +2327,7 @@ object "test_A2_M_extract_panic_code" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$u64$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -2632,7 +2632,7 @@ object "test_A2_M_extract_value" { function A2_ExternalResult_unwrap$u64$(result) -> $result { let err_data, err_reason, panic_code, value, $t5, $t6, $t7, $t8, $t9 - // ($t5, $t6, $t7, $t8) := unpack ExternalResult::ExternalResult<#0>($t0) + // ($t5, $t6, $t7, $t8) := unpack 0x2::ExternalResult::ExternalResult<#0>($t0) $t5 := $MemoryLoadU256(add(result, 0)) $t6 := $MemoryLoadU256(add(result, 32)) $t7 := $MemoryLoadU256(add(result, 64)) @@ -2642,7 +2642,7 @@ object "test_A2_M_extract_value" { A1_option_destroy_none$vec$u8$$($t6) // option::destroy_none>($t7) A1_option_destroy_none$vec$u8$$($t7) - // option::destroy_none($t8) + // option::destroy_none<0x2::U256::U256>($t8) A1_option_destroy_none$A2_U256_U256$($t8) // $t9 := option::destroy_some<#0>($t5) $t9 := A1_option_destroy_some$u64$($t5) @@ -2682,7 +2682,7 @@ object "test_A2_M_extract_value" { // label L2 // $t6 := move($t0) $t6 := t - // $t2 := unpack option::Option<#0>($t6) + // $t2 := unpack 
0x1::option::Option<#0>($t6) mstore($locals, $MemoryLoadU256(add($t6, 0))) $Free($t6, 32) // $t7 := borrow_local($t2) @@ -2718,7 +2718,7 @@ object "test_A2_M_extract_value" { } function A1_option_is_some$u64$(t) -> $result { let $t1, $t2, $t3 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$u64$($t1) @@ -2776,7 +2776,7 @@ object "test_A2_M_extract_value" { // label L2 // $t4 := move($t0) $t4 := t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -2795,7 +2795,7 @@ object "test_A2_M_extract_value" { } function A1_option_is_none$A2_U256_U256$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$A2_U256_U256$($t1) @@ -2851,7 +2851,7 @@ object "test_A2_M_extract_value" { // label L2 // $t4 := move($t0) $t4 := t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -2870,7 +2870,7 @@ object "test_A2_M_extract_value" { } function A1_option_is_none$vec$u8$$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$vec$u8$$($t1) @@ -2897,7 +2897,7 @@ object "test_A2_M_extract_value" { } function A2_ExternalResult_is_ok$u64$(result) -> $result { let $t1, $t2 - // $t1 := borrow_field>.value($t0) + // $t1 := borrow_field<0x2::ExternalResult::ExternalResult<#0>>.value($t0) { $t1 := $MakePtr($IsStoragePtr(result), $LoadU256(result)) } @@ -2923,9 +2923,9 @@ object "test_A2_M_extract_value" { $t2 := A1_option_none$vec$u8$$() // $t3 := 
option::none>() $t3 := A1_option_none$vec$u8$$() - // $t4 := option::none() + // $t4 := option::none<0x2::U256::U256>() $t4 := A1_option_none$A2_U256_U256$() - // $t5 := pack ExternalResult::ExternalResult<#0>($t1, $t2, $t3, $t4) + // $t5 := pack 0x2::ExternalResult::ExternalResult<#0>($t1, $t2, $t3, $t4) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t1) @@ -2942,7 +2942,7 @@ object "test_A2_M_extract_value" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$A2_U256_U256$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -2960,7 +2960,7 @@ object "test_A2_M_extract_value" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$vec$u8$$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -2978,7 +2978,7 @@ object "test_A2_M_extract_value" { let $t1, $t2 // $t1 := vector::singleton<#0>($t0) $t1 := A1_vector_singleton$u64$(e) - // $t2 := pack option::Option<#0>($t1) + // $t2 := pack 0x1::option::Option<#0>($t1) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t1) diff --git a/third_party/move/evm/move-to-yul/tests/TestStringLiteral.exp b/third_party/move/evm/move-to-yul/tests/TestStringLiteral.exp index 7ff6ff55a5591..d268ad845efe2 100644 --- a/third_party/move/evm/move-to-yul/tests/TestStringLiteral.exp +++ b/third_party/move/evm/move-to-yul/tests/TestStringLiteral.exp @@ -76,13 +76,13 @@ object "test_A2_M_h1" { $MemoryStoreU64($t5, 3) $MemoryStoreU64(add($t5, 8), $ClosestGreaterPowerOfTwo(3)) copy_literal_string_to_memory_2053440334(add($t5, 32)) - // $t6 := pack M::T($t5) + // $t6 := pack 0x2::M::T($t5) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t5) $t6 := $mem } - // move_to($t6, $t4) + // move_to<0x2::M::T>($t6, $t4) { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $LoadU256($t4)) if 
$AlignedStorageLoad($base_offset) { @@ -111,7 +111,7 @@ object "test_A2_M_h1" { } // $t7 := 0x3 $t7 := 0x3 - // $t8 := borrow_global($t7) + // $t8 := borrow_global<0x2::M::T>($t7) { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $t7) if iszero($AlignedStorageLoad($base_offset)) { @@ -119,7 +119,7 @@ object "test_A2_M_h1" { } $t8 := $MakePtr(true, add($base_offset, 32)) } - // $t9 := borrow_field.s($t8) + // $t9 := borrow_field<0x2::M::T>.s($t8) $t9 := $t8 // $t1 := read_ref($t9) mstore(add($locals, 32), $LoadU256($t9)) @@ -253,7 +253,7 @@ object "test_A2_M_h1" { copy_literal_string_to_memory_2788570470(add($t36, 32)) // $t37 := 0x3 $t37 := 0x3 - // $t38 := borrow_global($t37) + // $t38 := borrow_global<0x2::M::T>($t37) { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $t37) if iszero($AlignedStorageLoad($base_offset)) { @@ -261,7 +261,7 @@ object "test_A2_M_h1" { } $t38 := $MakePtr(true, add($base_offset, 32)) } - // $t39 := borrow_field.s($t38) + // $t39 := borrow_field<0x2::M::T>.s($t38) $t39 := $t38 // write_ref($t39, $t36) if $IsStoragePtr($t39){ @@ -279,7 +279,7 @@ object "test_A2_M_h1" { $StoreU256($t39, $t36) // $t40 := 0x3 $t40 := 0x3 - // $t41 := borrow_global($t40) + // $t41 := borrow_global<0x2::M::T>($t40) { let $base_offset := $MakeTypeStorageBase(0, 0x3948ca0a, $t40) if iszero($AlignedStorageLoad($base_offset)) { @@ -287,7 +287,7 @@ object "test_A2_M_h1" { } $t41 := $MakePtr(true, add($base_offset, 32)) } - // $t42 := borrow_field.s($t41) + // $t42 := borrow_field<0x2::M::T>.s($t41) $t42 := $t41 // $t2 := read_ref($t42) mstore(add($locals, 64), $LoadU256($t42)) diff --git a/third_party/move/evm/move-to-yul/tests/Vectors.exp b/third_party/move/evm/move-to-yul/tests/Vectors.exp index b173205f13135..0c52bbbddab24 100644 --- a/third_party/move/evm/move-to-yul/tests/Vectors.exp +++ b/third_party/move/evm/move-to-yul/tests/Vectors.exp @@ -622,7 +622,7 @@ object "test_A2_Vectors_test_borrow_mut" { $t5 := false // $t6 := 123 $t6 := 123 - // 
$t7 := pack Vectors::S($t4, $t5, $t6) + // $t7 := pack 0x2::Vectors::S($t4, $t5, $t6) { let $mem := $Malloc(25) $MemoryStoreU128(add($mem, 0), $t4) @@ -630,29 +630,29 @@ object "test_A2_Vectors_test_borrow_mut" { $MemoryStoreU64(add($mem, 16), $t6) $t7 := $mem } - // vector::push_back($t3, $t7) + // vector::push_back<0x2::Vectors::S>($t3, $t7) A1_vector_push_back$A2_Vectors_S$($t3, $t7) // $t8 := borrow_local($t2) $t8 := $MakePtr(false, $locals) // $t9 := 0 $t9 := 0 - // $t10 := vector::borrow_mut($t8, $t9) + // $t10 := vector::borrow_mut<0x2::Vectors::S>($t8, $t9) $t10 := A1_vector_borrow_mut$A2_Vectors_S$($t8, $t9) // $t11 := 90 $t11 := 90 - // $t12 := borrow_field.x($t10) + // $t12 := borrow_field<0x2::Vectors::S>.x($t10) $t12 := $t10 // write_ref($t12, $t11) $StoreU128($t12, $t11) // $t13 := false $t13 := false - // $t14 := borrow_field.y($t10) + // $t14 := borrow_field<0x2::Vectors::S>.y($t10) $t14 := $IndexPtr($t10, 24) // write_ref($t14, $t13) $StoreU8($t14, $t13) // $t15 := 1028 $t15 := 1028 - // $t16 := borrow_field.z($t10) + // $t16 := borrow_field<0x2::Vectors::S>.z($t10) $t16 := $IndexPtr($t10, 16) // write_ref($t16, $t15) $StoreU64($t16, $t15) @@ -660,9 +660,9 @@ object "test_A2_Vectors_test_borrow_mut" { $t17 := $MakePtr(false, $locals) // $t18 := 0 $t18 := 0 - // $t19 := vector::borrow($t17, $t18) + // $t19 := vector::borrow<0x2::Vectors::S>($t17, $t18) $t19 := A1_vector_borrow$A2_Vectors_S$($t17, $t18) - // $t20 := borrow_field.x($t19) + // $t20 := borrow_field<0x2::Vectors::S>.x($t19) $t20 := $t19 // $t21 := read_ref($t20) $t21 := $LoadU128($t20) @@ -681,9 +681,9 @@ object "test_A2_Vectors_test_borrow_mut" { $t25 := $MakePtr(false, $locals) // $t26 := 0 $t26 := 0 - // $t27 := vector::borrow($t25, $t26) + // $t27 := vector::borrow<0x2::Vectors::S>($t25, $t26) $t27 := A1_vector_borrow$A2_Vectors_S$($t25, $t26) - // $t28 := borrow_field.y($t27) + // $t28 := borrow_field<0x2::Vectors::S>.y($t27) $t28 := $IndexPtr($t27, 24) // $t29 := read_ref($t28) 
$t29 := $LoadU8($t28) @@ -714,9 +714,9 @@ object "test_A2_Vectors_test_borrow_mut" { $t33 := $MakePtr(false, $locals) // $t34 := 0 $t34 := 0 - // $t35 := vector::borrow($t33, $t34) + // $t35 := vector::borrow<0x2::Vectors::S>($t33, $t34) $t35 := A1_vector_borrow$A2_Vectors_S$($t33, $t34) - // $t36 := borrow_field.z($t35) + // $t36 := borrow_field<0x2::Vectors::S>.z($t35) $t36 := $IndexPtr($t35, 16) // $t37 := read_ref($t36) $t37 := $LoadU64($t36) @@ -747,9 +747,9 @@ object "test_A2_Vectors_test_borrow_mut" { $t41 := $MakePtr(false, $locals) // $t42 := 1 $t42 := 1 - // $t43 := vector::borrow($t41, $t42) + // $t43 := vector::borrow<0x2::Vectors::S>($t41, $t42) $t43 := A1_vector_borrow$A2_Vectors_S$($t41, $t42) - // $t44 := borrow_field.x($t43) + // $t44 := borrow_field<0x2::Vectors::S>.x($t43) $t44 := $t43 // $t45 := read_ref($t44) $t45 := $LoadU128($t44) @@ -780,9 +780,9 @@ object "test_A2_Vectors_test_borrow_mut" { $t49 := $MakePtr(false, $locals) // $t50 := 1 $t50 := 1 - // $t51 := vector::borrow($t49, $t50) + // $t51 := vector::borrow<0x2::Vectors::S>($t49, $t50) $t51 := A1_vector_borrow$A2_Vectors_S$($t49, $t50) - // $t52 := borrow_field.y($t51) + // $t52 := borrow_field<0x2::Vectors::S>.y($t51) $t52 := $IndexPtr($t51, 24) // $t53 := read_ref($t52) $t53 := $LoadU8($t52) @@ -813,9 +813,9 @@ object "test_A2_Vectors_test_borrow_mut" { $t57 := $MakePtr(false, $locals) // $t58 := 1 $t58 := 1 - // $t59 := vector::borrow($t57, $t58) + // $t59 := vector::borrow<0x2::Vectors::S>($t57, $t58) $t59 := A1_vector_borrow$A2_Vectors_S$($t57, $t58) - // $t60 := borrow_field.z($t59) + // $t60 := borrow_field<0x2::Vectors::S>.z($t59) $t60 := $IndexPtr($t59, 16) // $t61 := read_ref($t60) $t61 := $LoadU64($t60) @@ -846,23 +846,23 @@ object "test_A2_Vectors_test_borrow_mut" { $t65 := $MakePtr(false, $locals) // $t66 := 1 $t66 := 1 - // $t67 := vector::borrow_mut($t65, $t66) + // $t67 := vector::borrow_mut<0x2::Vectors::S>($t65, $t66) $t67 := A1_vector_borrow_mut$A2_Vectors_S$($t65, 
$t66) // $t68 := 10 $t68 := 10 - // $t69 := borrow_field.x($t67) + // $t69 := borrow_field<0x2::Vectors::S>.x($t67) $t69 := $t67 // write_ref($t69, $t68) $StoreU128($t69, $t68) // $t70 := true $t70 := true - // $t71 := borrow_field.y($t67) + // $t71 := borrow_field<0x2::Vectors::S>.y($t67) $t71 := $IndexPtr($t67, 24) // write_ref($t71, $t70) $StoreU8($t71, $t70) // $t72 := 456 $t72 := 456 - // $t73 := borrow_field.z($t67) + // $t73 := borrow_field<0x2::Vectors::S>.z($t67) $t73 := $IndexPtr($t67, 16) // write_ref($t73, $t72) $StoreU64($t73, $t72) @@ -870,9 +870,9 @@ object "test_A2_Vectors_test_borrow_mut" { $t74 := $MakePtr(false, $locals) // $t75 := 1 $t75 := 1 - // $t76 := vector::borrow($t74, $t75) + // $t76 := vector::borrow<0x2::Vectors::S>($t74, $t75) $t76 := A1_vector_borrow$A2_Vectors_S$($t74, $t75) - // $t77 := borrow_field.x($t76) + // $t77 := borrow_field<0x2::Vectors::S>.x($t76) $t77 := $t76 // $t78 := read_ref($t77) $t78 := $LoadU128($t77) @@ -903,9 +903,9 @@ object "test_A2_Vectors_test_borrow_mut" { $t82 := $MakePtr(false, $locals) // $t83 := 1 $t83 := 1 - // $t84 := vector::borrow($t82, $t83) + // $t84 := vector::borrow<0x2::Vectors::S>($t82, $t83) $t84 := A1_vector_borrow$A2_Vectors_S$($t82, $t83) - // $t85 := borrow_field.y($t84) + // $t85 := borrow_field<0x2::Vectors::S>.y($t84) $t85 := $IndexPtr($t84, 24) // $t86 := read_ref($t85) $t86 := $LoadU8($t85) @@ -936,9 +936,9 @@ object "test_A2_Vectors_test_borrow_mut" { $t90 := $MakePtr(false, $locals) // $t91 := 1 $t91 := 1 - // $t92 := vector::borrow($t90, $t91) + // $t92 := vector::borrow<0x2::Vectors::S>($t90, $t91) $t92 := A1_vector_borrow$A2_Vectors_S$($t90, $t91) - // $t93 := borrow_field.z($t92) + // $t93 := borrow_field<0x2::Vectors::S>.z($t92) $t93 := $IndexPtr($t92, 16) // $t94 := read_ref($t93) $t94 := $LoadU64($t93) @@ -1014,7 +1014,7 @@ object "test_A2_Vectors_test_borrow_mut" { function A2_Vectors_one_elem_struct() -> $result { let $t1, $t2, $t3, $t4, $t5, $t6 let $locals := $Malloc(32) 
- // $t0 := vector::empty() + // $t0 := vector::empty<0x2::Vectors::S>() mstore($locals, A1_vector_empty$A2_Vectors_S$()) // $t1 := borrow_local($t0) $t1 := $MakePtr(false, $locals) @@ -1024,7 +1024,7 @@ object "test_A2_Vectors_test_borrow_mut" { $t3 := true // $t4 := 789 $t4 := 789 - // $t5 := pack Vectors::S($t2, $t3, $t4) + // $t5 := pack 0x2::Vectors::S($t2, $t3, $t4) { let $mem := $Malloc(25) $MemoryStoreU128(add($mem, 0), $t2) @@ -1032,7 +1032,7 @@ object "test_A2_Vectors_test_borrow_mut" { $MemoryStoreU64(add($mem, 16), $t4) $t5 := $mem } - // vector::push_back($t1, $t5) + // vector::push_back<0x2::Vectors::S>($t1, $t5) A1_vector_push_back$A2_Vectors_S$($t1, $t5) // $t6 := move($t0) $t6 := mload($locals) @@ -1418,7 +1418,7 @@ object "test_A2_Vectors_test_destroy_non_empty_fail" { let $t0 // $t0 := Vectors::one_elem_struct() $t0 := A2_Vectors_one_elem_struct() - // vector::destroy_empty($t0) + // vector::destroy_empty<0x2::Vectors::S>($t0) A1_vector_destroy_empty$A2_Vectors_S$($t0) // return () } @@ -1432,7 +1432,7 @@ object "test_A2_Vectors_test_destroy_non_empty_fail" { function A2_Vectors_one_elem_struct() -> $result { let $t1, $t2, $t3, $t4, $t5, $t6 let $locals := $Malloc(32) - // $t0 := vector::empty() + // $t0 := vector::empty<0x2::Vectors::S>() mstore($locals, A1_vector_empty$A2_Vectors_S$()) // $t1 := borrow_local($t0) $t1 := $MakePtr(false, $locals) @@ -1442,7 +1442,7 @@ object "test_A2_Vectors_test_destroy_non_empty_fail" { $t3 := true // $t4 := 789 $t4 := 789 - // $t5 := pack Vectors::S($t2, $t3, $t4) + // $t5 := pack 0x2::Vectors::S($t2, $t3, $t4) { let $mem := $Malloc(25) $MemoryStoreU128(add($mem, 0), $t2) @@ -1450,7 +1450,7 @@ object "test_A2_Vectors_test_destroy_non_empty_fail" { $MemoryStoreU64(add($mem, 16), $t4) $t5 := $mem } - // vector::push_back($t1, $t5) + // vector::push_back<0x2::Vectors::S>($t1, $t5) A1_vector_push_back$A2_Vectors_S$($t1, $t5) // $t6 := move($t0) $t6 := mload($locals) @@ -3121,7 +3121,7 @@ object 
"test_A2_Vectors_test_one_elem_struct" { mstore($locals, A2_Vectors_one_elem_struct()) // $t1 := borrow_local($t0) $t1 := $MakePtr(false, $locals) - // $t2 := vector::length($t1) + // $t2 := vector::length<0x2::Vectors::S>($t1) $t2 := A1_vector_length$A2_Vectors_S$($t1) // $t3 := 1 $t3 := 1 @@ -3138,9 +3138,9 @@ object "test_A2_Vectors_test_one_elem_struct" { $t6 := $MakePtr(false, $locals) // $t7 := 0 $t7 := 0 - // $t8 := vector::borrow($t6, $t7) + // $t8 := vector::borrow<0x2::Vectors::S>($t6, $t7) $t8 := A1_vector_borrow$A2_Vectors_S$($t6, $t7) - // $t9 := borrow_field.x($t8) + // $t9 := borrow_field<0x2::Vectors::S>.x($t8) $t9 := $t8 // $t10 := read_ref($t9) $t10 := $LoadU128($t9) @@ -3171,9 +3171,9 @@ object "test_A2_Vectors_test_one_elem_struct" { $t14 := $MakePtr(false, $locals) // $t15 := 0 $t15 := 0 - // $t16 := vector::borrow($t14, $t15) + // $t16 := vector::borrow<0x2::Vectors::S>($t14, $t15) $t16 := A1_vector_borrow$A2_Vectors_S$($t14, $t15) - // $t17 := borrow_field.y($t16) + // $t17 := borrow_field<0x2::Vectors::S>.y($t16) $t17 := $IndexPtr($t16, 24) // $t18 := read_ref($t17) $t18 := $LoadU8($t17) @@ -3204,9 +3204,9 @@ object "test_A2_Vectors_test_one_elem_struct" { $t22 := $MakePtr(false, $locals) // $t23 := 0 $t23 := 0 - // $t24 := vector::borrow($t22, $t23) + // $t24 := vector::borrow<0x2::Vectors::S>($t22, $t23) $t24 := A1_vector_borrow$A2_Vectors_S$($t22, $t23) - // $t25 := borrow_field.z($t24) + // $t25 := borrow_field<0x2::Vectors::S>.z($t24) $t25 := $IndexPtr($t24, 16) // $t26 := read_ref($t25) $t26 := $LoadU64($t25) @@ -3257,7 +3257,7 @@ object "test_A2_Vectors_test_one_elem_struct" { function A2_Vectors_one_elem_struct() -> $result { let $t1, $t2, $t3, $t4, $t5, $t6 let $locals := $Malloc(32) - // $t0 := vector::empty() + // $t0 := vector::empty<0x2::Vectors::S>() mstore($locals, A1_vector_empty$A2_Vectors_S$()) // $t1 := borrow_local($t0) $t1 := $MakePtr(false, $locals) @@ -3267,7 +3267,7 @@ object "test_A2_Vectors_test_one_elem_struct" { 
$t3 := true // $t4 := 789 $t4 := 789 - // $t5 := pack Vectors::S($t2, $t3, $t4) + // $t5 := pack 0x2::Vectors::S($t2, $t3, $t4) { let $mem := $Malloc(25) $MemoryStoreU128(add($mem, 0), $t2) @@ -3275,7 +3275,7 @@ object "test_A2_Vectors_test_one_elem_struct" { $MemoryStoreU64(add($mem, 16), $t4) $t5 := $mem } - // vector::push_back($t1, $t5) + // vector::push_back<0x2::Vectors::S>($t1, $t5) A1_vector_push_back$A2_Vectors_S$($t1, $t5) // $t6 := move($t0) $t6 := mload($locals) @@ -6666,9 +6666,9 @@ object "test_A2_Vectors_test_vector_equality_struct" { $Abort($t7) } case 4 { - // $t2 := vector::empty() + // $t2 := vector::empty<0x2::Vectors::R>() mstore($locals, A1_vector_empty$A2_Vectors_R$()) - // $t3 := vector::empty() + // $t3 := vector::empty<0x2::Vectors::R>() mstore(add($locals, 32), A1_vector_empty$A2_Vectors_R$()) // $t4 := copy($t2) $t4 := mload($locals) @@ -6689,7 +6689,7 @@ object "test_A2_Vectors_test_vector_equality_struct" { $t9 := true // $t10 := 9 $t10 := 9 - // $t11 := pack Vectors::S($t8, $t9, $t10) + // $t11 := pack 0x2::Vectors::S($t8, $t9, $t10) { let $mem := $Malloc(25) $MemoryStoreU128(add($mem, 0), $t8) @@ -6699,7 +6699,7 @@ object "test_A2_Vectors_test_vector_equality_struct" { } // $t12 := Vectors::one_elem_u64() $t12 := A2_Vectors_one_elem_u64() - // $t13 := pack Vectors::R($t11, $t12) + // $t13 := pack 0x2::Vectors::R($t11, $t12) { let $mem := $Malloc(64) $MemoryStoreU256(add($mem, 0), $t11) @@ -6708,7 +6708,7 @@ object "test_A2_Vectors_test_vector_equality_struct" { } // $t14 := borrow_local($t2) $t14 := $MakePtr(false, $locals) - // vector::push_back($t14, $t13) + // vector::push_back<0x2::Vectors::R>($t14, $t13) A1_vector_push_back$A2_Vectors_R$($t14, $t13) // $t15 := copy($t2) $t15 := mload($locals) @@ -6737,7 +6737,7 @@ object "test_A2_Vectors_test_vector_equality_struct" { // label L5 // $t19 := borrow_local($t3) $t19 := $MakePtr(false, add($locals, 32)) - // vector::push_back($t19, $t13) + // vector::push_back<0x2::Vectors::R>($t19, 
$t13) A1_vector_push_back$A2_Vectors_R$($t19, $t13) // $t20 := copy($t2) $t20 := mload($locals) @@ -6770,7 +6770,7 @@ object "test_A2_Vectors_test_vector_equality_struct" { $t25 := false // $t26 := 9 $t26 := 9 - // $t27 := pack Vectors::S($t24, $t25, $t26) + // $t27 := pack 0x2::Vectors::S($t24, $t25, $t26) { let $mem := $Malloc(25) $MemoryStoreU128(add($mem, 0), $t24) @@ -6780,7 +6780,7 @@ object "test_A2_Vectors_test_vector_equality_struct" { } // $t28 := Vectors::one_elem_u64() $t28 := A2_Vectors_one_elem_u64() - // $t29 := pack Vectors::R($t27, $t28) + // $t29 := pack 0x2::Vectors::R($t27, $t28) { let $mem := $Malloc(64) $MemoryStoreU256(add($mem, 0), $t27) @@ -6789,7 +6789,7 @@ object "test_A2_Vectors_test_vector_equality_struct" { } // $t30 := borrow_local($t2) $t30 := $MakePtr(false, $locals) - // vector::push_back($t30, $t13) + // vector::push_back<0x2::Vectors::R>($t30, $t13) A1_vector_push_back$A2_Vectors_R$($t30, $t13) // $t31 := copy($t2) $t31 := mload($locals) @@ -6818,7 +6818,7 @@ object "test_A2_Vectors_test_vector_equality_struct" { // label L11 // $t35 := borrow_local($t3) $t35 := $MakePtr(false, add($locals, 32)) - // vector::push_back($t35, $t29) + // vector::push_back<0x2::Vectors::R>($t35, $t29) A1_vector_push_back$A2_Vectors_R$($t35, $t29) // $t36 := move($t2) $t36 := mload($locals) @@ -7282,7 +7282,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { $t9 := true // $t10 := 9 $t10 := 9 - // $t11 := pack Vectors::S($t8, $t9, $t10) + // $t11 := pack 0x2::Vectors::S($t8, $t9, $t10) { let $mem := $Malloc(25) $MemoryStoreU128(add($mem, 0), $t8) @@ -7292,7 +7292,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { } // $t12 := move($t1) $t12 := mload($locals) - // $t0 := pack Vectors::R($t11, $t12) + // $t0 := pack 0x2::Vectors::R($t11, $t12) { let $mem := $Malloc(64) $MemoryStoreU256(add($mem, 0), $t11) @@ -7301,7 +7301,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { } // $t13 := borrow_local($t0) $t13 := $MakePtr(false, r) - // $t14 
:= borrow_field.v($t13) + // $t14 := borrow_field<0x2::Vectors::R>.v($t13) $t14 := $IndexPtr($t13, 32) // $t15 := vector::length($t14) $t15 := A1_vector_length$u64$($t14) @@ -7318,7 +7318,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { // label L2 // $t19 := borrow_local($t0) $t19 := $MakePtr(false, r) - // $t20 := borrow_field.v($t19) + // $t20 := borrow_field<0x2::Vectors::R>.v($t19) $t20 := $IndexPtr($t19, 32) // $t21 := 0 $t21 := 0 @@ -7351,7 +7351,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { // label L5 // $t27 := borrow_local($t0) $t27 := $MakePtr(false, r) - // $t28 := borrow_field.v($t27) + // $t28 := borrow_field<0x2::Vectors::R>.v($t27) $t28 := $IndexPtr($t27, 32) // $t29 := 1 $t29 := 1 @@ -7384,7 +7384,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { // label L8 // $t35 := borrow_local($t0) $t35 := $MakePtr(false, r) - // $t36 := borrow_field.v($t35) + // $t36 := borrow_field<0x2::Vectors::R>.v($t35) $t36 := $IndexPtr($t35, 32) // $t37 := 2 $t37 := 2 @@ -7419,7 +7419,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { $t43 := 41 // $t44 := borrow_local($t0) $t44 := $MakePtr(false, r) - // $t45 := borrow_field.v($t44) + // $t45 := borrow_field<0x2::Vectors::R>.v($t44) $t45 := $IndexPtr($t44, 32) // $t46 := 1 $t46 := 1 @@ -7429,7 +7429,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { $StoreU64($t47, $t43) // $t48 := borrow_local($t0) $t48 := $MakePtr(false, r) - // $t49 := borrow_field.v($t48) + // $t49 := borrow_field<0x2::Vectors::R>.v($t48) $t49 := $IndexPtr($t48, 32) // $t50 := 1 $t50 := 1 @@ -7464,7 +7464,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { $t56 := A2_Vectors_one_elem_u64() // $t57 := borrow_local($t0) $t57 := $MakePtr(false, r) - // $t58 := borrow_field.v($t57) + // $t58 := borrow_field<0x2::Vectors::R>.v($t57) $t58 := $IndexPtr($t57, 32) // write_ref($t58, $t56) if $IsStoragePtr($t58){ @@ -7482,7 +7482,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { $StoreU256($t58, $t56) // $t59 := 
borrow_local($t0) $t59 := $MakePtr(false, r) - // $t60 := borrow_field.v($t59) + // $t60 := borrow_field<0x2::Vectors::R>.v($t59) $t60 := $IndexPtr($t59, 32) // $t61 := vector::length($t60) $t61 := A1_vector_length$u64$($t60) @@ -7511,7 +7511,7 @@ object "test_A2_Vectors_test_vectors_in_structs" { // label L17 // $t65 := borrow_local($t0) $t65 := $MakePtr(false, r) - // $t66 := borrow_field.v($t65) + // $t66 := borrow_field<0x2::Vectors::R>.v($t65) $t66 := $IndexPtr($t65, 32) // $t67 := 0 $t67 := 0 diff --git a/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherArrayDecoding.exp b/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherArrayDecoding.exp index 970b17cc61d8c..8d10b80137a38 100644 --- a/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherArrayDecoding.exp +++ b/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherArrayDecoding.exp @@ -461,7 +461,7 @@ object "A2_M" { case 3 { // $t7 := borrow_local($t0) $t7 := $MakePtr(false, $locals) - // $t8 := vector::length>($t7) + // $t8 := vector::length>($t7) $t8 := A1_vector_length$vec$A2_U256_U256$$($t7) // $t9 := 0 $t9 := 0 @@ -488,17 +488,17 @@ object "A2_M" { // label L2 // $t11 := borrow_local($t0) $t11 := $MakePtr(false, $locals) - // $t12 := vector::borrow>($t11, $t1) + // $t12 := vector::borrow>($t11, $t1) $t12 := A1_vector_borrow$vec$A2_U256_U256$$($t11, i) // $t13 := 0 $t13 := 0 - // $t14 := vector::borrow($t12, $t13) + // $t14 := vector::borrow<0x2::U256::U256>($t12, $t13) $t14 := A1_vector_borrow$A2_U256_U256$($t12, $t13) // $t15 := read_ref($t14) $t15 := $LoadU256($t14) // $t16 := 1 $t16 := 1 - // $t17 := vector::borrow($t12, $t16) + // $t17 := vector::borrow<0x2::U256::U256>($t12, $t16) $t17 := A1_vector_borrow$A2_U256_U256$($t12, $t16) // $t18 := read_ref($t17) $t18 := $LoadU256($t17) @@ -689,9 +689,9 @@ object "A2_M" { // label L2 // $t10 := borrow_local($t0) $t10 := $MakePtr(false, $locals) - // $t11 := vector::borrow>($t10, $t1) + // $t11 := 
vector::borrow>($t10, $t1) $t11 := A1_vector_borrow$vec$A2_U256_U256$$($t10, i) - // $t12 := vector::length($t11) + // $t12 := vector::length<0x2::U256::U256>($t11) $t12 := A1_vector_length$A2_U256_U256$($t11) // $t13 := 0 $t13 := 0 @@ -726,7 +726,7 @@ object "A2_M" { } case 10 { // label L5 - // $t15 := vector::borrow($t11, $t2) + // $t15 := vector::borrow<0x2::U256::U256>($t11, $t2) $t15 := A1_vector_borrow$A2_U256_U256$($t11, j) // $t16 := read_ref($t15) $t16 := $LoadU256($t15) @@ -959,7 +959,7 @@ object "A2_M" { case 3 { // $t7 := borrow_local($t0) $t7 := $MakePtr(false, $locals) - // $t8 := vector::length>($t7) + // $t8 := vector::length>($t7) $t8 := A1_vector_length$vec$A2_U256_U256$$($t7) // $t9 := 0 $t9 := 0 @@ -986,17 +986,17 @@ object "A2_M" { // label L2 // $t11 := borrow_local($t0) $t11 := $MakePtr(false, $locals) - // $t12 := vector::borrow>($t11, $t1) + // $t12 := vector::borrow>($t11, $t1) $t12 := A1_vector_borrow$vec$A2_U256_U256$$($t11, i) // $t13 := 0 $t13 := 0 - // $t14 := vector::borrow($t12, $t13) + // $t14 := vector::borrow<0x2::U256::U256>($t12, $t13) $t14 := A1_vector_borrow$A2_U256_U256$($t12, $t13) // $t15 := read_ref($t14) $t15 := $LoadU256($t14) // $t16 := 1 $t16 := 1 - // $t17 := vector::borrow($t12, $t16) + // $t17 := vector::borrow<0x2::U256::U256>($t12, $t16) $t17 := A1_vector_borrow$A2_U256_U256$($t12, $t16) // $t18 := read_ref($t17) $t18 := $LoadU256($t17) diff --git a/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherBasicStorage.exp b/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherBasicStorage.exp index bc0463b511e92..f18ac758a350e 100644 --- a/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherBasicStorage.exp +++ b/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherBasicStorage.exp @@ -26,7 +26,7 @@ object "A2_M" { let $t0, $t1 // $t0 := 0 $t0 := 0 - // $t1 := pack M::Storage($t0) + // $t1 := pack 0x2::M::Storage($t0) { let $mem := $Malloc(8) $MemoryStoreU64(add($mem, 
0), $t0) @@ -139,7 +139,7 @@ object "A2_M" { } function A2_M_current(self) -> $result { let $t1, $t2 - // $t1 := borrow_field.counter($t0) + // $t1 := borrow_field<0x2::M::Storage>.counter($t0) $t1 := self // $t2 := read_ref($t1) $t2 := $LoadU64($t1) @@ -149,7 +149,7 @@ object "A2_M" { function A2_M_increment(self) { let $t1, $t2, $t3, $t4, $t5 - // $t1 := borrow_field.counter($t0) + // $t1 := borrow_field<0x2::M::Storage>.counter($t0) $t1 := self // $t2 := read_ref($t1) $t2 := $LoadU64($t1) @@ -157,7 +157,7 @@ object "A2_M" { $t3 := 1 // $t4 := +($t2, $t3) $t4 := $AddU64($t2, $t3) - // $t5 := borrow_field.counter($t0) + // $t5 := borrow_field<0x2::M::Storage>.counter($t0) $t5 := self // write_ref($t5, $t4) $StoreU64($t5, $t4) @@ -166,7 +166,7 @@ object "A2_M" { function A2_M_receive(self) { let $t1, $t2, $t3, $t4, $t5 - // $t1 := borrow_field.counter($t0) + // $t1 := borrow_field<0x2::M::Storage>.counter($t0) $t1 := self // $t2 := read_ref($t1) $t2 := $LoadU64($t1) @@ -174,7 +174,7 @@ object "A2_M" { $t3 := 2 // $t4 := +($t2, $t3) $t4 := $AddU64($t2, $t3) - // $t5 := borrow_field.counter($t0) + // $t5 := borrow_field<0x2::M::Storage>.counter($t0) $t5 := self // write_ref($t5, $t4) $StoreU64($t5, $t4) diff --git a/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherEncodingStorage.exp b/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherEncodingStorage.exp index 9112f59810d17..b748bf5ebed78 100644 --- a/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherEncodingStorage.exp +++ b/third_party/move/evm/move-to-yul/tests/test-dispatcher/DispatcherEncodingStorage.exp @@ -61,13 +61,13 @@ object "A2_M" { $t5 := $MakePtr(false, add($locals, 32)) // $t6 := move($t1) $t6 := mload($locals) - // $t7 := pack M::T($t6) + // $t7 := pack 0x2::M::T($t6) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t6) $t7 := $mem } - // move_to>($t7, $t5) + // move_to<0x2::M::T>($t7, $t5) { let $base_offset := $MakeTypeStorageBase(0, 0x2e75402e, 
$LoadU256($t5)) if $AlignedStorageLoad($base_offset) { @@ -96,7 +96,7 @@ object "A2_M" { } // $t8 := 0x42 $t8 := 0x42 - // $t9 := borrow_global>($t8) + // $t9 := borrow_global<0x2::M::T>($t8) { let $base_offset := $MakeTypeStorageBase(0, 0x2e75402e, $t8) if iszero($AlignedStorageLoad($base_offset)) { @@ -104,7 +104,7 @@ object "A2_M" { } $t9 := $MakePtr(true, add($base_offset, 32)) } - // $t10 := borrow_field>.v($t9) + // $t10 := borrow_field<0x2::M::T>.v($t9) $t10 := $t9 // $t11 := read_ref($t10) $t11 := $LoadU256($t10) @@ -139,19 +139,19 @@ object "A2_M" { $t4 := 65 // vector::push_back($t3, $t4) A1_vector_push_back$u8$($t3, $t4) - // $t2 := vector::empty() + // $t2 := vector::empty<0x2::U256::U256>() mstore(add($locals, 32), A1_vector_empty$A2_U256_U256$()) // $t5 := borrow_local($t2) $t5 := $MakePtr(false, add($locals, 32)) // $t6 := 64 $t6 := 64 - // vector::push_back($t5, $t6) + // vector::push_back<0x2::U256::U256>($t5, $t6) A1_vector_push_back$A2_U256_U256$($t5, $t6) // $t7 := borrow_local($t2) $t7 := $MakePtr(false, add($locals, 32)) // $t8 := 65 $t8 := 65 - // vector::push_back($t7, $t8) + // vector::push_back<0x2::U256::U256>($t7, $t8) A1_vector_push_back$A2_U256_U256$($t7, $t8) // $t9 := 0x42 $t9 := 0x42 @@ -163,14 +163,14 @@ object "A2_M" { $t11 := mload($locals) // $t12 := move($t2) $t12 := mload(add($locals, 32)) - // $t13 := pack M::State($t11, $t12) + // $t13 := pack 0x2::M::State($t11, $t12) { let $mem := $Malloc(64) $MemoryStoreU256(add($mem, 0), $t11) $MemoryStoreU256(add($mem, 32), $t12) $t13 := $mem } - // move_to($t13, $t10) + // move_to<0x2::M::State>($t13, $t10) { let $base_offset := $MakeTypeStorageBase(0, 0x7d9cc237, $LoadU256($t10)) if $AlignedStorageLoad($base_offset) { @@ -213,7 +213,7 @@ object "A2_M" { } // $t14 := 0x42 $t14 := 0x42 - // $t15 := borrow_global($t14) + // $t15 := borrow_global<0x2::M::State>($t14) { let $base_offset := $MakeTypeStorageBase(0, 0x7d9cc237, $t14) if iszero($AlignedStorageLoad($base_offset)) { @@ -221,7 
+221,7 @@ object "A2_M" { } $t15 := $MakePtr(true, add($base_offset, 32)) } - // $t16 := borrow_field.s1($t15) + // $t16 := borrow_field<0x2::M::State>.s1($t15) $t16 := $t15 // $t17 := read_ref($t16) $t17 := $LoadU256($t16) @@ -242,7 +242,7 @@ object "A2_M" { } // $t18 := 0x42 $t18 := 0x42 - // $t19 := borrow_global($t18) + // $t19 := borrow_global<0x2::M::State>($t18) { let $base_offset := $MakeTypeStorageBase(0, 0x7d9cc237, $t18) if iszero($AlignedStorageLoad($base_offset)) { @@ -250,7 +250,7 @@ object "A2_M" { } $t19 := $MakePtr(true, add($base_offset, 32)) } - // $t20 := borrow_field.s2($t19) + // $t20 := borrow_field<0x2::M::State>.s2($t19) $t20 := $IndexPtr($t19, 32) // $t21 := read_ref($t20) $t21 := $LoadU256($t20) diff --git a/third_party/move/evm/move-to-yul/tests/test-dispatcher/ExternalCall.exp b/third_party/move/evm/move-to-yul/tests/test-dispatcher/ExternalCall.exp index a4be571a266c6..3026fbefb4c60 100644 --- a/third_party/move/evm/move-to-yul/tests/test-dispatcher/ExternalCall.exp +++ b/third_party/move/evm/move-to-yul/tests/test-dispatcher/ExternalCall.exp @@ -114,7 +114,7 @@ object "A2_M" { // label L1 // $t4 := move($t1) $t4 := v - // $t5 := ExternalResult::unwrap($t4) + // $t5 := ExternalResult::unwrap<0x2::U256::U256>($t4) $t5 := A2_ExternalResult_unwrap$A2_U256_U256$($t4) // return $t5 $result := $t5 @@ -133,7 +133,7 @@ object "A2_M" { v := A2_M_success(addr) // $t2 := borrow_local($t1) $t2 := $MakePtr(false, v) - // $t3 := ExternalResult::is_ok($t2) + // $t3 := ExternalResult::is_ok<0x2::U256::U256>($t2) $t3 := A2_ExternalResult_is_ok$A2_U256_U256$($t2) // if ($t3) goto L1 else goto L0 switch $t3 @@ -152,7 +152,7 @@ object "A2_M" { // label L1 // $t4 := move($t1) $t4 := v - // $t5 := ExternalResult::unwrap($t4) + // $t5 := ExternalResult::unwrap<0x2::Evm::Unit>($t4) $t5 := A2_ExternalResult_unwrap$A2_Evm_Unit$($t4) // drop($t5) $Free($t5, 1) @@ -169,7 +169,7 @@ object "A2_M" { v := A2_M_test_unit(addr) // $t2 := borrow_local($t1) $t2 := 
$MakePtr(false, v) - // $t3 := ExternalResult::is_ok($t2) + // $t3 := ExternalResult::is_ok<0x2::Evm::Unit>($t2) $t3 := A2_ExternalResult_is_ok$A2_Evm_Unit$($t2) // if ($t3) goto L1 else goto L0 switch $t3 @@ -197,7 +197,7 @@ object "A2_M" { let data, v, $t2, $t3, $t4, $t5, $t6 // $t2 := 0 $t2 := 0 - // $t3 := vector::empty() + // $t3 := vector::empty<0x2::U256::U256>() $t3 := A1_vector_empty$A2_U256_U256$() // $t4 := 0x3 $t4 := 0x3 @@ -251,7 +251,7 @@ object "A2_M" { // label L0 // $t7 := borrow_local($t1) $t7 := $MakePtr(false, value) - // $t8 := ExternalResult::is_err_reason($t7) + // $t8 := ExternalResult::is_err_reason<0x2::U256::U256>($t7) $t8 := A2_ExternalResult_is_err_reason$A2_U256_U256$($t7) // if ($t8) goto L3 else goto L2 switch $t8 @@ -267,7 +267,7 @@ object "A2_M" { value := A2_M_test_try_call($t3, $t2) // $t4 := borrow_local($t1) $t4 := $MakePtr(false, value) - // $t5 := ExternalResult::is_ok($t4) + // $t5 := ExternalResult::is_ok<0x2::U256::U256>($t4) $t5 := A2_ExternalResult_is_ok$A2_U256_U256$($t4) // if ($t5) goto L1 else goto L0 switch $t5 @@ -286,7 +286,7 @@ object "A2_M" { // label L2 // $t10 := borrow_local($t1) $t10 := $MakePtr(false, value) - // $t11 := ExternalResult::is_panic($t10) + // $t11 := ExternalResult::is_panic<0x2::U256::U256>($t10) $t11 := A2_ExternalResult_is_panic$A2_U256_U256$($t10) // if ($t11) goto L5 else goto L4 switch $t11 @@ -314,12 +314,12 @@ object "A2_M" { function A2_ExternalResult_is_panic$A2_U256_U256$(result) -> $result { let $t1, $t2 - // $t1 := borrow_field>.panic_code($t0) + // $t1 := borrow_field<0x2::ExternalResult::ExternalResult<#0>>.panic_code($t0) { let $field_ptr := $IndexPtr(result, 96) $t1 := $MakePtr($IsStoragePtr($field_ptr), $LoadU256($field_ptr)) } - // $t2 := option::is_some($t1) + // $t2 := option::is_some<0x2::U256::U256>($t1) $t2 := A1_option_is_some$A2_U256_U256$($t1) // return $t2 $result := $t2 @@ -327,7 +327,7 @@ object "A2_M" { function A1_option_is_some$A2_U256_U256$(t) -> $result { let 
$t1, $t2, $t3 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$A2_U256_U256$($t1) @@ -356,7 +356,7 @@ object "A2_M" { } function A2_ExternalResult_is_ok$A2_U256_U256$(result) -> $result { let $t1, $t2 - // $t1 := borrow_field>.value($t0) + // $t1 := borrow_field<0x2::ExternalResult::ExternalResult<#0>>.value($t0) { $t1 := $MakePtr($IsStoragePtr(result), $LoadU256(result)) } @@ -412,9 +412,9 @@ object "A2_M" { $t2 := A1_option_some$vec$u8$$(error) // $t3 := option::none>() $t3 := A1_option_none$vec$u8$$() - // $t4 := option::none() + // $t4 := option::none<0x2::U256::U256>() $t4 := A1_option_none$A2_U256_U256$() - // $t5 := pack ExternalResult::ExternalResult<#0>($t1, $t2, $t3, $t4) + // $t5 := pack 0x2::ExternalResult::ExternalResult<#0>($t1, $t2, $t3, $t4) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t1) @@ -431,7 +431,7 @@ object "A2_M" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$A2_U256_U256$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -449,7 +449,7 @@ object "A2_M" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$vec$u8$$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -467,7 +467,7 @@ object "A2_M" { let $t1, $t2 // $t1 := vector::singleton<#0>($t0) $t1 := A1_vector_singleton$vec$u8$$(e) - // $t2 := pack option::Option<#0>($t1) + // $t2 := pack 0x1::option::Option<#0>($t1) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t1) @@ -529,9 +529,9 @@ object "A2_M" { $t6 := A1_option_none$vec$u8$$() // $t7 := option::none>() $t7 := A1_option_none$vec$u8$$() - // $t8 := option::some($t0) + // $t8 := option::some<0x2::U256::U256>($t0) $t8 := 
A1_option_some$A2_U256_U256$(panic_code) - // $t9 := pack ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) + // $t9 := pack 0x2::ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t5) @@ -548,7 +548,7 @@ object "A2_M" { let $t1, $t2 // $t1 := vector::singleton<#0>($t0) $t1 := A1_vector_singleton$A2_U256_U256$(e) - // $t2 := pack option::Option<#0>($t1) + // $t2 := pack 0x1::option::Option<#0>($t1) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t1) @@ -596,9 +596,9 @@ object "A2_M" { $t6 := A1_option_some$vec$u8$$(error) // $t7 := option::none>() $t7 := A1_option_none$vec$u8$$() - // $t8 := option::none() + // $t8 := option::none<0x2::U256::U256>() $t8 := A1_option_none$A2_U256_U256$() - // $t9 := pack ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) + // $t9 := pack 0x2::ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t5) @@ -619,9 +619,9 @@ object "A2_M" { $t2 := A1_option_none$vec$u8$$() // $t3 := option::none>() $t3 := A1_option_none$vec$u8$$() - // $t4 := option::none() + // $t4 := option::none<0x2::U256::U256>() $t4 := A1_option_none$A2_U256_U256$() - // $t5 := pack ExternalResult::ExternalResult<#0>($t1, $t2, $t3, $t4) + // $t5 := pack 0x2::ExternalResult::ExternalResult<#0>($t1, $t2, $t3, $t4) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t1) @@ -636,7 +636,7 @@ object "A2_M" { function A2_ExternalResult_is_err_reason$A2_U256_U256$(result) -> $result { let $t1, $t2 - // $t1 := borrow_field>.err_reason($t0) + // $t1 := borrow_field<0x2::ExternalResult::ExternalResult<#0>>.err_reason($t0) { let $field_ptr := $IndexPtr(result, 64) $t1 := $MakePtr($IsStoragePtr($field_ptr), $LoadU256($field_ptr)) @@ -649,7 +649,7 @@ object "A2_M" { function A1_option_is_some$vec$u8$$(t) -> $result { let $t1, $t2, $t3 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := 
t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$vec$u8$$($t1) @@ -742,7 +742,7 @@ object "A2_M" { } function A2_ExternalResult_is_ok$A2_Evm_Unit$(result) -> $result { let $t1, $t2 - // $t1 := borrow_field>.value($t0) + // $t1 := borrow_field<0x2::ExternalResult::ExternalResult<#0>>.value($t0) { $t1 := $MakePtr($IsStoragePtr(result), $LoadU256(result)) } @@ -754,7 +754,7 @@ object "A2_M" { function A1_option_is_some$A2_Evm_Unit$(t) -> $result { let $t1, $t2, $t3 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$A2_Evm_Unit$($t1) @@ -827,9 +827,9 @@ object "A2_M" { $t2 := A1_option_some$vec$u8$$(error) // $t3 := option::none>() $t3 := A1_option_none$vec$u8$$() - // $t4 := option::none() + // $t4 := option::none<0x2::U256::U256>() $t4 := A1_option_none$A2_U256_U256$() - // $t5 := pack ExternalResult::ExternalResult<#0>($t1, $t2, $t3, $t4) + // $t5 := pack 0x2::ExternalResult::ExternalResult<#0>($t1, $t2, $t3, $t4) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t1) @@ -846,7 +846,7 @@ object "A2_M" { let $t0, $t1 // $t0 := vector::empty<#0>() $t0 := A1_vector_empty$A2_Evm_Unit$() - // $t1 := pack option::Option<#0>($t0) + // $t1 := pack 0x1::option::Option<#0>($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), $t0) @@ -868,9 +868,9 @@ object "A2_M" { $t6 := A1_option_none$vec$u8$$() // $t7 := option::none>() $t7 := A1_option_none$vec$u8$$() - // $t8 := option::some($t0) + // $t8 := option::some<0x2::U256::U256>($t0) $t8 := A1_option_some$A2_U256_U256$(panic_code) - // $t9 := pack ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) + // $t9 := pack 0x2::ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t5) @@ -891,9 +891,9 @@ object "A2_M" { $t6 := A1_option_some$vec$u8$$(error) // $t7 := option::none>() $t7 := A1_option_none$vec$u8$$() - // $t8 := 
option::none() + // $t8 := option::none<0x2::U256::U256>() $t8 := A1_option_none$A2_U256_U256$() - // $t9 := pack ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) + // $t9 := pack 0x2::ExternalResult::ExternalResult<#0>($t5, $t7, $t6, $t8) { let $mem := $Malloc(128) $MemoryStoreU256(add($mem, 0), $t5) @@ -908,7 +908,7 @@ object "A2_M" { function A2_ExternalResult_unwrap$A2_Evm_Unit$(result) -> $result { let err_data, err_reason, panic_code, value, $t5, $t6, $t7, $t8, $t9 - // ($t5, $t6, $t7, $t8) := unpack ExternalResult::ExternalResult<#0>($t0) + // ($t5, $t6, $t7, $t8) := unpack 0x2::ExternalResult::ExternalResult<#0>($t0) $t5 := $MemoryLoadU256(add(result, 0)) $t6 := $MemoryLoadU256(add(result, 32)) $t7 := $MemoryLoadU256(add(result, 64)) @@ -918,7 +918,7 @@ object "A2_M" { A1_option_destroy_none$vec$u8$$($t6) // option::destroy_none>($t7) A1_option_destroy_none$vec$u8$$($t7) - // option::destroy_none($t8) + // option::destroy_none<0x2::U256::U256>($t8) A1_option_destroy_none$A2_U256_U256$($t8) // $t9 := option::destroy_some<#0>($t5) $t9 := A1_option_destroy_some$A2_Evm_Unit$($t5) @@ -958,7 +958,7 @@ object "A2_M" { // label L2 // $t6 := move($t0) $t6 := t - // $t2 := unpack option::Option<#0>($t6) + // $t2 := unpack 0x1::option::Option<#0>($t6) mstore($locals, $MemoryLoadU256(add($t6, 0))) $Free($t6, 32) // $t7 := borrow_local($t2) @@ -1031,7 +1031,7 @@ object "A2_M" { // label L2 // $t4 := move($t0) $t4 := t - // $t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -1050,7 +1050,7 @@ object "A2_M" { } function A1_option_is_none$A2_U256_U256$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$A2_U256_U256$($t1) @@ -1089,7 +1089,7 @@ object "A2_M" { // label L2 // $t4 := move($t0) $t4 := t - // 
$t5 := unpack option::Option<#0>($t4) + // $t5 := unpack 0x1::option::Option<#0>($t4) $t5 := $MemoryLoadU256(add($t4, 0)) $Free($t4, 32) // vector::destroy_empty<#0>($t5) @@ -1108,7 +1108,7 @@ object "A2_M" { } function A1_option_is_none$vec$u8$$(t) -> $result { let $t1, $t2 - // $t1 := borrow_field>.vec($t0) + // $t1 := borrow_field<0x1::option::Option<#0>>.vec($t0) $t1 := t // $t2 := vector::is_empty<#0>($t1) $t2 := A1_vector_is_empty$vec$u8$$($t1) @@ -1164,7 +1164,7 @@ object "A2_M" { function A2_ExternalResult_unwrap$A2_U256_U256$(result) -> $result { let err_data, err_reason, panic_code, value, $t5, $t6, $t7, $t8, $t9 - // ($t5, $t6, $t7, $t8) := unpack ExternalResult::ExternalResult<#0>($t0) + // ($t5, $t6, $t7, $t8) := unpack 0x2::ExternalResult::ExternalResult<#0>($t0) $t5 := $MemoryLoadU256(add(result, 0)) $t6 := $MemoryLoadU256(add(result, 32)) $t7 := $MemoryLoadU256(add(result, 64)) @@ -1174,7 +1174,7 @@ object "A2_M" { A1_option_destroy_none$vec$u8$$($t6) // option::destroy_none>($t7) A1_option_destroy_none$vec$u8$$($t7) - // option::destroy_none($t8) + // option::destroy_none<0x2::U256::U256>($t8) A1_option_destroy_none$A2_U256_U256$($t8) // $t9 := option::destroy_some<#0>($t5) $t9 := A1_option_destroy_some$A2_U256_U256$($t5) @@ -1214,7 +1214,7 @@ object "A2_M" { // label L2 // $t6 := move($t0) $t6 := t - // $t2 := unpack option::Option<#0>($t6) + // $t2 := unpack 0x1::option::Option<#0>($t6) mstore($locals, $MemoryLoadU256(add($t6, 0))) $Free($t6, 32) // $t7 := borrow_local($t2) diff --git a/third_party/move/evm/move-to-yul/tests/test-events/CallEmit.exp b/third_party/move/evm/move-to-yul/tests/test-events/CallEmit.exp index ab8e443702abe..57270d396370a 100644 --- a/third_party/move/evm/move-to-yul/tests/test-events/CallEmit.exp +++ b/third_party/move/evm/move-to-yul/tests/test-events/CallEmit.exp @@ -137,7 +137,7 @@ object "A2_M" { $Abort(97) function A2_M_do_event_1(from, to, amount) { let $t3 - // $t3 := pack M::Event_1($t0, $t1, $t2) + // $t3 := 
pack 0x2::M::Event_1($t0, $t1, $t2) { let $mem := $Malloc(65) $MemoryStoreU8(add($mem, 64), from) @@ -145,14 +145,14 @@ object "A2_M" { $MemoryStoreU256(add($mem, 32), amount) $t3 := $mem } - // Evm::emit($t3) + // Evm::emit<0x2::M::Event_1>($t3) A2_Evm_emit$A2_M_Event_1$($t3) // return () } function A2_M_do_event_2(v1, v2, v3, v4) { let $t4 - // $t4 := pack M::Event_2($t0, $t1, $t2, $t3) + // $t4 := pack 0x2::M::Event_2($t0, $t1, $t2, $t3) { let $mem := $Malloc(73) $MemoryStoreU8(add($mem, 72), v1) @@ -161,14 +161,14 @@ object "A2_M" { $MemoryStoreU256(add($mem, 0), v4) $t4 := $mem } - // Evm::emit($t4) + // Evm::emit<0x2::M::Event_2>($t4) A2_Evm_emit$A2_M_Event_2$($t4) // return () } function A2_M_do_event_3(from, to, amount) { let $t3 - // $t3 := pack M::Event_3($t0, $t1, $t2) + // $t3 := pack 0x2::M::Event_3($t0, $t1, $t2) { let $mem := $Malloc(65) $MemoryStoreU8(add($mem, 64), from) @@ -176,14 +176,14 @@ object "A2_M" { $MemoryStoreU256(add($mem, 32), amount) $t3 := $mem } - // Evm::emit($t3) + // Evm::emit<0x2::M::Event_3>($t3) A2_Evm_emit$A2_M_Event_3$($t3) // return () } function A2_M_do_event_4(v1, v2, v3) { let $t3 - // $t3 := pack M::Event_4($t0, $t1, $t2) + // $t3 := pack 0x2::M::Event_4($t0, $t1, $t2) { let $mem := $Malloc(96) $MemoryStoreU256(add($mem, 0), v1) @@ -191,28 +191,28 @@ object "A2_M" { $MemoryStoreU256(add($mem, 64), v3) $t3 := $mem } - // Evm::emit($t3) + // Evm::emit<0x2::M::Event_4>($t3) A2_Evm_emit$A2_M_Event_4$($t3) // return () } function A2_M_do_event_5(bys, str) { let $t2 - // $t2 := pack M::Event_5($t0, $t1) + // $t2 := pack 0x2::M::Event_5($t0, $t1) { let $mem := $Malloc(64) $MemoryStoreU256(add($mem, 0), bys) $MemoryStoreU256(add($mem, 32), str) $t2 := $mem } - // Evm::emit($t2) + // Evm::emit<0x2::M::Event_5>($t2) A2_Evm_emit$A2_M_Event_5$($t2) // return () } function A2_M_do_event_6(bys, str, uint16_array) { let $t3 - // $t3 := pack M::Event_6($t0, $t1, $t2) + // $t3 := pack 0x2::M::Event_6($t0, $t1, $t2) { let $mem := 
$Malloc(96) $MemoryStoreU256(add($mem, 0), bys) @@ -220,41 +220,41 @@ object "A2_M" { $MemoryStoreU256(add($mem, 64), uint16_array) $t3 := $mem } - // Evm::emit($t3) + // Evm::emit<0x2::M::Event_6>($t3) A2_Evm_emit$A2_M_Event_6$($t3) // return () } function A2_M_do_event_7(bys) { let $t1 - // $t1 := pack M::Event_7($t0) + // $t1 := pack 0x2::M::Event_7($t0) { let $mem := $Malloc(32) $MemoryStoreU256(add($mem, 0), bys) $t1 := $mem } - // Evm::emit($t1) + // Evm::emit<0x2::M::Event_7>($t1) A2_Evm_emit$A2_M_Event_7$($t1) // return () } function A2_M_do_event_8(bys, strs) { let $t2 - // $t2 := pack M::Event_8($t0, $t1) + // $t2 := pack 0x2::M::Event_8($t0, $t1) { let $mem := $Malloc(64) $MemoryStoreU256(add($mem, 0), bys) $MemoryStoreU256(add($mem, 32), strs) $t2 := $mem } - // Evm::emit($t2) + // Evm::emit<0x2::M::Event_8>($t2) A2_Evm_emit$A2_M_Event_8$($t2) // return () } function A2_M_do_transfer(from, to, amount) { let $t3 - // $t3 := pack M::Transfer($t0, $t1, $t2) + // $t3 := pack 0x2::M::Transfer($t0, $t1, $t2) { let $mem := $Malloc(96) $MemoryStoreU256(add($mem, 0), from) @@ -262,7 +262,7 @@ object "A2_M" { $MemoryStoreU256(add($mem, 64), amount) $t3 := $mem } - // Evm::emit($t3) + // Evm::emit<0x2::M::Transfer>($t3) A2_Evm_emit$A2_M_Transfer$($t3) // return () } diff --git a/third_party/move/move-binary-format/src/check_bounds.rs b/third_party/move/move-binary-format/src/check_bounds.rs index 610d1f1b037b3..91e52ee07fb5e 100644 --- a/third_party/move/move-binary-format/src/check_bounds.rs +++ b/third_party/move/move-binary-format/src/check_bounds.rs @@ -271,6 +271,13 @@ impl<'a> BoundsChecker<'a> { fn check_variant_field_handle(&self, field_handle: &VariantFieldHandle) -> PartialVMResult<()> { check_bounds_impl_opt(&self.view.struct_defs(), field_handle.struct_index)?; + if field_handle.variants.is_empty() { + return Err(verification_error( + StatusCode::ZERO_VARIANTS_ERROR, + IndexKind::MemberCount, + field_handle.field, + )); + } let struct_def = 
self.view.struct_def_at(field_handle.struct_index)?; for variant in &field_handle.variants { Self::check_variant_index(struct_def, *variant)?; diff --git a/third_party/move/move-binary-format/src/file_format_common.rs b/third_party/move/move-binary-format/src/file_format_common.rs index 3a85904097856..b9790db3d35f8 100644 --- a/third_party/move/move-binary-format/src/file_format_common.rs +++ b/third_party/move/move-binary-format/src/file_format_common.rs @@ -518,9 +518,9 @@ pub const VERSION_8: u32 = 8; pub const VERSION_MAX: u32 = VERSION_7; /// Mark which version is the default version. This is the version used by default by tools like -/// the compiler. Notice that this version might be different than the one supported on nodes. +/// the compiler. Notice that this version might be different from the one supported on nodes. /// The node's max version is determined by the on-chain config for that node. -pub const VERSION_DEFAULT: u32 = VERSION_6; +pub const VERSION_DEFAULT: u32 = VERSION_7; /// Mark which version is the default version if compiling Move 2. 
pub const VERSION_DEFAULT_LANG_V2: u32 = VERSION_7; diff --git a/third_party/move/move-bytecode-verifier/transactional-tests/tests/type_safety/gerbens_test.exp b/third_party/move/move-bytecode-verifier/transactional-tests/tests/type_safety/gerbens_test.exp index 21191ffa107a6..739b14b80441d 100644 --- a/third_party/move/move-bytecode-verifier/transactional-tests/tests/type_safety/gerbens_test.exp +++ b/third_party/move/move-bytecode-verifier/transactional-tests/tests/type_safety/gerbens_test.exp @@ -5,6 +5,6 @@ Error: error[E04024]: invalid usage of function type ┌─ TEMPFILE:9:62 │ 9 │ public fun for_each_ref(v: &vector, f: |&Element|) { - │ ^^^^^^^^^^ function type only allowed for inline function arguments + │ ^^^^^^^^^^ function-typed values only allowed for inline function arguments diff --git a/third_party/move/move-compiler-v2/src/bytecode_generator.rs b/third_party/move/move-compiler-v2/src/bytecode_generator.rs index e20f3b09bf19e..9cb45341d9743 100644 --- a/third_party/move/move-compiler-v2/src/bytecode_generator.rs +++ b/third_party/move/move-compiler-v2/src/bytecode_generator.rs @@ -406,9 +406,12 @@ impl<'env> Generator<'env> { ), ); } - self.emit_call(*id, targets, BytecodeOperation::WriteRef, vec![ - lhs_temp, rhs_temp, - ]) + self.emit_call( + *id, + targets, + BytecodeOperation::WriteRef, + vec![lhs_temp, rhs_temp], + ) }, ExpData::Assign(id, lhs, rhs) => self.gen_assign(*id, lhs, rhs, None), ExpData::Return(id, exp) => { @@ -447,11 +450,11 @@ impl<'env> Generator<'env> { self.emit_with(*id, |attr| Bytecode::Jump(attr, continue_label)); self.emit_with(*id, |attr| Bytecode::Label(attr, break_label)); }, - ExpData::LoopCont(id, do_continue) => { + ExpData::LoopCont(id, nest, do_continue) => { if let Some(LoopContext { continue_label, break_label, - }) = self.loops.last() + }) = self.loops.iter().rev().nth(*nest) { let target = if *do_continue { *continue_label @@ -476,9 +479,16 @@ impl<'env> Generator<'env> { 
.rewrite_spec_descent(&SpecBlockTarget::Inline, spec); self.emit_with(*id, |attr| Bytecode::SpecBlock(attr, spec)); }, - ExpData::Invoke(id, _, _) | ExpData::Lambda(id, _, _) => { - self.internal_error(*id, format!("not yet implemented: {:?}", exp)) - }, + // TODO(LAMBDA) + ExpData::Lambda(id, _, _) => self.error( + *id, + "Function-typed values not yet supported except as parameters to calls to inline functions", + ), + // TODO(LAMBDA) + ExpData::Invoke(_, exp, _) => self.error( + exp.as_ref().node_id(), + "Calls to function values other than inline function parameters not yet supported", + ), ExpData::Quant(id, _, _, _, _, _) => { self.internal_error(*id, "unsupported specification construct") }, @@ -803,7 +813,11 @@ impl<'env> Generator<'env> { Operation::NoOp => {}, // do nothing - Operation::Closure(..) => self.internal_error(id, "closure not yet implemented"), + // TODO(LAMBDA) + Operation::Closure(..) => self.error( + id, + "Function-typed values not yet supported except as parameters to calls to inline functions", + ), // Non-supported specification related operations Operation::Exists(Some(_)) diff --git a/third_party/move/move-compiler-v2/src/env_pipeline/function_checker.rs b/third_party/move/move-compiler-v2/src/env_pipeline/function_checker.rs index 57a9976ed3635..d7dbcc3cfce2b 100644 --- a/third_party/move/move-compiler-v2/src/env_pipeline/function_checker.rs +++ b/third_party/move/move-compiler-v2/src/env_pipeline/function_checker.rs @@ -3,55 +3,180 @@ //! Do a few checks of functions and function calls. 
-use crate::Options; +use crate::{experiments::Experiment, Options}; use codespan_reporting::diagnostic::Severity; use move_binary_format::file_format::Visibility; use move_model::{ ast::{ExpData, Operation, Pattern}, - model::{FunId, FunctionEnv, GlobalEnv, Loc, ModuleEnv, NodeId, QualifiedId}, + model::{FunId, FunctionEnv, GlobalEnv, Loc, ModuleEnv, NodeId, Parameter, QualifiedId}, ty::Type, }; -use std::{collections::BTreeSet, iter::Iterator, vec::Vec}; +use std::{collections::BTreeSet, iter::Iterator, ops::Deref, vec::Vec}; type QualifiedFunId = QualifiedId; -/// check that non-inline function parameters do not have function type. +// Takes a list of function types, returns those which have a function type in their argument type +fn identify_function_types_with_functions_in_args(func_types: Vec) -> Vec { + func_types + .into_iter() + .filter_map(|ty| { + if let Type::Fun(argt, _) = &ty { + if argt.deref().has_function() { + Some(ty) + } else { + None + } + } else { + None + } + }) + .collect() +} + +// Takes a list of function-typed parameters, along with argument and result type +// Returns a list of any parameters whose result type has a function value, along with that result type. +fn identify_function_typed_params_with_functions_in_rets( + func_types: Vec<&Parameter>, +) -> Vec<(&Parameter, &Type)> { + func_types + .iter() + .filter_map(|param| { + if let Type::Fun(_argt, rest) = ¶m.1 { + let rest_unboxed = rest.deref(); + if rest_unboxed.has_function() { + Some((*param, rest_unboxed)) + } else { + None + } + } else { + None + } + }) + .collect() +} + +/// check that function parameters/results do not have function type unless allowed. +/// (1) is there a function type arg at the top level? This is allowed for inline or LAMBDA_IN_PARAMS +/// (2) is there a function type result at the top level? This is allowed only for LAMBDA_IN_RETURNS +/// (3) is there *any* function type with function type in an arg? 
This is allowed only for LAMBDA_IN_PARAMS +/// (4) is there *any* function type with function type in a result? This is allowed only for LAMBDA_IN_RETURNS pub fn check_for_function_typed_parameters(env: &mut GlobalEnv) { + let options = env + .get_extension::() + .expect("Options is available"); + let lambda_params_ok = options.experiment_on(Experiment::LAMBDA_IN_PARAMS); + let lambda_return_ok = options.experiment_on(Experiment::LAMBDA_IN_RETURNS); + if lambda_params_ok && lambda_return_ok { + return; + } + for caller_module in env.get_modules() { if caller_module.is_primary_target() { for caller_func in caller_module.get_functions() { - // Check that non-inline function parameters don't have function type - if !caller_func.is_inline() { - let parameters = caller_func.get_parameters(); - let bad_params: Vec<_> = parameters + if !lambda_params_ok || !lambda_return_ok { + let caller_name = caller_func.get_full_name_str(); + let return_type = caller_func.get_result_type(); + let func_returns: Vec<_> = return_type + .clone() + .flatten() + .into_iter() + .filter(|t| t.is_function()) + .collect(); + let type_display_ctx = caller_func.get_type_display_ctx(); + if !func_returns.is_empty() { + // (2) is there a function type result at the top level? This is allowed + // only for LAMBDA_IN_RETURNS + if !lambda_return_ok && !func_returns.is_empty() { + env.diag( + Severity::Error, + &caller_func.get_result_type_loc(), + &format!("Functions may not return function-typed values, but function `{}` return type is the function type `{}`:", + &caller_name, + return_type.display(&type_display_ctx)), + ) + } + if !lambda_params_ok { + // (3) is there *any* function type with function type in an arg? 
This + // is allowed only for LAMBDA_IN_PARAMS + let bad_returns = + identify_function_types_with_functions_in_args(func_returns); + if !bad_returns.is_empty() { + env.diag( + Severity::Error, + &caller_func.get_result_type_loc(), + &format!("Non-inline functions may not take function-typed parameters, but function `{}` return type is `{}`, which has a function type taking a function parameter:", + &caller_name, + return_type.display(&type_display_ctx)), + ) + } + } + } + + let parameters = caller_func.get_parameters_ref(); + let func_params: Vec<_> = parameters .iter() - .filter(|param| matches!(param.1, Type::Fun(_, _))) + .filter(|param| matches!(param.1, Type::Fun(..))) .collect(); - if !bad_params.is_empty() { - let caller_name = caller_func.get_full_name_str(); - let reasons: Vec<(Loc, String)> = bad_params - .iter() - .map(|param| { - ( - param.2.clone(), - format!( - "Parameter `{}` has a function type.", - param.0.display(env.symbol_pool()), + if !func_params.is_empty() { + // (1) is there a function type arg at the top level? This is allowed for + // inline or LAMBDA_IN_PARAMS + if !caller_func.is_inline() && !lambda_params_ok { + let reasons: Vec<(Loc, String)> = func_params + .iter() + .map(|param| { + ( + param.2.clone(), + format!( + "Parameter `{}` has function-valued type `{}`.", + param.0.display(env.symbol_pool()), + param.1.display(&type_display_ctx) + ), + ) + }) + .collect(); + env.diag_with_labels( + Severity::Error, + &caller_func.get_id_loc(), + &format!("Only inline functions may have function-typed parameters, but non-inline function `{}` has {}:", + caller_name, + if reasons.len() > 1 { "function parameters" } else { "a function parameter" }, + ), + reasons, + ); + } + if !lambda_return_ok { + // (4) is there *any* function type with function type in its result? 
This is + // allowed only for LAMBDA_IN_RETURNS + let bad_params = + identify_function_typed_params_with_functions_in_rets(func_params); + if !bad_params.is_empty() { + let reasons: Vec<(Loc, String)> = bad_params + .iter() + .map(|(param, ty)| { + ( + param.2.clone(), + format!( + "Parameter `{}` has type `{}`, which has function type `{}` as a function result type", + param.0.display(env.symbol_pool()), + param.1.display(&type_display_ctx), + ty.display(&type_display_ctx), + ), + ) + }) + .collect(); + env.diag_with_labels( + Severity::Error, + &caller_func.get_id_loc(), + &format!("Functions may not return function-typed values, but function `{}` has {} of function type with function-typed result:", + caller_name, + if reasons.len() > 1 { "parameters" } else { "a parameter" }, ), - ) - }) - .collect(); - env.diag_with_labels( - Severity::Error, - &caller_func.get_id_loc(), - &format!("Only inline functions may have function-typed parameters, but non-inline function `{}` has {}:", - caller_name, - if reasons.len() > 1 { "function parameters" } else { "a function parameter" }, - ), - reasons, - ); + reasons, + ); + } + } } - } + }; } } } diff --git a/third_party/move/move-compiler-v2/src/env_pipeline/inliner.rs b/third_party/move/move-compiler-v2/src/env_pipeline/inliner.rs index 6047eae1a82ab..77a4ce7eefede 100644 --- a/third_party/move/move-compiler-v2/src/env_pipeline/inliner.rs +++ b/third_party/move/move-compiler-v2/src/env_pipeline/inliner.rs @@ -800,7 +800,7 @@ impl<'env, 'rewriter> InlinedRewriter<'env, 'rewriter> { (lambda expressions)", ) }, - ExpData::LoopCont(node_id, is_continue) if !post && in_loop == 0 => { + ExpData::LoopCont(node_id, _, is_continue) if !post && in_loop == 0 => { let node_loc = env.get_node_loc(*node_id); env.error( &node_loc, @@ -1046,7 +1046,7 @@ impl<'env, 'rewriter> ExpRewriterFunctions for InlinedRewriter<'env, 'rewriter> self.in_loop += 1; true }, - ExpData::LoopCont(node_id, is_continue) if self.in_loop == 0 => { + 
ExpData::LoopCont(node_id, _, is_continue) if self.in_loop == 0 => { let node_loc = self.env.get_node_loc(*node_id); self.env.error( &node_loc, diff --git a/third_party/move/move-compiler-v2/src/env_pipeline/lambda_lifter.rs b/third_party/move/move-compiler-v2/src/env_pipeline/lambda_lifter.rs index cb11736440a66..8326c3e8af618 100644 --- a/third_party/move/move-compiler-v2/src/env_pipeline/lambda_lifter.rs +++ b/third_party/move/move-compiler-v2/src/env_pipeline/lambda_lifter.rs @@ -309,7 +309,7 @@ impl<'a> ExpRewriterFunctions for LambdaLifter<'a> { env.error( &loc, &format!( - "captured variable `{}` cannot be modified inside of a lambda", + "captured variable `{}` cannot be modified inside of a lambda", // TODO(LAMBDA) name.display(env.symbol_pool()) ), ); @@ -327,7 +327,7 @@ impl<'a> ExpRewriterFunctions for LambdaLifter<'a> { env.error( &loc, &format!( - "captured variable `{}` cannot be modified inside of a lambda", + "captured variable `{}` cannot be modified inside of a lambda", // TODO(LAMBDA) name.display(env.symbol_pool()) ), ); diff --git a/third_party/move/move-compiler-v2/src/env_pipeline/recursive_struct_checker.rs b/third_party/move/move-compiler-v2/src/env_pipeline/recursive_struct_checker.rs index 29c5a6b299bca..9f4d51838ff56 100644 --- a/third_party/move/move-compiler-v2/src/env_pipeline/recursive_struct_checker.rs +++ b/third_party/move/move-compiler-v2/src/env_pipeline/recursive_struct_checker.rs @@ -101,7 +101,7 @@ impl<'a> RecursiveStructChecker<'a> { self.report_invalid_field(&struct_env, &field_env); } }, - Type::Primitive(_) | Type::TypeParameter(_) => {}, + Type::Primitive(_) | Type::TypeParameter(_) | Type::Fun(..) => {}, _ => unreachable!("invalid field type"), } path.pop(); @@ -195,7 +195,7 @@ impl<'a> RecursiveStructChecker<'a> { .iter() .any(|ty| self.ty_contains_struct(path, ty, loc.clone(), struct_id, checked)) }, - Type::Primitive(_) | Type::TypeParameter(_) => false, + Type::Primitive(_) | Type::TypeParameter(_) | Type::Fun(..) 
=> false, _ => panic!("ICE: {:?} used as a type parameter", ty), } } diff --git a/third_party/move/move-compiler-v2/src/env_pipeline/unused_params_checker.rs b/third_party/move/move-compiler-v2/src/env_pipeline/unused_params_checker.rs index 276b8dc3d675c..e9da0d5cbb053 100644 --- a/third_party/move/move-compiler-v2/src/env_pipeline/unused_params_checker.rs +++ b/third_party/move/move-compiler-v2/src/env_pipeline/unused_params_checker.rs @@ -55,12 +55,16 @@ fn used_type_parameters_in_fields(struct_env: &StructEnv) -> BTreeSet { fn used_type_parameters_in_ty(ty: &Type) -> BTreeSet { match ty { Type::Primitive(_) => BTreeSet::new(), - Type::Struct(_, _, tys) => tys.iter().flat_map(used_type_parameters_in_ty).collect(), + Type::Tuple(tys) | Type::Struct(_, _, tys) => { + tys.iter().flat_map(used_type_parameters_in_ty).collect() + }, Type::TypeParameter(i) => BTreeSet::from([*i]), Type::Vector(ty) => used_type_parameters_in_ty(ty), + Type::Fun(t1, t2) => [t1, t2] + .iter() + .flat_map(|t| used_type_parameters_in_ty(t)) + .collect(), Type::Reference(..) - | Type::Fun(..) - | Type::Tuple(..) | Type::TypeDomain(..) | Type::ResourceDomain(..) 
| Type::Error diff --git a/third_party/move/move-compiler-v2/src/experiments.rs b/third_party/move/move-compiler-v2/src/experiments.rs index d7bdf7820c00e..ceea7b65d8403 100644 --- a/third_party/move/move-compiler-v2/src/experiments.rs +++ b/third_party/move/move-compiler-v2/src/experiments.rs @@ -111,11 +111,32 @@ pub static EXPERIMENTS: Lazy> = Lazy::new(|| { description: "Turns on or off specification rewriting".to_string(), default: Given(false), }, + Experiment { + name: Experiment::LAMBDA_FIELDS.to_string(), + description: "Turns on or off function values in struct fields".to_string(), + default: Given(false), + }, Experiment { name: Experiment::LAMBDA_LIFTING.to_string(), description: "Turns on or off lambda lifting".to_string(), default: Given(false), }, + Experiment { + name: Experiment::LAMBDA_IN_PARAMS.to_string(), + description: "Turns on or off function values as parameters to non-inline functions" + .to_string(), + default: Given(false), + }, + Experiment { + name: Experiment::LAMBDA_IN_RETURNS.to_string(), + description: "Turns on or off function values in function return values".to_string(), + default: Given(false), + }, + Experiment { + name: Experiment::LAMBDA_VALUES.to_string(), + description: "Turns on or off first-class function values".to_string(), + default: Given(false), + }, Experiment { name: Experiment::RECURSIVE_TYPE_CHECK.to_string(), description: "Turns on or off checking of recursive structs and type instantiations" @@ -275,7 +296,11 @@ impl Experiment { pub const INLINING: &'static str = "inlining"; pub const KEEP_INLINE_FUNS: &'static str = "keep-inline-funs"; pub const KEEP_UNINIT_ANNOTATIONS: &'static str = "keep-uninit-annotations"; + pub const LAMBDA_FIELDS: &'static str = "lambda-fields"; + pub const LAMBDA_IN_PARAMS: &'static str = "lambda-in-params"; + pub const LAMBDA_IN_RETURNS: &'static str = "lambda-in-returns"; pub const LAMBDA_LIFTING: &'static str = "lambda-lifting"; + pub const LAMBDA_VALUES: &'static str = 
"lambda-values"; pub const LINT_CHECKS: &'static str = "lint-checks"; pub const OPTIMIZE: &'static str = "optimize"; pub const OPTIMIZE_EXTRA: &'static str = "optimize-extra"; diff --git a/third_party/move/move-compiler-v2/src/file_format_generator/module_generator.rs b/third_party/move/move-compiler-v2/src/file_format_generator/module_generator.rs index 4fd794aca6c6b..483020e61a99e 100644 --- a/third_party/move/move-compiler-v2/src/file_format_generator/module_generator.rs +++ b/third_party/move/move-compiler-v2/src/file_format_generator/module_generator.rs @@ -365,7 +365,18 @@ impl ModuleGenerator { ReferenceKind::Mutable => FF::SignatureToken::MutableReference(target_ty), } }, - Fun(_, _) | TypeDomain(_) | ResourceDomain(_, _, _) | Error | Var(_) => { + Fun(_param_ty, _result_ty) => { + // TODO(LAMBDA) + ctx.error( + loc, + format!( + "Unexpected type: {}", + ty.display(&ctx.env.get_type_display_ctx()) + ), + ); + FF::SignatureToken::Bool + }, + TypeDomain(_) | ResourceDomain(_, _, _) | Error | Var(_) => { ctx.internal_error( loc, format!( diff --git a/third_party/move/move-compiler-v2/tests/README.md b/third_party/move/move-compiler-v2/tests/README.md index 7cdf8a1683b1a..122c8c07d531b 100644 --- a/third_party/move/move-compiler-v2/tests/README.md +++ b/third_party/move/move-compiler-v2/tests/README.md @@ -1,29 +1,70 @@ # Compiler Unit Tests -This directory contains the unit tests for the compiler. For end-to-end tests, see the [`transactional_tests`](../transactional-tests). +This directory contains the unit tests for the compiler. For end-to-end tests, see +the [`transactional_tests`](../transactional-tests). ## Test Organization -Unit tests are organized along phases of the compiler. Ideally a unit test is focused on the particular aspect this phase implements. +Unit tests are organized along phases of the compiler. Ideally a unit test is focused on the +particular aspect this phase implements. 
The compiler phases are organized as follows: -- Building of the `GlobalEnv`, which includes type checking and inference of the program. Related tests are in [`checking`](./checking). +- Building of the `GlobalEnv`, which includes type checking and inference of the program. Related + tests are in [`checking`](./checking). - Transformation of the GlobalEnv (e.g. inlining) - Generation of stack-less bytecode, tests are in [`bytecode-generator`](./bytecode-generator). -- Any number of bytecode level checkers or transformers (currently `live-var` and `reference-safety` and `visibility-checker`) -- The last and final phase of the file format generator, tests are in [`file-format-generator`](./file_format_generator) - +- Any number of bytecode level checkers or transformers (currently `live-var` and `reference-safety` + and `visibility-checker`) +- The last and final phase of the file format generator, tests are + in [`file-format-generator`](./file_format_generator) ## V1 Test Migration -Tests from the v1 compiler test suite are incrementally ported to the v2 tree. Every single test should be vetted that the v2 compiler delivers the correct (equivalent) result before it is ported into v2. Exception to this rule should be marked with a github issue. +Tests from the v1 compiler test suite are incrementally ported to the v2 tree. Every single test +should be vetted that the v2 compiler delivers the correct (equivalent) result before it is ported +into v2. Exception to this rule should be marked with a github issue. There are two files which represent the current state of test migration: -- [`v1.unmatched`](./v1.unmatched): this contains a list of the tests which currently have no matching equivalent in the v2 test suite. -- [`v1.matched`](./v1.matched): this contains a list of the pairs of matched test expectation (`.exp`) files, for further processing - -To update those files run the script [`update_v1_diff.sh`](./update_v1_diff.sh). 
To see the rules how those lists are produced, see the code at [`tools/testdiff`](../tools/testdiff). +- [`v1.unmatched`](./v1.unmatched): this contains a list of the tests which currently have no + matching equivalent in the v2 test suite. +- [`v1.matched`](./v1.matched): this contains a list of the pairs of matched test + expectation (`.exp`) files, for further processing + +To update those files run the script [`update_v1_diff.sh`](./update_v1_diff.sh). To see the rules +how those lists are produced, see the code at [`tools/testdiff`](../tools/testdiff). + +In order to migrate a test such that the tool can keep track of it, ensure that you place it in a +similar named parent directory (anywhere in the v2 test tree). For example, for a +test `move-check/x/y.move`, ensure the test can be found somewhere at `x/y.move` in the v2 tree. + +### About v1/v2 test comparison + +Notice that test comparison is a tedious manual process for the following reasons: + +- The errors reported by v1 and v2 have a rather different structure (different text, more or + less additional notes and labels, etc.) . Also the order in which errors are generated is + different. A textual diff is therefore basically useless. Rather the manual comparison entails: ( + a) going one-by-one over each error in the v1 exp file. and find the error at the same line numer + in the v2 .exp file (b) deciding whether the errors are compatible (c) reasoning whether if one + error is missed, it is semantically represented by a different one (the same logical error can + reported at different locations in the file, an artifact of type inference) (d) checking out all + v2 errors whether non are obsolete. + +- v1 and v2 have different phase structure and order of analysis. For example, many files in the + v1 test suite do not fully compile, and don't need to, because they hit the tested blocking error + before a secondary one is produced. 
But then in the other compiler (either v1 or v2), the + secondary error may become the primary one, masking the tested error. For example, in v1 reference + analysis errors mask ability analysis errors, but in v2 its the other way around. This leads to + that test sources needed to be modified. + +- In the case of reference safety comparison becomes even more difficult because the semantics of + those both is different. For example, v1 enforces borrow rules on x in statement like x; (refer to + x and forget its value). Not so v2: one must actually use the variable (as in *x;). Those + differences have been discussed in multiple team meetings and are by design. -In order to migrate a test such that the tool can keep track of it, ensure that you place it in a similar named parent directory (anywhere in the v2 test tree). For example, for a test `move-check/x/y.move`, ensure the test can be found somewhere at `x/y.move` in the v2 tree. +Because of this it is expensive to do test comparison, and essential that we follow the migration +process as outlined above. Specifically, do _not_ bulk copy tests into the v2 tree without +manual auditing them, and do _not_ fork tests, even if they are modified, so the relation +between v1/v2 tests is maintained. 
\ No newline at end of file diff --git a/third_party/move/move-compiler-v2/tests/ability-check/ability_violation.exp b/third_party/move/move-compiler-v2/tests/ability-check/ability_violation.exp index 3910bd4b361dc..449755574b05a 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/ability_violation.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/ability_violation.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `x` of type `ability::Impotent` does not have the `copy` ability +error: local `x` of type `Impotent` does not have the `copy` ability ┌─ tests/ability-check/ability_violation.move:7:10 │ 7 │ (x, x); @@ -8,13 +8,13 @@ error: local `x` of type `ability::Impotent` does not have the `copy` ability │ │ │ copy needed here because value is still in use -error: value of type `ability::Impotent` does not have the `drop` ability +error: value of type `Impotent` does not have the `drop` ability ┌─ tests/ability-check/ability_violation.move:7:10 │ 7 │ (x, x); │ ^ implicitly dropped here since it is no longer used -error: value of type `ability::Impotent` does not have the `drop` ability +error: value of type `Impotent` does not have the `drop` ability ┌─ tests/ability-check/ability_violation.move:7:13 │ 7 │ (x, x); diff --git a/third_party/move/move-compiler-v2/tests/ability-check/alive_since_borrowed.exp b/third_party/move/move-compiler-v2/tests/ability-check/alive_since_borrowed.exp index 780b27d837589..558387a805c8a 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/alive_since_borrowed.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/alive_since_borrowed.exp @@ -1,12 +1,12 @@ Diagnostics: -error: local `x` of type `test::Impotent` does not have the `drop` ability +error: local `x` of type `Impotent` does not have the `drop` ability ┌─ tests/ability-check/alive_since_borrowed.move:7:17 │ 7 │ let y = &x; │ ^^ still borrowed but will be implicitly dropped later since it is no longer used -error: local `x` of 
type `test::S` does not have the `drop` ability +error: local `x` of type `S` does not have the `drop` ability ┌─ tests/ability-check/alive_since_borrowed.move:21:9 │ 21 │ x.g.h diff --git a/third_party/move/move-compiler-v2/tests/ability-check/assign.exp b/third_party/move/move-compiler-v2/tests/ability-check/assign.exp index 0b42b9566280a..342c5583dcf4a 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/assign.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/assign.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `s` of type `assign::S` does not have the `drop` ability +error: local `s` of type `S` does not have the `drop` ability ┌─ tests/ability-check/assign.move:17:9 │ 17 │ *s = S { f: 42, g: T { h: 42 } }; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/bug_14189.exp b/third_party/move/move-compiler-v2/tests/ability-check/bug_14189.exp index 9be1b39f7a68d..1f466890ce1b3 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/bug_14189.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/bug_14189.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `x1` of type `test::S2` does not have the `copy` ability +error: local `x1` of type `S2` does not have the `copy` ability ┌─ tests/ability-check/bug_14189.move:34:18 │ 34 │ let x2 = S3 { x: x1, y: x0, z: x1 }; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/bug_14223_unused_non_droppable_no_abort.exp b/third_party/move/move-compiler-v2/tests/ability-check/bug_14223_unused_non_droppable_no_abort.exp index 7686f9261cd64..b796cb2449c58 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/bug_14223_unused_non_droppable_no_abort.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/bug_14223_unused_non_droppable_no_abort.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `_x` of type `Module0::S` does not have the `drop` ability +error: local `_x` of type `S` does not have the `drop` ability ┌─ 
tests/ability-check/bug_14223_unused_non_droppable_no_abort.move:5:18 │ 5 │ let _x = S {}; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/bug_14227.exp b/third_party/move/move-compiler-v2/tests/ability-check/bug_14227.exp index fd998cd08ec9e..9d9cc031d2a49 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/bug_14227.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/bug_14227.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `_common_fields` of type `m::CommonFields` does not have the `copy` ability +error: local `_common_fields` of type `CommonFields` does not have the `copy` ability ┌─ tests/ability-check/bug_14227.move:21:16 │ 21 │ y: vector[_common_fields] diff --git a/third_party/move/move-compiler-v2/tests/ability-check/explicit_move.exp b/third_party/move/move-compiler-v2/tests/ability-check/explicit_move.exp index 6d7bca3a95988..82955ef74829c 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/explicit_move.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/explicit_move.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `x` of type `m::R` does not have the `copy` ability +error: local `x` of type `R` does not have the `copy` ability ┌─ tests/ability-check/explicit_move.move:12:9 │ 12 │ some(x); diff --git a/third_party/move/move-compiler-v2/tests/ability-check/index_ability_err.exp b/third_party/move/move-compiler-v2/tests/ability-check/index_ability_err.exp index f43ecc4cd3d91..fa105ebb9bb60 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/index_ability_err.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/index_ability_err.exp @@ -1,6 +1,6 @@ Diagnostics: -error: value of type `test::Y>` does not have the `copy` ability +error: value of type `Y>` does not have the `copy` ability ┌─ tests/ability-check/index_ability_err.move:12:17 │ 12 │ let _ = Y>[addr]; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/loop_abort.exp 
b/third_party/move/move-compiler-v2/tests/ability-check/loop_abort.exp index ad91b8f3de0f4..b1b16e0db1718 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/loop_abort.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/loop_abort.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `_x` of type `Test::Impotent` does not have the `drop` ability +error: local `_x` of type `Impotent` does not have the `drop` ability ┌─ tests/ability-check/loop_abort.move:11:18 │ 11 │ let _x = Impotent {}; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/assign_unpack_references.exp b/third_party/move/move-compiler-v2/tests/ability-check/typing/assign_unpack_references.exp index bbc6a9d36aef0..2c976c12f11c9 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/typing/assign_unpack_references.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/typing/assign_unpack_references.exp @@ -1,12 +1,12 @@ Diagnostics: -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/typing/assign_unpack_references.move:17:33 │ 17 │ R { s1: S { f }, s2 } = &R { s1: S{f: 0}, s2: S{f: 1} }; f; s2; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ still borrowed but will be implicitly dropped later since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/typing/assign_unpack_references.move:27:33 │ 27 │ R { s1: S { f }, s2 } = &mut R { s1: S{f: 0}, s2: S{f: 1} }; f; s2; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/bind_unpack_references.exp b/third_party/move/move-compiler-v2/tests/ability-check/typing/bind_unpack_references.exp index ac2b384d7f6a7..4ebf1bda482f5 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/typing/bind_unpack_references.exp +++ 
b/third_party/move/move-compiler-v2/tests/ability-check/typing/bind_unpack_references.exp @@ -1,12 +1,12 @@ Diagnostics: -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/typing/bind_unpack_references.move:13:41 │ 13 │ let R { s1: S { f }, s2 }: &R = &R { s1: S{f: 0}, s2: S{f: 1} }; f; s2; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ still borrowed but will be implicitly dropped later since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/typing/bind_unpack_references.move:20:45 │ 20 │ let R { s1: S { f }, s2 }: &mut R = &mut R { s1: S{f: 0}, s2: S{f: 1} }; f; s2; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/borrow_local_temp_resource.exp b/third_party/move/move-compiler-v2/tests/ability-check/typing/borrow_local_temp_resource.exp index 470ba80ca3109..e094596ecd526 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/typing/borrow_local_temp_resource.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/typing/borrow_local_temp_resource.exp @@ -1,12 +1,12 @@ Diagnostics: -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/typing/borrow_local_temp_resource.move:6:9 │ 6 │ &R{}; │ ^^^^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/typing/borrow_local_temp_resource.move:7:9 │ 7 │ &mut R{}; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/derefrence_reference.exp b/third_party/move/move-compiler-v2/tests/ability-check/typing/derefrence_reference.exp index 75db7a1e361f1..b2f9787f94a1a 100644 --- 
a/third_party/move/move-compiler-v2/tests/ability-check/typing/derefrence_reference.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/typing/derefrence_reference.exp @@ -1,42 +1,42 @@ Diagnostics: -error: local `r` of type `M::R` does not have the `copy` ability +error: local `r` of type `R` does not have the `copy` ability ┌─ tests/ability-check/typing/derefrence_reference.move:6:16 │ 6 │ R {} = *r; │ ^^ reference content copied here -error: local `b` of type `M::B` does not have the `copy` ability +error: local `b` of type `B` does not have the `copy` ability ┌─ tests/ability-check/typing/derefrence_reference.move:7:24 │ 7 │ B { r: R{} } = *b; │ ^^ reference content copied here -error: value of type `M::R` does not have the `copy` ability +error: value of type `R` does not have the `copy` ability ┌─ tests/ability-check/typing/derefrence_reference.move:8:17 │ 8 │ R{} = *&b.r; │ ^^^ reference content copied here -error: local `r` of type `M::R` does not have the `copy` ability +error: local `r` of type `R` does not have the `copy` ability ┌─ tests/ability-check/typing/derefrence_reference.move:12:16 │ 12 │ R {} = *r; │ ^^ reference content copied here -error: local `b` of type `M::B` does not have the `copy` ability +error: local `b` of type `B` does not have the `copy` ability ┌─ tests/ability-check/typing/derefrence_reference.move:13:24 │ 13 │ B { r: R{} } = *b; │ ^^ reference content copied here -error: value of type `M::R` does not have the `copy` ability +error: value of type `R` does not have the `copy` ability ┌─ tests/ability-check/typing/derefrence_reference.move:14:17 │ 14 │ R{} = *&b.r; │ ^^^ reference content copied here -error: value of type `M::R` does not have the `copy` ability +error: value of type `R` does not have the `copy` ability ┌─ tests/ability-check/typing/derefrence_reference.move:15:21 │ 15 │ R{} = *&mut b.r; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/eq_invalid2.exp 
b/third_party/move/move-compiler-v2/tests/ability-check/typing/eq_invalid2.exp index 7fea4b627a31d..a415502fc4815 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/typing/eq_invalid2.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/typing/eq_invalid2.exp @@ -1,18 +1,18 @@ Diagnostics: -error: local `r` of type `M::R` does not have the `copy` ability +error: local `r` of type `R` does not have the `copy` ability ┌─ tests/ability-check/typing/eq_invalid2.move:11:9 │ 11 │ r == r; │ ^^^^^^ copy needed here because value is still in use -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/ability-check/typing/eq_invalid2.move:11:9 │ 11 │ r == r; │ ^^^^^^ operator drops value here (consider borrowing the argument) -error: value of type `M::G1` does not have the `drop` ability +error: value of type `G1` does not have the `drop` ability ┌─ tests/ability-check/typing/eq_invalid2.move:15:9 │ 15 │ G1{ f: t } == G1{ f: t }; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/mutate_resource.exp b/third_party/move/move-compiler-v2/tests/ability-check/typing/mutate_resource.exp index 6be249a01f461..8a2dbc1c78b74 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/typing/mutate_resource.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/typing/mutate_resource.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/ability-check/typing/mutate_resource.move:5:9 │ 5 │ *r = R {}; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/neq_invalid2.exp b/third_party/move/move-compiler-v2/tests/ability-check/typing/neq_invalid2.exp index c21ca3e6e5640..c07bea1c68b1b 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/typing/neq_invalid2.exp +++ 
b/third_party/move/move-compiler-v2/tests/ability-check/typing/neq_invalid2.exp @@ -1,36 +1,36 @@ Diagnostics: -error: local `s2` of type `M::S` does not have the `drop` ability +error: local `s2` of type `S` does not have the `drop` ability ┌─ tests/ability-check/typing/neq_invalid2.move:17:9 │ 17 │ s != s2; │ ^^^^^^^ operator drops value here (consider borrowing the argument) -error: local `s` of type `M::S` does not have the `drop` ability +error: local `s` of type `S` does not have the `drop` ability ┌─ tests/ability-check/typing/neq_invalid2.move:17:9 │ 17 │ s != s2; │ ^^^^^^^ operator drops value here (consider borrowing the argument) -error: local `r1` of type `M::R` does not have the `drop` ability +error: local `r1` of type `R` does not have the `drop` ability ┌─ tests/ability-check/typing/neq_invalid2.move:22:9 │ 22 │ r1 != r2; │ ^^^^^^^^ operator drops value here (consider borrowing the argument) -error: local `r2` of type `M::R` does not have the `drop` ability +error: local `r2` of type `R` does not have the `drop` ability ┌─ tests/ability-check/typing/neq_invalid2.move:22:9 │ 22 │ r1 != r2; │ ^^^^^^^^ operator drops value here (consider borrowing the argument) -error: value of type `M::G1` does not have the `drop` ability +error: value of type `G1` does not have the `drop` ability ┌─ tests/ability-check/typing/neq_invalid2.move:27:9 │ 27 │ G1{} != G1{}; │ ^^^^^^^^^^^^^^^^^^^^^^ operator drops value here (consider borrowing the argument) -error: value of type `M::G2` does not have the `drop` ability +error: value of type `G2` does not have the `drop` ability ┌─ tests/ability-check/typing/neq_invalid2.move:28:9 │ 28 │ G2{} != G2{}; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/pack.exp b/third_party/move/move-compiler-v2/tests/ability-check/typing/pack.exp index b35c01849ea5f..f413f2adcb141 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/typing/pack.exp +++ 
b/third_party/move/move-compiler-v2/tests/ability-check/typing/pack.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `s` of type `M::S` does not have the `copy` ability +error: local `s` of type `S` does not have the `copy` ability ┌─ tests/ability-check/typing/pack.move:14:29 │ 14 │ let n2 = Nat { f: *&s }; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/phantom_param_op_abilities_invalid2.exp b/third_party/move/move-compiler-v2/tests/ability-check/typing/phantom_param_op_abilities_invalid2.exp index 9ff4f8060bc86..412b4fac8c74d 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/typing/phantom_param_op_abilities_invalid2.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/typing/phantom_param_op_abilities_invalid2.exp @@ -1,18 +1,18 @@ Diagnostics: -error: local `ref` of type `M::HasDrop` does not have the `drop` ability +error: local `ref` of type `HasDrop` does not have the `drop` ability ┌─ tests/ability-check/typing/phantom_param_op_abilities_invalid2.move:11:9 │ 11 │ *ref = HasDrop { a: NoAbilities { } }; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ reference content dropped here -error: value of type `M::HasDrop` does not have the `drop` ability +error: value of type `HasDrop` does not have the `drop` ability ┌─ tests/ability-check/typing/phantom_param_op_abilities_invalid2.move:16:9 │ 16 │ _ = HasDrop { a: NoAbilities { } }; │ ^ implicitly dropped here since it is no longer used -error: local `_x` of type `M::HasDrop` does not have the `drop` ability +error: local `_x` of type `HasDrop` does not have the `drop` ability ┌─ tests/ability-check/typing/phantom_param_op_abilities_invalid2.move:20:51 │ 20 │ fun f3(_x: HasDrop) { @@ -20,7 +20,7 @@ error: local `_x` of type `M::HasDrop` does not 21 │ │ } │ ╰─────^ implicitly dropped here since it is no longer used -error: local `x` of type `M::HasCopy` does not have the `copy` ability +error: local `x` of type `HasCopy` does not have the 
`copy` ability ┌─ tests/ability-check/typing/phantom_param_op_abilities_invalid2.move:25:10 │ 25 │ (copy x, x) diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/type_variable_join_threaded_unpack.exp b/third_party/move/move-compiler-v2/tests/ability-check/typing/type_variable_join_threaded_unpack.exp index a00ded68efbf9..41c2aec98921a 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/typing/type_variable_join_threaded_unpack.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/typing/type_variable_join_threaded_unpack.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `f1` of type `M::Box>` does not have the `copy` ability +error: local `f1` of type `Box>` does not have the `copy` ability ┌─ tests/ability-check/typing/type_variable_join_threaded_unpack.move:40:44 │ 40 │ Container::put(&mut v, Box { f1: *&f1, f2 }); diff --git a/third_party/move/move-compiler-v2/tests/ability-check/typing/type_variable_join_threaded_unpack_assign.exp b/third_party/move/move-compiler-v2/tests/ability-check/typing/type_variable_join_threaded_unpack_assign.exp index 01898c35db8c6..d95c02ffe7b31 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/typing/type_variable_join_threaded_unpack_assign.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/typing/type_variable_join_threaded_unpack_assign.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `f1` of type `M::Box>` does not have the `copy` ability +error: local `f1` of type `Box>` does not have the `copy` ability ┌─ tests/ability-check/typing/type_variable_join_threaded_unpack_assign.move:44:44 │ 44 │ Container::put(&mut v, Box { f1: *&f1, f2 }); diff --git a/third_party/move/move-compiler-v2/tests/ability-check/unused_para_no_drop.exp b/third_party/move/move-compiler-v2/tests/ability-check/unused_para_no_drop.exp index 836f76fef6fd2..011b5e8f6e77d 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/unused_para_no_drop.exp +++ 
b/third_party/move/move-compiler-v2/tests/ability-check/unused_para_no_drop.exp @@ -8,7 +8,7 @@ error: local `_x` of type `T` does not have the `drop` ability 10 │ │ } │ ╰─────^ implicitly dropped here since it is no longer used -error: local `_x` of type `m::S` does not have the `drop` ability +error: local `_x` of type `S` does not have the `drop` ability ┌─ tests/ability-check/unused_para_no_drop.move:15:26 │ 15 │ public fun f3(_x: S) { @@ -16,7 +16,7 @@ error: local `_x` of type `m::S` does not have the `drop` ability 16 │ │ } │ ╰─────^ implicitly dropped here since it is no longer used -error: local `_x` of type `vector` does not have the `drop` ability +error: local `_x` of type `vector` does not have the `drop` ability ┌─ tests/ability-check/unused_para_no_drop.move:21:34 │ 21 │ public fun f5(_x: vector) { @@ -30,13 +30,13 @@ error: local `_y` of type `T` does not have the `drop` ability 38 │ x │ ^ implicitly dropped here since it is no longer used -error: local `x` of type `m::S` does not have the `drop` ability +error: local `x` of type `S` does not have the `drop` ability ┌─ tests/ability-check/unused_para_no_drop.move:42:9 │ 42 │ &x == &y │ ^^ still borrowed but will be implicitly dropped later since it is no longer used -error: local `y` of type `m::S` does not have the `drop` ability +error: local `y` of type `S` does not have the `drop` ability ┌─ tests/ability-check/unused_para_no_drop.move:42:15 │ 42 │ &x == &y @@ -54,13 +54,13 @@ error: local `y` of type `T` does not have the `drop` ability 46 │ &x == &y │ ^^ still borrowed but will be implicitly dropped later since it is no longer used -error: local `x` of type `m::S2` does not have the `drop` ability +error: local `x` of type `S2` does not have the `drop` ability ┌─ tests/ability-check/unused_para_no_drop.move:54:9 │ 54 │ x.foo == y.foo │ ^ still borrowed but will be implicitly dropped later since it is no longer used -error: local `y` of type `m::S2` does not have the `drop` ability +error: 
local `y` of type `S2` does not have the `drop` ability ┌─ tests/ability-check/unused_para_no_drop.move:54:18 │ 54 │ x.foo == y.foo diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-borrow-tests/assign_resource.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-borrow-tests/assign_resource.exp index db2b4f2d7dfab..88b027952a193 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-borrow-tests/assign_resource.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-borrow-tests/assign_resource.exp @@ -1,12 +1,12 @@ Diagnostics: -error: local `t` of type `M::T` does not have the `drop` ability +error: local `t` of type `T` does not have the `drop` ability ┌─ tests/ability-check/v1-borrow-tests/assign_resource.move:5:22 │ 5 │ let t = T{}; &t; │ ^^ implicitly dropped here since it is no longer used -error: local `t` of type `M::T` does not have the `drop` ability +error: local `t` of type `T` does not have the `drop` ability ┌─ tests/ability-check/v1-borrow-tests/assign_resource.move:6:19 │ 6 │ t = T {}; &t; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-borrow-tests/no_drop.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-borrow-tests/no_drop.exp index 87e25ce461590..d0f0bd3816cb5 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-borrow-tests/no_drop.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-borrow-tests/no_drop.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `x` of type `M::X` does not have the `drop` ability +error: local `x` of type `X` does not have the `drop` ability ┌─ tests/ability-check/v1-borrow-tests/no_drop.move:6:9 │ 6 │ &x; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-locals/drop_conditional.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-locals/drop_conditional.exp index 3aeca4920a7b0..3d1e5403d7704 100644 --- 
a/third_party/move/move-compiler-v2/tests/ability-check/v1-locals/drop_conditional.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-locals/drop_conditional.exp @@ -1,12 +1,12 @@ Diagnostics: -error: local `x` of type `M::Cup` does not have the `drop` ability +error: local `x` of type `Cup` does not have the `drop` ability ┌─ tests/ability-check/v1-locals/drop_conditional.move:12:9 │ 12 │ &x; │ ^^ implicitly dropped here since it is no longer used -error: local `x` of type `M::Pair` does not have the `drop` ability +error: local `x` of type `Pair` does not have the `drop` ability ┌─ tests/ability-check/v1-locals/drop_conditional.move:14:9 │ 14 │ &x; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-locals/reassign_parameter.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-locals/reassign_parameter.exp index 9b30add3a1359..f19e3cefbc39c 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-locals/reassign_parameter.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-locals/reassign_parameter.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-locals/reassign_parameter.move:7:9 │ 7 │ ╭ if (true) { diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-signer/copy_loc_transitive.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-signer/copy_loc_transitive.exp index 884b679059917..e9484ac844f4d 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-signer/copy_loc_transitive.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-signer/copy_loc_transitive.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `x` of type `M::S` does not have the `copy` ability +error: local `x` of type `S` does not have the `copy` ability ┌─ tests/ability-check/v1-signer/copy_loc_transitive.move:5:9 │ 5 │ copy x diff --git 
a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/assign_pop_resource.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/assign_pop_resource.exp index 4094f23a8ee64..21b3423e4df58 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/assign_pop_resource.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/assign_pop_resource.exp @@ -1,18 +1,18 @@ Diagnostics: -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/assign_pop_resource.move:5:9 │ 5 │ _ = R{}; │ ^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/assign_pop_resource.move:6:10 │ 6 │ (_, _) = (R{}, R{}); │ ^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/assign_pop_resource.move:6:13 │ 6 │ (_, _) = (R{}, R{}); diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/bind_pop_resource.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/bind_pop_resource.exp index 2c2f2e34d9686..553bf316dc87d 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/bind_pop_resource.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/bind_pop_resource.exp @@ -1,24 +1,24 @@ Diagnostics: -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/bind_pop_resource.move:5:13 │ 5 │ let _: R = R{}; │ ^ implicitly dropped here since it is no longer used -error: local `_r` of type `M::R` does not have the `drop` ability +error: local `_r` of type `R` does not have the `drop` 
ability ┌─ tests/ability-check/v1-typing/bind_pop_resource.move:8:21 │ 8 │ let _r: R = R{}; │ ^^^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/bind_pop_resource.move:9:14 │ 9 │ let (_, _):(R, R) = (R{}, R{}); │ ^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/bind_pop_resource.move:9:17 │ 9 │ let (_, _):(R, R) = (R{}, R{}); diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/borrow_field_non_ref_non_local_root.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/borrow_field_non_ref_non_local_root.exp index 3ca8e651183f9..cd06c954f6388 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/borrow_field_non_ref_non_local_root.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/borrow_field_non_ref_non_local_root.exp @@ -1,6 +1,6 @@ Diagnostics: -error: value of type `M::S` does not have the `copy` ability +error: value of type `S` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/borrow_field_non_ref_non_local_root.move:9:22 │ 9 │ (&(if (cond) *foo() else bar()).f : &u64); diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/conditional_copy_invalid.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/conditional_copy_invalid.exp index 25893cff8e123..5c0feae4f836e 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/conditional_copy_invalid.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/conditional_copy_invalid.exp @@ -1,60 +1,60 @@ Diagnostics: -error: local `x` of type `M::Box` does not have the `copy` ability +error: local `x` of type `Box` does not have the 
`copy` ability ┌─ tests/ability-check/v1-typing/conditional_copy_invalid.move:15:16 │ 15 │ ignore(copy x); │ ^^^^^^ explicitly copied here -error: local `x` of type `M::Box>` does not have the `copy` ability +error: local `x` of type `Box>` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/conditional_copy_invalid.move:17:16 │ 17 │ ignore(copy x); │ ^^^^^^ explicitly copied here -error: local `x` of type `M::Box` does not have the `copy` ability +error: local `x` of type `Box` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/conditional_copy_invalid.move:19:16 │ 19 │ ignore(copy x); │ ^^^^^^ explicitly copied here -error: local `x` of type `M::Box>` does not have the `copy` ability +error: local `x` of type `Box>` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/conditional_copy_invalid.move:21:16 │ 21 │ ignore(copy x); │ ^^^^^^ explicitly copied here -error: local `x` of type `M::Pair` does not have the `copy` ability +error: local `x` of type `Pair` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/conditional_copy_invalid.move:23:16 │ 23 │ ignore(copy x); │ ^^^^^^ explicitly copied here -error: local `x` of type `M::Box` does not have the `copy` ability +error: local `x` of type `Box` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/conditional_copy_invalid.move:27:16 │ 27 │ ignore(*x); │ ^^ reference content copied here -error: local `x` of type `M::Box>` does not have the `copy` ability +error: local `x` of type `Box>` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/conditional_copy_invalid.move:29:16 │ 29 │ ignore(*x); │ ^^ reference content copied here -error: local `x` of type `M::Box` does not have the `copy` ability +error: local `x` of type `Box` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/conditional_copy_invalid.move:31:16 │ 31 │ ignore(*x); │ ^^ reference content copied here -error: local `x` of type `M::Box>` does not have the 
`copy` ability +error: local `x` of type `Box>` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/conditional_copy_invalid.move:33:16 │ 33 │ ignore(*x); │ ^^ reference content copied here -error: local `x` of type `M::Pair` does not have the `copy` ability +error: local `x` of type `Pair` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/conditional_copy_invalid.move:35:16 │ 35 │ ignore(*x); diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/conditional_drop_invalid.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/conditional_drop_invalid.exp index e450ad7e5cc9c..5f8413a07c1a8 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/conditional_drop_invalid.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/conditional_drop_invalid.exp @@ -1,66 +1,66 @@ Diagnostics: -error: value of type `M::Box` does not have the `drop` ability +error: value of type `Box` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:10:9 │ 10 │ Box { f: R{} }; │ ^^^^^^^^^^^^^^^^^ implicitly dropped here since it is no longer used -error: value of type `M::Box>` does not have the `drop` ability +error: value of type `Box>` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:11:9 │ 11 │ Box> { f: Box { f: R{} } }; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ implicitly dropped here since it is no longer used -error: value of type `M::Box` does not have the `drop` ability +error: value of type `Box` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:12:9 │ 12 │ Box { f: t }; │ ^^^^^^^^^^^^^^^ implicitly dropped here since it is no longer used -error: value of type `M::Box>` does not have the `drop` ability +error: value of type `Box>` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:13:9 │ 13 │ Box> { f: 
Box { f: t } }; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ implicitly dropped here since it is no longer used -error: value of type `M::Pair` does not have the `drop` ability +error: value of type `Pair` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:14:9 │ 14 │ Pair { f1: S{}, f2: R{} }; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ implicitly dropped here since it is no longer used -error: value of type `M::Pair` does not have the `drop` ability +error: value of type `Pair` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:15:10 │ 15 │ (Pair { f1: S{}, f2: R{} }, 0, @0x1); │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ implicitly dropped here since it is no longer used -error: value of type `M::Box` does not have the `drop` ability +error: value of type `Box` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:17:9 │ 17 │ Box { f: R {} } == Box { f: R {} }; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ operator drops value here (consider borrowing the argument) -error: value of type `M::Box>` does not have the `drop` ability +error: value of type `Box>` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:18:9 │ 18 │ Box> { f: Box { f: R {} } } == Box> { f: Box { f: R {} }}; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ operator drops value here (consider borrowing the argument) -error: value of type `M::Box` does not have the `drop` ability +error: value of type `Box` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:19:9 │ 19 │ Box { f: t } == Box { f: t }; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ operator drops value here (consider borrowing the argument) -error: value of type `M::Box>` does not have the `drop` ability +error: value of type `Box>` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:20:9 │ 
20 │ Box> { f: Box { f: t } } == Box> { f: Box { f: t} }; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ operator drops value here (consider borrowing the argument) -error: value of type `M::Pair` does not have the `drop` ability +error: value of type `Pair` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/conditional_drop_invalid.move:21:9 │ 21 │ Pair { f1: R{}, f2: S{} } == Pair { f1: R{}, f2: S{} }; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/implicit_deref_borrow_field_not_copyable.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/implicit_deref_borrow_field_not_copyable.exp index 891b17c507714..f9b7fb8bde20e 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/implicit_deref_borrow_field_not_copyable.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/implicit_deref_borrow_field_not_copyable.exp @@ -1,12 +1,12 @@ Diagnostics: -error: value of type `M::R` does not have the `copy` ability +error: value of type `R` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/implicit_deref_borrow_field_not_copyable.move:8:15 │ 8 │ R{} = b.r; │ ^^^ reference content copied here -error: value of type `M::R` does not have the `copy` ability +error: value of type `R` does not have the `copy` ability ┌─ tests/ability-check/v1-typing/implicit_deref_borrow_field_not_copyable.move:11:15 │ 11 │ R{} = bref.r; diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/seq_cannot_ignore_resource.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/seq_cannot_ignore_resource.exp index 2f2544c85cbb7..a0b416f33dffa 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/seq_cannot_ignore_resource.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/seq_cannot_ignore_resource.exp @@ -1,36 +1,36 @@ Diagnostics: -error: value of type `M::R` does not have the `drop` 
ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/seq_cannot_ignore_resource.move:5:9 │ 5 │ R{}; │ ^^^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/seq_cannot_ignore_resource.move:10:9 │ 10 │ r; │ ^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/seq_cannot_ignore_resource.move:14:20 │ 14 │ (0, false, R{}); │ ^^^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/seq_cannot_ignore_resource.move:19:30 │ 19 │ if (true) (0, false, R{}) else (0, false, r); │ ^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/seq_cannot_ignore_resource.move:19:19 │ 19 │ if (true) (0, false, R{}) else (0, false, r); │ ^^^^^^^^^^^^^^^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/ability-check/v1-typing/seq_cannot_ignore_resource.move:19:51 │ 19 │ if (true) (0, false, R{}) else (0, false, r); diff --git a/third_party/move/move-compiler-v2/tests/ability-transform/borrowed_from_one_path.exp b/third_party/move/move-compiler-v2/tests/ability-transform/borrowed_from_one_path.exp index 322649a3dde34..64fd1e4a61b95 100644 --- a/third_party/move/move-compiler-v2/tests/ability-transform/borrowed_from_one_path.exp +++ 
b/third_party/move/move-compiler-v2/tests/ability-transform/borrowed_from_one_path.exp @@ -6,7 +6,7 @@ fun m::f($t0: u8, $t1: &vector): u64 { var $t3: &vector var $t4: bool var $t5: u8 - var $t6: &m::R + var $t6: &0x42::m::R var $t7: address var $t8: &u64 var $t9: u64 @@ -15,8 +15,8 @@ fun m::f($t0: u8, $t1: &vector): u64 { 2: if ($t4) goto 3 else goto 8 3: label L0 4: $t7 := 0x1 - 5: $t6 := borrow_global($t7) - 6: $t3 := borrow_field.data($t6) + 5: $t6 := borrow_global<0x42::m::R>($t7) + 6: $t3 := borrow_field<0x42::m::R>.data($t6) 7: goto 10 8: label L1 9: $t3 := infer($t1) @@ -35,7 +35,7 @@ fun m::f($t0: u8, $t1: &vector): u64 { var $t3: &vector var $t4: bool var $t5: u8 - var $t6: &m::R + var $t6: &0x42::m::R var $t7: address var $t8: &u64 var $t9: u64 @@ -50,9 +50,9 @@ fun m::f($t0: u8, $t1: &vector): u64 { # live vars: 4: $t7 := 0x1 # live vars: $t7 - 5: $t6 := borrow_global($t7) + 5: $t6 := borrow_global<0x42::m::R>($t7) # live vars: $t6 - 6: $t3 := borrow_field.data($t6) + 6: $t3 := borrow_field<0x42::m::R>.data($t6) # live vars: $t3 7: goto 10 # live vars: $t1 @@ -79,7 +79,7 @@ fun m::f($t0: u8, $t1: &vector): u64 { var $t3: &vector var $t4: bool var $t5: u8 - var $t6: &m::R + var $t6: &0x42::m::R var $t7: address var $t8: &u64 var $t9: u64 @@ -94,9 +94,9 @@ fun m::f($t0: u8, $t1: &vector): u64 { # live vars: 4: $t7 := 0x1 # live vars: $t7 - 5: $t6 := borrow_global($t7) + 5: $t6 := borrow_global<0x42::m::R>($t7) # live vars: $t6 - 6: $t3 := borrow_field.data($t6) + 6: $t3 := borrow_field<0x42::m::R>.data($t6) # live vars: $t3 7: goto 10 # live vars: $t1 @@ -123,7 +123,7 @@ fun m::f($t0: u8, $t1: &vector): u64 { var $t3: &vector var $t4: bool var $t5: u8 - var $t6: &m::R + var $t6: &0x42::m::R var $t7: address var $t8: &u64 var $t9: u64 @@ -166,7 +166,7 @@ fun m::f($t0: u8, $t1: &vector): u64 { # live vars: $t7 # refs: [] # - 5: $t6 := borrow_global($t7) + 5: $t6 := borrow_global<0x42::m::R>($t7) # live vars: $t6 # refs: [$t6 => #6] # #6 @@ -174,7 +174,7 @@ 
fun m::f($t0: u8, $t1: &vector): u64 { # #root # -> #6 via [struct `m::R`] at line 12 # - 6: $t3 := borrow_field.data($t6) + 6: $t3 := borrow_field<0x42::m::R>.data($t6) # live vars: $t3 # refs: [$t3 => #3] # #3 @@ -245,7 +245,7 @@ fun m::f($t0: u8, $t1: &vector): u64 { var $t3: &vector var $t4: bool var $t5: u8 - var $t6: &m::R + var $t6: &0x42::m::R var $t7: address var $t8: &u64 var $t9: u64 @@ -294,7 +294,7 @@ fun m::f($t0: u8, $t1: &vector): u64 { # live vars: $t7 # refs: [] # - 5: $t6 := borrow_global($t7) + 5: $t6 := borrow_global<0x42::m::R>($t7) # abort state: {returns,aborts} # live vars: $t6 # refs: [$t6 => #6] @@ -303,7 +303,7 @@ fun m::f($t0: u8, $t1: &vector): u64 { # #root # -> #6 via [struct `m::R`] at line 12 # - 6: $t3 := borrow_field.data($t6) + 6: $t3 := borrow_field<0x42::m::R>.data($t6) # abort state: {returns,aborts} # live vars: $t3 # refs: [$t3 => #3] @@ -382,7 +382,7 @@ fun m::f($t0: u8, $t1: &vector): u64 { var $t3: &vector var $t4: bool var $t5: u8 - var $t6: &m::R + var $t6: &0x42::m::R var $t7: address var $t8: &u64 var $t9: u64 @@ -392,8 +392,8 @@ fun m::f($t0: u8, $t1: &vector): u64 { 3: label L0 4: drop($t1) 5: $t7 := 0x1 - 6: $t6 := borrow_global($t7) - 7: $t3 := borrow_field.data($t6) + 6: $t6 := borrow_global<0x42::m::R>($t7) + 7: $t3 := borrow_field<0x42::m::R>.data($t6) 8: goto 11 9: label L1 10: $t3 := move($t1) diff --git a/third_party/move/move-compiler-v2/tests/ability-transform/copy_ability_tuple.exp b/third_party/move/move-compiler-v2/tests/ability-transform/copy_ability_tuple.exp index f1da3a8a76fdd..3af453f1202a4 100644 --- a/third_party/move/move-compiler-v2/tests/ability-transform/copy_ability_tuple.exp +++ b/third_party/move/move-compiler-v2/tests/ability-transform/copy_ability_tuple.exp @@ -1,8 +1,8 @@ ============ initial bytecode ================ [variant baseline] -public fun M::f($t0: M::R): (M::R, u64) { - var $t1: M::R +public fun M::f($t0: 0x42::M::R): (0x42::M::R, u64) { + var $t1: 0x42::M::R var $t2: u64 0: 
$t1 := infer($t0) 1: $t2 := 0 @@ -12,22 +12,22 @@ public fun M::f($t0: M::R): (M::R, u64) { [variant baseline] public fun M::g($t0: &signer) { - var $t1: M::R + var $t1: 0x42::M::R var $t2: u64 var $t3: u64 0: $t2 := 1 - 1: $t1 := pack M::R($t2) + 1: $t1 := pack 0x42::M::R($t2) 2: $t3 := 3 3: ($t1, $t3) := M::f($t1) - 4: move_to($t0, $t1) + 4: move_to<0x42::M::R>($t0, $t1) 5: return () } ============ after LiveVarAnalysisProcessor: ================ [variant baseline] -public fun M::f($t0: M::R): (M::R, u64) { - var $t1: M::R +public fun M::f($t0: 0x42::M::R): (0x42::M::R, u64) { + var $t1: 0x42::M::R var $t2: u64 # live vars: $t0 0: $t1 := infer($t0) @@ -40,19 +40,19 @@ public fun M::f($t0: M::R): (M::R, u64) { [variant baseline] public fun M::g($t0: &signer) { - var $t1: M::R + var $t1: 0x42::M::R var $t2: u64 var $t3: u64 # live vars: $t0 0: $t2 := 1 # live vars: $t0, $t2 - 1: $t1 := pack M::R($t2) + 1: $t1 := pack 0x42::M::R($t2) # live vars: $t0, $t1 2: $t3 := 3 # live vars: $t0, $t1 3: ($t1, $t3) := M::f($t1) # live vars: $t0, $t1 - 4: move_to($t0, $t1) + 4: move_to<0x42::M::R>($t0, $t1) # live vars: 5: return () } @@ -60,8 +60,8 @@ public fun M::g($t0: &signer) { ============ after LiveVarAnalysisProcessor: ================ [variant baseline] -public fun M::f($t0: M::R): (M::R, u64) { - var $t1: M::R +public fun M::f($t0: 0x42::M::R): (0x42::M::R, u64) { + var $t1: 0x42::M::R var $t2: u64 # live vars: $t0 0: $t1 := infer($t0) @@ -74,19 +74,19 @@ public fun M::f($t0: M::R): (M::R, u64) { [variant baseline] public fun M::g($t0: &signer) { - var $t1: M::R + var $t1: 0x42::M::R var $t2: u64 var $t3: u64 # live vars: $t0 0: $t2 := 1 # live vars: $t0, $t2 - 1: $t1 := pack M::R($t2) + 1: $t1 := pack 0x42::M::R($t2) # live vars: $t0, $t1 2: $t3 := 3 # live vars: $t0, $t1 3: ($t1, $t3) := M::f($t1) # live vars: $t0, $t1 - 4: move_to($t0, $t1) + 4: move_to<0x42::M::R>($t0, $t1) # live vars: 5: return () } @@ -94,8 +94,8 @@ public fun M::g($t0: &signer) { ============ 
after ReferenceSafetyProcessor: ================ [variant baseline] -public fun M::f($t0: M::R): (M::R, u64) { - var $t1: M::R +public fun M::f($t0: 0x42::M::R): (0x42::M::R, u64) { + var $t1: 0x42::M::R var $t2: u64 # live vars: $t0 # refs: [] @@ -114,7 +114,7 @@ public fun M::f($t0: M::R): (M::R, u64) { [variant baseline] public fun M::g($t0: &signer) { - var $t1: M::R + var $t1: 0x42::M::R var $t2: u64 var $t3: u64 # live vars: $t0 @@ -132,7 +132,7 @@ public fun M::g($t0: &signer) { # #root # # - 1: $t1 := pack M::R($t2) + 1: $t1 := pack 0x42::M::R($t2) # live vars: $t0, $t1 # refs: [$t0 => #0] # #0 @@ -156,7 +156,7 @@ public fun M::g($t0: &signer) { # #root # # - 4: move_to($t0, $t1) + 4: move_to<0x42::M::R>($t0, $t1) # live vars: # refs: [] # @@ -166,8 +166,8 @@ public fun M::g($t0: &signer) { ============ after AbortAnalysisProcessor: ================ [variant baseline] -public fun M::f($t0: M::R): (M::R, u64) { - var $t1: M::R +public fun M::f($t0: 0x42::M::R): (0x42::M::R, u64) { + var $t1: 0x42::M::R var $t2: u64 # abort state: {returns} # live vars: $t0 @@ -189,7 +189,7 @@ public fun M::f($t0: M::R): (M::R, u64) { [variant baseline] public fun M::g($t0: &signer) { - var $t1: M::R + var $t1: 0x42::M::R var $t2: u64 var $t3: u64 # abort state: {returns,aborts} @@ -209,7 +209,7 @@ public fun M::g($t0: &signer) { # #root # # - 1: $t1 := pack M::R($t2) + 1: $t1 := pack 0x42::M::R($t2) # abort state: {returns,aborts} # live vars: $t0, $t1 # refs: [$t0 => #0] @@ -236,7 +236,7 @@ public fun M::g($t0: &signer) { # #root # # - 4: move_to($t0, $t1) + 4: move_to<0x42::M::R>($t0, $t1) # abort state: {returns} # live vars: # refs: [] @@ -247,8 +247,8 @@ public fun M::g($t0: &signer) { ============ after AbilityProcessor: ================ [variant baseline] -public fun M::f($t0: M::R): (M::R, u64) { - var $t1: M::R +public fun M::f($t0: 0x42::M::R): (0x42::M::R, u64) { + var $t1: 0x42::M::R var $t2: u64 0: $t1 := move($t0) 1: $t2 := 0 @@ -258,13 +258,13 @@ public fun 
M::f($t0: M::R): (M::R, u64) { [variant baseline] public fun M::g($t0: &signer) { - var $t1: M::R + var $t1: 0x42::M::R var $t2: u64 var $t3: u64 0: $t2 := 1 - 1: $t1 := pack M::R($t2) + 1: $t1 := pack 0x42::M::R($t2) 2: $t3 := 3 3: ($t1, $t3) := M::f($t1) - 4: move_to($t0, $t1) + 4: move_to<0x42::M::R>($t0, $t1) 5: return () } diff --git a/third_party/move/move-compiler-v2/tests/ability-transform/mutate_vector.exp b/third_party/move/move-compiler-v2/tests/ability-transform/mutate_vector.exp index 358ffa25771a9..7259af56e7847 100644 --- a/third_party/move/move-compiler-v2/tests/ability-transform/mutate_vector.exp +++ b/third_party/move/move-compiler-v2/tests/ability-transform/mutate_vector.exp @@ -1,16 +1,16 @@ ============ initial bytecode ================ [variant baseline] -public fun m::new_scalar_from_u8($t0: u8): m::Scalar { - var $t1: m::Scalar - var $t2: m::Scalar +public fun m::new_scalar_from_u8($t0: u8): 0x42::m::Scalar { + var $t1: 0x42::m::Scalar + var $t2: 0x42::m::Scalar var $t3: &mut u8 var $t4: &mut vector - var $t5: &mut m::Scalar + var $t5: &mut 0x42::m::Scalar var $t6: u64 0: $t2 := m::scalar_zero() 1: $t5 := borrow_local($t2) - 2: $t4 := borrow_field.data($t5) + 2: $t4 := borrow_field<0x42::m::Scalar>.data($t5) 3: $t6 := 0 4: $t3 := vector::borrow_mut($t4, $t6) 5: write_ref($t3, $t0) @@ -20,30 +20,30 @@ public fun m::new_scalar_from_u8($t0: u8): m::Scalar { [variant baseline] -public fun m::scalar_zero(): m::Scalar { - var $t0: m::Scalar +public fun m::scalar_zero(): 0x42::m::Scalar { + var $t0: 0x42::m::Scalar var $t1: vector 0: $t1 := [0] - 1: $t0 := pack m::Scalar($t1) + 1: $t0 := pack 0x42::m::Scalar($t1) 2: return $t0 } ============ after LiveVarAnalysisProcessor: ================ [variant baseline] -public fun m::new_scalar_from_u8($t0: u8): m::Scalar { - var $t1: m::Scalar - var $t2: m::Scalar +public fun m::new_scalar_from_u8($t0: u8): 0x42::m::Scalar { + var $t1: 0x42::m::Scalar + var $t2: 0x42::m::Scalar var $t3: &mut u8 var $t4: &mut 
vector - var $t5: &mut m::Scalar + var $t5: &mut 0x42::m::Scalar var $t6: u64 # live vars: $t0 0: $t2 := m::scalar_zero() # live vars: $t0, $t2 1: $t5 := borrow_local($t2) # live vars: $t0, $t2, $t5 - 2: $t4 := borrow_field.data($t5) + 2: $t4 := borrow_field<0x42::m::Scalar>.data($t5) # live vars: $t0, $t2, $t4 3: $t6 := 0 # live vars: $t0, $t2, $t4, $t6 @@ -58,13 +58,13 @@ public fun m::new_scalar_from_u8($t0: u8): m::Scalar { [variant baseline] -public fun m::scalar_zero(): m::Scalar { - var $t0: m::Scalar +public fun m::scalar_zero(): 0x42::m::Scalar { + var $t0: 0x42::m::Scalar var $t1: vector # live vars: 0: $t1 := [0] # live vars: $t1 - 1: $t0 := pack m::Scalar($t1) + 1: $t0 := pack 0x42::m::Scalar($t1) # live vars: $t0 2: return $t0 } @@ -72,19 +72,19 @@ public fun m::scalar_zero(): m::Scalar { ============ after LiveVarAnalysisProcessor: ================ [variant baseline] -public fun m::new_scalar_from_u8($t0: u8): m::Scalar { - var $t1: m::Scalar - var $t2: m::Scalar +public fun m::new_scalar_from_u8($t0: u8): 0x42::m::Scalar { + var $t1: 0x42::m::Scalar + var $t2: 0x42::m::Scalar var $t3: &mut u8 var $t4: &mut vector - var $t5: &mut m::Scalar + var $t5: &mut 0x42::m::Scalar var $t6: u64 # live vars: $t0 0: $t2 := m::scalar_zero() # live vars: $t0, $t2 1: $t5 := borrow_local($t2) # live vars: $t0, $t2, $t5 - 2: $t4 := borrow_field.data($t5) + 2: $t4 := borrow_field<0x42::m::Scalar>.data($t5) # live vars: $t0, $t2, $t4 3: $t6 := 0 # live vars: $t0, $t2, $t4, $t6 @@ -99,13 +99,13 @@ public fun m::new_scalar_from_u8($t0: u8): m::Scalar { [variant baseline] -public fun m::scalar_zero(): m::Scalar { - var $t0: m::Scalar +public fun m::scalar_zero(): 0x42::m::Scalar { + var $t0: 0x42::m::Scalar var $t1: vector # live vars: 0: $t1 := [0] # live vars: $t1 - 1: $t0 := pack m::Scalar($t1) + 1: $t0 := pack 0x42::m::Scalar($t1) # live vars: $t0 2: return $t0 } @@ -113,12 +113,12 @@ public fun m::scalar_zero(): m::Scalar { ============ after ReferenceSafetyProcessor: 
================ [variant baseline] -public fun m::new_scalar_from_u8($t0: u8): m::Scalar { - var $t1: m::Scalar - var $t2: m::Scalar +public fun m::new_scalar_from_u8($t0: u8): 0x42::m::Scalar { + var $t1: 0x42::m::Scalar + var $t2: 0x42::m::Scalar var $t3: &mut u8 var $t4: &mut vector - var $t5: &mut m::Scalar + var $t5: &mut 0x42::m::Scalar var $t6: u64 # live vars: $t0 # refs: [] @@ -135,7 +135,7 @@ public fun m::new_scalar_from_u8($t0: u8): m::Scalar { # #root # => (mut) #5 via [local `s`] at line 11 # - 2: $t4 := borrow_field.data($t5) + 2: $t4 := borrow_field<0x42::m::Scalar>.data($t5) # live vars: $t0, $t2, $t4 # refs: [$t4 => #4] # #4 @@ -172,8 +172,8 @@ public fun m::new_scalar_from_u8($t0: u8): m::Scalar { [variant baseline] -public fun m::scalar_zero(): m::Scalar { - var $t0: m::Scalar +public fun m::scalar_zero(): 0x42::m::Scalar { + var $t0: 0x42::m::Scalar var $t1: vector # live vars: # refs: [] @@ -182,7 +182,7 @@ public fun m::scalar_zero(): m::Scalar { # live vars: $t1 # refs: [] # - 1: $t0 := pack m::Scalar($t1) + 1: $t0 := pack 0x42::m::Scalar($t1) # live vars: $t0 # refs: [] # @@ -192,12 +192,12 @@ public fun m::scalar_zero(): m::Scalar { ============ after AbortAnalysisProcessor: ================ [variant baseline] -public fun m::new_scalar_from_u8($t0: u8): m::Scalar { - var $t1: m::Scalar - var $t2: m::Scalar +public fun m::new_scalar_from_u8($t0: u8): 0x42::m::Scalar { + var $t1: 0x42::m::Scalar + var $t2: 0x42::m::Scalar var $t3: &mut u8 var $t4: &mut vector - var $t5: &mut m::Scalar + var $t5: &mut 0x42::m::Scalar var $t6: u64 # abort state: {returns,aborts} # live vars: $t0 @@ -217,7 +217,7 @@ public fun m::new_scalar_from_u8($t0: u8): m::Scalar { # #root # => (mut) #5 via [local `s`] at line 11 # - 2: $t4 := borrow_field.data($t5) + 2: $t4 := borrow_field<0x42::m::Scalar>.data($t5) # abort state: {returns,aborts} # live vars: $t0, $t2, $t4 # refs: [$t4 => #4] @@ -259,8 +259,8 @@ public fun m::new_scalar_from_u8($t0: u8): m::Scalar { 
[variant baseline] -public fun m::scalar_zero(): m::Scalar { - var $t0: m::Scalar +public fun m::scalar_zero(): 0x42::m::Scalar { + var $t0: 0x42::m::Scalar var $t1: vector # abort state: {returns} # live vars: @@ -271,7 +271,7 @@ public fun m::scalar_zero(): m::Scalar { # live vars: $t1 # refs: [] # - 1: $t0 := pack m::Scalar($t1) + 1: $t0 := pack 0x42::m::Scalar($t1) # abort state: {returns} # live vars: $t0 # refs: [] @@ -282,16 +282,16 @@ public fun m::scalar_zero(): m::Scalar { ============ after AbilityProcessor: ================ [variant baseline] -public fun m::new_scalar_from_u8($t0: u8): m::Scalar { - var $t1: m::Scalar - var $t2: m::Scalar +public fun m::new_scalar_from_u8($t0: u8): 0x42::m::Scalar { + var $t1: 0x42::m::Scalar + var $t2: 0x42::m::Scalar var $t3: &mut u8 var $t4: &mut vector - var $t5: &mut m::Scalar + var $t5: &mut 0x42::m::Scalar var $t6: u64 0: $t2 := m::scalar_zero() 1: $t5 := borrow_local($t2) - 2: $t4 := borrow_field.data($t5) + 2: $t4 := borrow_field<0x42::m::Scalar>.data($t5) 3: $t6 := 0 4: $t3 := vector::borrow_mut($t4, $t6) 5: write_ref($t3, $t0) @@ -301,10 +301,10 @@ public fun m::new_scalar_from_u8($t0: u8): m::Scalar { [variant baseline] -public fun m::scalar_zero(): m::Scalar { - var $t0: m::Scalar +public fun m::scalar_zero(): 0x42::m::Scalar { + var $t0: 0x42::m::Scalar var $t1: vector 0: $t1 := [0] - 1: $t0 := pack m::Scalar($t1) + 1: $t0 := pack 0x42::m::Scalar($t1) 2: return $t0 } diff --git a/third_party/move/move-compiler-v2/tests/abort-analysis/drop_on_abort.exp b/third_party/move/move-compiler-v2/tests/abort-analysis/drop_on_abort.exp index 702e8dcfb42c3..95b75cc0831fd 100644 --- a/third_party/move/move-compiler-v2/tests/abort-analysis/drop_on_abort.exp +++ b/third_party/move/move-compiler-v2/tests/abort-analysis/drop_on_abort.exp @@ -1,8 +1,8 @@ ============ initial bytecode ================ [variant baseline] -public fun m::from_vec<#0>($t0: vector<#0>): m::Option<#0> { - var $t1: m::Option<#0> +public fun 
m::from_vec<#0>($t0: vector<#0>): 0x42::m::Option<#0> { + var $t1: 0x42::m::Option<#0> var $t2: bool var $t3: u64 var $t4: &vector<#0> @@ -19,15 +19,15 @@ public fun m::from_vec<#0>($t0: vector<#0>): m::Option<#0> { 8: goto 10 9: label L1 10: label L2 - 11: $t1 := pack m::Option<#0>($t0) + 11: $t1 := pack 0x42::m::Option<#0>($t0) 12: return $t1 } ============ after AbortAnalysisProcessor: ================ [variant baseline] -public fun m::from_vec<#0>($t0: vector<#0>): m::Option<#0> { - var $t1: m::Option<#0> +public fun m::from_vec<#0>($t0: vector<#0>): 0x42::m::Option<#0> { + var $t1: 0x42::m::Option<#0> var $t2: bool var $t3: u64 var $t4: &vector<#0> @@ -94,7 +94,7 @@ public fun m::from_vec<#0>($t0: vector<#0>): m::Option<#0> { # live vars: $t0 # refs: [] # - 11: $t1 := pack m::Option<#0>($t0) + 11: $t1 := pack 0x42::m::Option<#0>($t0) # abort state: {returns} # live vars: $t1 # refs: [] diff --git a/third_party/move/move-compiler-v2/tests/abort-analysis/loop_abort.exp b/third_party/move/move-compiler-v2/tests/abort-analysis/loop_abort.exp index 931538cfa9bf7..2e42d922264cd 100644 --- a/third_party/move/move-compiler-v2/tests/abort-analysis/loop_abort.exp +++ b/third_party/move/move-compiler-v2/tests/abort-analysis/loop_abort.exp @@ -2,10 +2,10 @@ [variant baseline] fun Test::test0() { - var $t0: Test::Impotent + var $t0: 0x42::Test::Impotent var $t1: bool 0: $t1 := false - 1: $t0 := pack Test::Impotent($t1) + 1: $t0 := pack 0x42::Test::Impotent($t1) 2: label L0 3: goto 2 4: label L1 @@ -15,11 +15,11 @@ fun Test::test0() { [variant baseline] fun Test::test1() { - var $t0: Test::Impotent + var $t0: 0x42::Test::Impotent var $t1: bool var $t2: bool 0: $t1 := false - 1: $t0 := pack Test::Impotent($t1) + 1: $t0 := pack 0x42::Test::Impotent($t1) 2: label L0 3: $t2 := true 4: if ($t2) goto 5 else goto 7 @@ -36,11 +36,11 @@ fun Test::test1() { [variant baseline] fun Test::test2($t0: bool) { - var $t1: Test::Impotent + var $t1: 0x42::Test::Impotent var $t2: bool var $t3: 
u64 0: $t2 := false - 1: $t1 := pack Test::Impotent($t2) + 1: $t1 := pack 0x42::Test::Impotent($t2) 2: if ($t0) goto 3 else goto 8 3: label L0 4: label L3 @@ -58,7 +58,7 @@ fun Test::test2($t0: bool) { [variant baseline] fun Test::test0() { - var $t0: Test::Impotent + var $t0: 0x42::Test::Impotent var $t1: bool # abort state: {} # live vars: @@ -69,7 +69,7 @@ fun Test::test0() { # live vars: $t1 # refs: [] # - 1: $t0 := pack Test::Impotent($t1) + 1: $t0 := pack 0x42::Test::Impotent($t1) # abort state: {} # live vars: # refs: [] @@ -91,7 +91,7 @@ fun Test::test0() { [variant baseline] fun Test::test1() { - var $t0: Test::Impotent + var $t0: 0x42::Test::Impotent var $t1: bool var $t2: bool # abort state: {returns} @@ -103,7 +103,7 @@ fun Test::test1() { # live vars: $t1 # refs: [] # - 1: $t0 := pack Test::Impotent($t1) + 1: $t0 := pack 0x42::Test::Impotent($t1) # abort state: {returns} # live vars: # refs: [] @@ -164,7 +164,7 @@ fun Test::test1() { [variant baseline] fun Test::test2($t0: bool) { - var $t1: Test::Impotent + var $t1: 0x42::Test::Impotent var $t2: bool var $t3: u64 # abort state: {aborts} @@ -176,7 +176,7 @@ fun Test::test2($t0: bool) { # live vars: $t0, $t2 # refs: [] # - 1: $t1 := pack Test::Impotent($t2) + 1: $t1 := pack 0x42::Test::Impotent($t2) # abort state: {aborts} # live vars: $t0 # refs: [] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/assign.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/assign.exp index 61a31095ee499..19a38d4478a92 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/assign.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/assign.exp @@ -5,32 +5,56 @@ module 0x42::assign { } struct S { f: u64, - g: assign::T, + g: T, } - private fun assign_field(s: &mut assign::S,f: u64) { - select assign::S.f<&mut assign::S>(s) = f; + private fun assign_field(s: &mut S,f: u64) { + select assign::S.f<&mut S>(s) = f; Tuple() } private fun assign_int(x: &mut u64) 
{ x = 42; Tuple() } - private fun assign_pattern(s: assign::S,f: u64,h: u64): u64 { + private fun assign_pattern(s: S,f: u64,h: u64): u64 { assign::S{ f, g: assign::T{ h } } = s; Add(f, h) } - private fun assign_struct(s: &mut assign::S) { + private fun assign_struct(s: &mut S) { s = pack assign::S(42, pack assign::T(42)); Tuple() } } // end 0x42::assign +// -- Sourcified model before bytecode pipeline +module 0x42::assign { + struct T has drop { + h: u64, + } + struct S has drop { + f: u64, + g: T, + } + fun assign_field(s: &mut S, f: u64) { + s.f = f; + } + fun assign_int(x: &mut u64) { + *x = 42; + } + fun assign_pattern(s: S, f: u64, h: u64): u64 { + S{f: f,g: T{h: h}} = s; + f + h + } + fun assign_struct(s: &mut S) { + *s = S{f: 42,g: T{h: 42}}; + } +} + ============ initial bytecode ================ [variant baseline] -fun assign::assign_field($t0: &mut assign::S, $t1: u64) { +fun assign::assign_field($t0: &mut 0x42::assign::S, $t1: u64) { var $t2: &mut u64 - 0: $t2 := borrow_field.f($t0) + 0: $t2 := borrow_field<0x42::assign::S>.f($t0) 1: write_ref($t2, $t1) 2: return () } @@ -46,26 +70,26 @@ fun assign::assign_int($t0: &mut u64) { [variant baseline] -fun assign::assign_pattern($t0: assign::S, $t1: u64, $t2: u64): u64 { +fun assign::assign_pattern($t0: 0x42::assign::S, $t1: u64, $t2: u64): u64 { var $t3: u64 - var $t4: assign::T - 0: ($t1, $t4) := unpack assign::S($t0) - 1: $t2 := unpack assign::T($t4) + var $t4: 0x42::assign::T + 0: ($t1, $t4) := unpack 0x42::assign::S($t0) + 1: $t2 := unpack 0x42::assign::T($t4) 2: $t3 := +($t1, $t2) 3: return $t3 } [variant baseline] -fun assign::assign_struct($t0: &mut assign::S) { - var $t1: assign::S +fun assign::assign_struct($t0: &mut 0x42::assign::S) { + var $t1: 0x42::assign::S var $t2: u64 - var $t3: assign::T + var $t3: 0x42::assign::T var $t4: u64 0: $t2 := 42 1: $t4 := 42 - 2: $t3 := pack assign::T($t4) - 3: $t1 := pack assign::S($t2, $t3) + 2: $t3 := pack 0x42::assign::T($t4) + 3: $t1 := pack 
0x42::assign::S($t2, $t3) 4: write_ref($t0, $t1) 5: return () } diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/assign_inline.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/assign_inline.exp index 29e5c85a6a3a8..4a09468c4d890 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/assign_inline.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/assign_inline.exp @@ -8,6 +8,16 @@ module 0x42::assign { } } // end 0x42::assign +// -- Sourcified model before bytecode pipeline +module 0x42::assign { + public inline fun expose(x: u64): (u64, u64) { + (1, x) + } + public fun main(): (u64, u64) { + (1, 3) + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/borrow.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/borrow.exp index 1c64fb91482d1..2ede6b56aa928 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/borrow.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/borrow.exp @@ -17,9 +17,9 @@ module 0x42::borrow { struct S { f: u64, } - private fun field(s: &borrow::S): u64 { + private fun field(s: &S): u64 { { - let r: &u64 = Borrow(Immutable)(select borrow::S.f<&borrow::S>(s)); + let r: &u64 = Borrow(Immutable)(select borrow::S.f<&S>(s)); Deref(r) } } @@ -35,9 +35,9 @@ module 0x42::borrow { Deref(r) } } - private fun mut_field(s: &mut borrow::S): u64 { + private fun mut_field(s: &mut S): u64 { { - let r: &mut u64 = Borrow(Mutable)(select borrow::S.f<&mut borrow::S>(s)); + let r: &mut u64 = Borrow(Mutable)(select borrow::S.f<&mut S>(s)); r = 22; Deref(r) } @@ -61,13 +61,48 @@ module 0x42::borrow { } } // end 0x42::borrow +// -- Sourcified model before bytecode pipeline +module 0x42::borrow { + struct S { + f: u64, + } + fun field(s: &S): u64 { + let r = &s.f; + *r + } + fun local(param: u64): u64 { + let r = &33; + *r + } + fun param(param: u64): u64 
{ + let r = ¶m; + *r + } + fun mut_field(s: &mut S): u64 { + let r = &mut s.f; + *r = 22; + *r + } + fun mut_local(param: u64): u64 { + let local = 33; + let r = &mut local; + *r = 22; + *r + } + fun mut_param(param: u64): u64 { + let r = &mut param; + *r = 22; + *r + } +} + ============ initial bytecode ================ [variant baseline] -fun borrow::field($t0: &borrow::S): u64 { +fun borrow::field($t0: &0x42::borrow::S): u64 { var $t1: u64 var $t2: &u64 - 0: $t2 := borrow_field.f($t0) + 0: $t2 := borrow_field<0x42::borrow::S>.f($t0) 1: $t1 := read_ref($t2) 2: return $t1 } @@ -96,11 +131,11 @@ fun borrow::param($t0: u64): u64 { [variant baseline] -fun borrow::mut_field($t0: &mut borrow::S): u64 { +fun borrow::mut_field($t0: &mut 0x42::borrow::S): u64 { var $t1: u64 var $t2: &mut u64 var $t3: u64 - 0: $t2 := borrow_field.f($t0) + 0: $t2 := borrow_field<0x42::borrow::S>.f($t0) 1: $t3 := 22 2: write_ref($t2, $t3) 3: $t1 := read_ref($t2) diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/borrow_deref_optimize.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/borrow_deref_optimize.exp index a80f9f4e4a890..bc23929966e0e 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/borrow_deref_optimize.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/borrow_deref_optimize.exp @@ -4,11 +4,11 @@ module 0x42::test { value: bool, } private fun no_optimize_resource(): bool - acquires test::X(*) + acquires X(*) { { - let x: &mut test::X = Borrow(Mutable)(Deref(BorrowGlobal(Immutable)(0x1))); - select test::X.value<&mut test::X>(x) + let x: &mut X = Borrow(Mutable)(Deref(BorrowGlobal(Immutable)(0x1))); + select test::X.value<&mut X>(x) } } private fun no_optimize_vector() { @@ -18,11 +18,11 @@ module 0x42::test { } } private fun optimize_resource(): bool - acquires test::X(*) + acquires X(*) { { - let x: &test::X = Borrow(Immutable)(Deref(BorrowGlobal(Immutable)(0x1))); - select test::X.value<&test::X>(x) + let 
x: &X = Borrow(Immutable)(Deref(BorrowGlobal(Immutable)(0x1))); + select test::X.value<&X>(x) } } private fun optimize_vector() { @@ -36,21 +36,47 @@ module 0x42::test { } } // end 0x42::test +// -- Sourcified model before bytecode pipeline +module 0x42::test { + struct X has copy, drop, key { + value: bool, + } + fun no_optimize_resource(): bool + acquires X + { + let x = &mut *borrow_global(0x1); + x.value + } + fun no_optimize_vector() { + let _ = 0x1::vector::borrow_mut(&mut *0x1::vector::borrow>(&vector[vector[1, 2]], 0), 1); + } + fun optimize_resource(): bool + acquires X + { + let x = &*borrow_global(0x1); + x.value + } + fun optimize_vector() { + let x = vector[vector[1, 2]]; + let _ = 0x1::vector::borrow_mut(&mut *0x1::vector::borrow_mut>(&mut x, 0), 1); + } +} + ============ initial bytecode ================ [variant baseline] fun test::no_optimize_resource(): bool { var $t0: bool - var $t1: &mut test::X - var $t2: test::X - var $t3: &test::X + var $t1: &mut 0x42::test::X + var $t2: 0x42::test::X + var $t3: &0x42::test::X var $t4: address var $t5: &bool 0: $t4 := 0x1 - 1: $t3 := borrow_global($t4) + 1: $t3 := borrow_global<0x42::test::X>($t4) 2: $t2 := read_ref($t3) 3: $t1 := borrow_local($t2) - 4: $t5 := borrow_field.value($t1) + 4: $t5 := borrow_field<0x42::test::X>.value($t1) 5: $t0 := read_ref($t5) 6: return $t0 } @@ -83,12 +109,12 @@ fun test::no_optimize_vector() { [variant baseline] fun test::optimize_resource(): bool { var $t0: bool - var $t1: &test::X + var $t1: &0x42::test::X var $t2: address var $t3: &bool 0: $t2 := 0x1 - 1: $t1 := borrow_global($t2) - 2: $t3 := borrow_field.value($t1) + 1: $t1 := borrow_global<0x42::test::X>($t2) + 2: $t3 := borrow_field<0x42::test::X>.value($t1) 3: $t0 := read_ref($t3) 4: return $t0 } diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14300_update_variant_select.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14300_update_variant_select.exp index 
0a25e2fb1e722..453eb08801151 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14300_update_variant_select.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14300_update_variant_select.exp @@ -16,51 +16,84 @@ module 0x815::m { } private fun update_common_field(): u64 { { - let common: m::CommonFields = pack m::CommonFields::Bar(30, 40, 50); - select_variants m::CommonFields.Foo.x|m::CommonFields.Bar.x(common) = 15; - select_variants m::CommonFields.Foo.x|m::CommonFields.Bar.x(common) + let common: CommonFields = pack m::CommonFields::Bar(30, 40, 50); + select_variants m::CommonFields.Foo.x|m::CommonFields.Bar.x(common) = 15; + select_variants m::CommonFields.Foo.x|m::CommonFields.Bar.x(common) } } private fun update_common_field_different_offset(): u8 { { - let common: m::CommonFields = pack m::CommonFields::Bar(30, 40, 50); - select_variants m::CommonFields.Foo.y|m::CommonFields.Bar.y|m::CommonFields.Baz.y(common) = 15; - select_variants m::CommonFields.Foo.y|m::CommonFields.Bar.y|m::CommonFields.Baz.y(common) + let common: CommonFields = pack m::CommonFields::Bar(30, 40, 50); + select_variants m::CommonFields.Foo.y|m::CommonFields.Bar.y|m::CommonFields.Baz.y(common) = 15; + select_variants m::CommonFields.Foo.y|m::CommonFields.Bar.y|m::CommonFields.Baz.y(common) } } private fun update_non_common_field(): u32 { { - let common: m::CommonFields = pack m::CommonFields::Bar(30, 40, 50); - select_variants m::CommonFields.Bar.z(common) = 15; - select_variants m::CommonFields.Bar.z(common) + let common: CommonFields = pack m::CommonFields::Bar(30, 40, 50); + select_variants m::CommonFields.Bar.z(common) = 15; + select_variants m::CommonFields.Bar.z(common) } } } // end 0x815::m +// -- Sourcified model before bytecode pipeline +module 0x815::m { + enum CommonFields has drop { + Foo { + x: u64, + y: u8, + } + Bar { + x: u64, + y: u8, + z: u32, + } + Baz { + y: u8, + } + } + fun update_common_field(): u64 { + let common = 
CommonFields::Bar{x: 30,y: 40u8,z: 50u32}; + common.Foo.x = 15; + common.Foo.x + } + fun update_common_field_different_offset(): u8 { + let common = CommonFields::Bar{x: 30,y: 40u8,z: 50u32}; + common.Foo.y = 15u8; + common.Foo.y + } + fun update_non_common_field(): u32 { + let common = CommonFields::Bar{x: 30,y: 40u8,z: 50u32}; + common.Bar.z = 15u32; + common.Bar.z + } +} + ============ initial bytecode ================ [variant baseline] fun m::update_common_field(): u64 { var $t0: u64 - var $t1: m::CommonFields + var $t1: 0x815::m::CommonFields var $t2: u64 var $t3: u8 var $t4: u32 var $t5: u64 var $t6: &mut u64 - var $t7: &mut m::CommonFields - var $t8: &m::CommonFields + var $t7: &mut 0x815::m::CommonFields + var $t8: &0x815::m::CommonFields var $t9: &u64 0: $t2 := 30 1: $t3 := 40 2: $t4 := 50 - 3: $t1 := pack_variant m::CommonFields::Bar($t2, $t3, $t4) + 3: $t1 := pack_variant 0x815::m::CommonFields::Bar($t2, $t3, $t4) 4: $t5 := 15 5: $t7 := borrow_local($t1) - 6: $t6 := borrow_variant_field.x($t7) + 6: $t6 := borrow_variant_field<0x815::m::CommonFields::Foo|Bar>.x($t7) 7: write_ref($t6, $t5) 8: $t8 := borrow_local($t1) - 9: $t9 := borrow_variant_field.x($t8) + 9: $t9 := borrow_variant_field<0x815::m::CommonFields::Foo|Bar>.x($t8) 10: $t0 := read_ref($t9) 11: return $t0 } @@ -69,50 +102,50 @@ fun m::update_common_field(): u64 { [variant baseline] fun m::update_common_field_different_offset(): u8 { var $t0: u8 - var $t1: m::CommonFields + var $t1: 0x815::m::CommonFields var $t2: u64 var $t3: u8 var $t4: u32 var $t5: u8 var $t6: &mut u8 - var $t7: &mut m::CommonFields + var $t7: &mut 0x815::m::CommonFields var $t8: bool - var $t9: &m::CommonFields + var $t9: &0x815::m::CommonFields var $t10: &u8 var $t11: bool 0: $t2 := 30 1: $t3 := 40 2: $t4 := 50 - 3: $t1 := pack_variant m::CommonFields::Bar($t2, $t3, $t4) + 3: $t1 := pack_variant 0x815::m::CommonFields::Bar($t2, $t3, $t4) 4: $t5 := 15 5: $t7 := borrow_local($t1) - 6: $t8 := test_variant 
m::CommonFields::Foo($t7) + 6: $t8 := test_variant 0x815::m::CommonFields::Foo($t7) 7: if ($t8) goto 13 else goto 8 8: label L3 - 9: $t8 := test_variant m::CommonFields::Bar($t7) + 9: $t8 := test_variant 0x815::m::CommonFields::Bar($t7) 10: if ($t8) goto 13 else goto 11 11: label L4 12: goto 16 13: label L2 - 14: $t6 := borrow_variant_field.y($t7) + 14: $t6 := borrow_variant_field<0x815::m::CommonFields::Foo|Bar>.y($t7) 15: goto 18 16: label L1 - 17: $t6 := borrow_variant_field.y($t7) + 17: $t6 := borrow_variant_field<0x815::m::CommonFields::Baz>.y($t7) 18: label L0 19: write_ref($t6, $t5) 20: $t9 := borrow_local($t1) - 21: $t11 := test_variant m::CommonFields::Foo($t9) + 21: $t11 := test_variant 0x815::m::CommonFields::Foo($t9) 22: if ($t11) goto 28 else goto 23 23: label L8 - 24: $t11 := test_variant m::CommonFields::Bar($t9) + 24: $t11 := test_variant 0x815::m::CommonFields::Bar($t9) 25: if ($t11) goto 28 else goto 26 26: label L9 27: goto 31 28: label L7 - 29: $t10 := borrow_variant_field.y($t9) + 29: $t10 := borrow_variant_field<0x815::m::CommonFields::Foo|Bar>.y($t9) 30: goto 33 31: label L6 - 32: $t10 := borrow_variant_field.y($t9) + 32: $t10 := borrow_variant_field<0x815::m::CommonFields::Baz>.y($t9) 33: label L5 34: $t0 := read_ref($t10) 35: return $t0 @@ -122,25 +155,25 @@ fun m::update_common_field_different_offset(): u8 { [variant baseline] fun m::update_non_common_field(): u32 { var $t0: u32 - var $t1: m::CommonFields + var $t1: 0x815::m::CommonFields var $t2: u64 var $t3: u8 var $t4: u32 var $t5: u32 var $t6: &mut u32 - var $t7: &mut m::CommonFields - var $t8: &m::CommonFields + var $t7: &mut 0x815::m::CommonFields + var $t8: &0x815::m::CommonFields var $t9: &u32 0: $t2 := 30 1: $t3 := 40 2: $t4 := 50 - 3: $t1 := pack_variant m::CommonFields::Bar($t2, $t3, $t4) + 3: $t1 := pack_variant 0x815::m::CommonFields::Bar($t2, $t3, $t4) 4: $t5 := 15 5: $t7 := borrow_local($t1) - 6: $t6 := borrow_variant_field.z($t7) + 6: $t6 := 
borrow_variant_field<0x815::m::CommonFields::Bar>.z($t7) 7: write_ref($t6, $t5) 8: $t8 := borrow_local($t1) - 9: $t9 := borrow_variant_field.z($t8) + 9: $t9 := borrow_variant_field<0x815::m::CommonFields::Bar>.z($t8) 10: $t0 := read_ref($t9) 11: return $t0 } diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14300_variant_select_autoref.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14300_variant_select_autoref.exp index 285f4a28191fe..0d74991c8f9b0 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14300_variant_select_autoref.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14300_variant_select_autoref.exp @@ -10,28 +10,45 @@ module 0x815::m { } private fun test_common_access(): u8 { { - let x: m::Positional = pack m::Positional::A(42); - select_variants m::Positional.A.0|m::Positional.B.0(x) = 19; + let x: Positional = pack m::Positional::A(42); + select_variants m::Positional.A.0|m::Positional.B.0(x) = 19; 20 } } } // end 0x815::m +// -- Sourcified model before bytecode pipeline +module 0x815::m { + enum Positional has drop { + A { + 0: u8, + } + B { + 0: u8, + } + } + fun test_common_access(): u8 { + let x = Positional::A(42u8); + x.A.0 = 19u8; + 20u8 + } +} + ============ initial bytecode ================ [variant baseline] fun m::test_common_access(): u8 { var $t0: u8 - var $t1: m::Positional + var $t1: 0x815::m::Positional var $t2: u8 var $t3: u8 var $t4: &mut u8 - var $t5: &mut m::Positional + var $t5: &mut 0x815::m::Positional 0: $t2 := 42 - 1: $t1 := pack_variant m::Positional::A($t2) + 1: $t1 := pack_variant 0x815::m::Positional::A($t2) 2: $t3 := 19 3: $t5 := borrow_local($t1) - 4: $t4 := borrow_variant_field.0($t5) + 4: $t4 := borrow_variant_field<0x815::m::Positional::A|B>.0($t5) 5: write_ref($t4, $t3) 6: $t0 := 20 7: return $t0 diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14471_receiver_inference.exp 
b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14471_receiver_inference.exp index c8a13847d8cae..693daacb1ff71 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14471_receiver_inference.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14471_receiver_inference.exp @@ -1,30 +1,30 @@ // -- Model dump before bytecode pipeline module 0x815::m { struct MyMap { - table: m::Table, + table: Table, } - struct Table { - x: #0, - y: #1, + struct Table { + x: T1, + y: T2, } struct ValueWrap { val: u64, } - private fun contains(self: &m::Table<#0, #1>,_key: #0): bool { + private fun contains(self: &Table,_key: T1): bool { true } - private fun add(self: &mut m::Table<#0, #1>,_key: #0,_val: #1) { + private fun add(self: &mut Table,_key: T1,_val: T2) { Tuple() } public fun add_when_missing(key: address,val: u64) - acquires m::MyMap(*) + acquires MyMap(*) { { - let my_map: &mut m::MyMap = BorrowGlobal(Mutable)(0x815); - if Not(m::contains(Borrow(Immutable)(select m::MyMap.table<&mut m::MyMap>(my_map)), key)) { + let my_map: &mut MyMap = BorrowGlobal(Mutable)(0x815); + if Not(m::contains(Borrow(Immutable)(select m::MyMap.table<&mut MyMap>(my_map)), key)) { { - let wrap: m::ValueWrap = pack m::ValueWrap(val); - m::add(Borrow(Mutable)(select m::MyMap.table<&mut m::MyMap>(my_map)), key, wrap); + let wrap: ValueWrap = pack m::ValueWrap(val); + m::add(Borrow(Mutable)(select m::MyMap.table<&mut MyMap>(my_map)), key, wrap); Tuple() } } else { @@ -34,10 +34,38 @@ module 0x815::m { } } // end 0x815::m +// -- Sourcified model before bytecode pipeline +module 0x815::m { + struct MyMap has key { + table: Table, + } + struct Table has store { + x: T1, + y: T2, + } + struct ValueWrap has drop, store { + val: u64, + } + fun contains(self: &Table, _key: T1): bool { + true + } + fun add(self: &mut Table, _key: T1, _val: T2) { + } + public fun add_when_missing(key: address, val: u64) + acquires MyMap + { + let my_map = 
borrow_global_mut(0x815); + if (!contains(&my_map.table, key)) { + let wrap = ValueWrap{val: val}; + add(&mut my_map.table, key, wrap); + } + } +} + ============ initial bytecode ================ [variant baseline] -fun m::contains<#0, #1>($t0: &m::Table<#0, #1>, $t1: #0): bool { +fun m::contains<#0, #1>($t0: &0x815::m::Table<#0, #1>, $t1: #0): bool { var $t2: bool 0: $t2 := true 1: return $t2 @@ -45,30 +73,30 @@ fun m::contains<#0, #1>($t0: &m::Table<#0, #1>, $t1: #0): bool { [variant baseline] -fun m::add<#0, #1>($t0: &mut m::Table<#0, #1>, $t1: #0, $t2: #1) { +fun m::add<#0, #1>($t0: &mut 0x815::m::Table<#0, #1>, $t1: #0, $t2: #1) { 0: return () } [variant baseline] public fun m::add_when_missing($t0: address, $t1: u64) { - var $t2: &mut m::MyMap + var $t2: &mut 0x815::m::MyMap var $t3: address var $t4: bool var $t5: bool - var $t6: &m::Table - var $t7: m::ValueWrap - var $t8: &mut m::Table + var $t6: &0x815::m::Table + var $t7: 0x815::m::ValueWrap + var $t8: &mut 0x815::m::Table 0: $t3 := 0x815 - 1: $t2 := borrow_global($t3) - 2: $t6 := borrow_field.table($t2) - 3: $t5 := m::contains($t6, $t0) + 1: $t2 := borrow_global<0x815::m::MyMap>($t3) + 2: $t6 := borrow_field<0x815::m::MyMap>.table($t2) + 3: $t5 := m::contains($t6, $t0) 4: $t4 := !($t5) 5: if ($t4) goto 6 else goto 11 6: label L0 - 7: $t7 := pack m::ValueWrap($t1) - 8: $t8 := borrow_field.table($t2) - 9: m::add($t8, $t0, $t7) + 7: $t7 := pack 0x815::m::ValueWrap($t1) + 8: $t8 := borrow_field<0x815::m::MyMap>.table($t2) + 9: m::add($t8, $t0, $t7) 10: goto 12 11: label L1 12: label L2 diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/conditional_borrow.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/conditional_borrow.exp index 9c7e360aea5b9..2e4693e518bd7 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/conditional_borrow.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/conditional_borrow.exp @@ -47,39 +47,39 @@ module 
0x8675::M { } } } - private fun test1b(r: M::S): u64 { + private fun test1b(r: S): u64 { { - let x: M::S = pack M::S(3); + let x: S = pack M::S(3); { - let tref: &mut M::S = Borrow(Mutable)(if Lt(select M::S.f(r), 4) { + let tref: &mut S = Borrow(Mutable)(if Lt(select M::S.f(r), 4) { r } else { x }); - select M::S.f(Deref(tref)) = 10; + select M::S.f(Deref(tref)) = 10; { - let y: M::S = r; + let y: S = r; { - let tref2: &mut M::S = Borrow(Mutable)(y); - select M::S.f(Deref(tref2)) = Add(select M::S.f(Deref(tref2)), 1); + let tref2: &mut S = Borrow(Mutable)(y); + select M::S.f(Deref(tref2)) = Add(select M::S.f(Deref(tref2)), 1); { - let z: M::S = y; + let z: S = y; { - let tref3: &mut u64 = Borrow(Mutable)(select M::S.f(z)); + let tref3: &mut u64 = Borrow(Mutable)(select M::S.f(z)); tref3 = Add(Deref(tref3), 1); { - let a: M::S = z; + let a: S = z; { - let tref4: &mut u64 = Borrow(Mutable)(select M::S.f(a)); + let tref4: &mut u64 = Borrow(Mutable)(select M::S.f(a)); tref4 = Add(Deref(tref4), 1); { - let tref5: &mut u64 = Borrow(Mutable)(select M::S.f(a)); + let tref5: &mut u64 = Borrow(Mutable)(select M::S.f(a)); tref5 = Add(Deref(tref5), 8); { let tref6: &mut u64 = Borrow(Mutable)(3; - select M::S.f(a)); + select M::S.f(a)); tref6 = Add(Deref(tref6), 16); - select M::S.f(a) + select M::S.f(a) } } } @@ -96,6 +96,62 @@ module 0x8675::M { } } // end 0x8675::M +// -- Sourcified model before bytecode pipeline +module 0x8675::M { + struct S has copy, drop { + f: u64, + } + public fun test(): u64 { + test1(7) + test1(2) + } + fun test1(r: u64): u64 { + let tref = &mut (if (r < 4) r else 3); + *tref = 10; + let y = r; + let tref2 = &mut y; + *tref2 = *tref2 + 1; + let z = y; + let tref3 = &mut (z + 0); + *tref3 = *tref3 + 2; + let a = z; + let tref4 = &mut a; + *tref4 = *tref4 + 4; + let tref5 = &mut a; + *tref5 = *tref5 + 8; + let tref6 = &mut { + 3; + a + }; + *tref6 = *tref6 + 16; + a + } + fun test1b(r: S): u64 { + let x = S{f: 3}; + let tref = &mut (if (r.f < 4) r 
else x); + (*tref).f = 10; + let y = r; + let tref2 = &mut y; + (*tref2).f = (*tref2).f + 1; + let z = y; + let tref3 = &mut z.f; + *tref3 = *tref3 + 1; + let a = z; + let tref4 = &mut a.f; + *tref4 = *tref4 + 1; + let tref5 = &mut a.f; + *tref5 = *tref5 + 8; + let tref6 = &mut { + 3; + a.f + }; + *tref6 = *tref6 + 16; + a.f + } + public fun testb(): u64 { + test1b(S{f: 7}) + test1b(S{f: 2}) + } +} + ============ initial bytecode ================ [variant baseline] @@ -203,49 +259,49 @@ fun M::test1($t0: u64): u64 { [variant baseline] -fun M::test1b($t0: M::S): u64 { +fun M::test1b($t0: 0x8675::M::S): u64 { var $t1: u64 - var $t2: M::S + var $t2: 0x8675::M::S var $t3: u64 - var $t4: &mut M::S - var $t5: M::S + var $t4: &mut 0x8675::M::S + var $t5: 0x8675::M::S var $t6: bool var $t7: u64 - var $t8: &M::S + var $t8: &0x8675::M::S var $t9: &u64 var $t10: u64 var $t11: u64 var $t12: &mut u64 - var $t13: M::S - var $t14: &mut M::S - var $t15: M::S - var $t16: &mut M::S + var $t13: 0x8675::M::S + var $t14: &mut 0x8675::M::S + var $t15: 0x8675::M::S + var $t16: &mut 0x8675::M::S var $t17: u64 var $t18: u64 - var $t19: M::S - var $t20: &M::S + var $t19: 0x8675::M::S + var $t20: &0x8675::M::S var $t21: &u64 var $t22: u64 var $t23: &mut u64 - var $t24: M::S - var $t25: &mut M::S - var $t26: M::S + var $t24: 0x8675::M::S + var $t25: &mut 0x8675::M::S + var $t26: 0x8675::M::S var $t27: &mut u64 - var $t28: &mut M::S + var $t28: &mut 0x8675::M::S var $t29: u64 var $t30: u64 var $t31: u64 - var $t32: M::S + var $t32: 0x8675::M::S var $t33: &mut u64 var $t34: u64 - var $t35: &M::S + var $t35: &0x8675::M::S var $t36: &u64 var $t37: u64 var $t38: u64 var $t39: u64 var $t40: &mut u64 var $t41: u64 - var $t42: &M::S + var $t42: &0x8675::M::S var $t43: &u64 var $t44: u64 var $t45: u64 @@ -253,17 +309,17 @@ fun M::test1b($t0: M::S): u64 { var $t47: &mut u64 var $t48: u64 var $t49: u64 - var $t50: &M::S + var $t50: &0x8675::M::S var $t51: &u64 var $t52: u64 var $t53: u64 var $t54: u64 - 
var $t55: &M::S + var $t55: &0x8675::M::S var $t56: &u64 0: $t3 := 3 - 1: $t2 := pack M::S($t3) + 1: $t2 := pack 0x8675::M::S($t3) 2: $t8 := borrow_local($t0) - 3: $t9 := borrow_field.f($t8) + 3: $t9 := borrow_field<0x8675::M::S>.f($t8) 4: $t7 := read_ref($t9) 5: $t10 := 4 6: $t6 := <($t7, $t10) @@ -278,30 +334,30 @@ fun M::test1b($t0: M::S): u64 { 15: $t11 := 10 16: $t13 := read_ref($t4) 17: $t14 := borrow_local($t13) - 18: $t12 := borrow_field.f($t14) + 18: $t12 := borrow_field<0x8675::M::S>.f($t14) 19: write_ref($t12, $t11) 20: $t15 := infer($t0) 21: $t16 := borrow_local($t15) 22: $t19 := read_ref($t16) 23: $t20 := borrow_local($t19) - 24: $t21 := borrow_field.f($t20) + 24: $t21 := borrow_field<0x8675::M::S>.f($t20) 25: $t18 := read_ref($t21) 26: $t22 := 1 27: $t17 := +($t18, $t22) 28: $t24 := read_ref($t16) 29: $t25 := borrow_local($t24) - 30: $t23 := borrow_field.f($t25) + 30: $t23 := borrow_field<0x8675::M::S>.f($t25) 31: write_ref($t23, $t17) 32: $t26 := infer($t15) 33: $t28 := borrow_local($t26) - 34: $t27 := borrow_field.f($t28) + 34: $t27 := borrow_field<0x8675::M::S>.f($t28) 35: $t30 := read_ref($t27) 36: $t31 := 1 37: $t29 := +($t30, $t31) 38: write_ref($t27, $t29) 39: $t32 := infer($t26) 40: $t35 := borrow_local($t32) - 41: $t36 := borrow_field.f($t35) + 41: $t36 := borrow_field<0x8675::M::S>.f($t35) 42: $t34 := read_ref($t36) 43: $t33 := borrow_local($t34) 44: $t38 := read_ref($t33) @@ -309,7 +365,7 @@ fun M::test1b($t0: M::S): u64 { 46: $t37 := +($t38, $t39) 47: write_ref($t33, $t37) 48: $t42 := borrow_local($t32) - 49: $t43 := borrow_field.f($t42) + 49: $t43 := borrow_field<0x8675::M::S>.f($t42) 50: $t41 := read_ref($t43) 51: $t40 := borrow_local($t41) 52: $t45 := read_ref($t40) @@ -318,7 +374,7 @@ fun M::test1b($t0: M::S): u64 { 55: write_ref($t40, $t44) 56: $t49 := 3 57: $t50 := borrow_local($t32) - 58: $t51 := borrow_field.f($t50) + 58: $t51 := borrow_field<0x8675::M::S>.f($t50) 59: $t48 := read_ref($t51) 60: $t47 := borrow_local($t48) 61: $t53 
:= read_ref($t47) @@ -326,7 +382,7 @@ fun M::test1b($t0: M::S): u64 { 63: $t52 := +($t53, $t54) 64: write_ref($t47, $t52) 65: $t55 := borrow_local($t32) - 66: $t56 := borrow_field.f($t55) + 66: $t56 := borrow_field<0x8675::M::S>.f($t55) 67: $t1 := read_ref($t56) 68: return $t1 } @@ -336,16 +392,16 @@ fun M::test1b($t0: M::S): u64 { public fun M::testb(): u64 { var $t0: u64 var $t1: u64 - var $t2: M::S + var $t2: 0x8675::M::S var $t3: u64 var $t4: u64 - var $t5: M::S + var $t5: 0x8675::M::S var $t6: u64 0: $t3 := 7 - 1: $t2 := pack M::S($t3) + 1: $t2 := pack 0x8675::M::S($t3) 2: $t1 := M::test1b($t2) 3: $t6 := 2 - 4: $t5 := pack M::S($t6) + 4: $t5 := pack 0x8675::M::S($t6) 5: $t4 := M::test1b($t5) 6: $t0 := +($t1, $t4) 7: return $t0 diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/escape_autoref.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/escape_autoref.exp index 3dfca1c2c5e39..aae0dcbebfe42 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/escape_autoref.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/escape_autoref.exp @@ -6,32 +6,59 @@ module 0x42::m { struct ObjectCore { owner: address, } - private fun make(): m::Object { + private fun make(): Object { Abort(0) } - private fun owner_correct(o: m::Object): address - acquires m::ObjectCore(*) + private fun owner_correct(o: Object): address + acquires ObjectCore(*) { { - let addr: address = select m::Object.inner(o); - select m::ObjectCore.owner<&m::ObjectCore>(BorrowGlobal(Immutable)(addr)) + let addr: address = select m::Object.inner(o); + select m::ObjectCore.owner<&ObjectCore>(BorrowGlobal(Immutable)(addr)) } } - private fun owner_read_ref_missing(o: m::Object): address - acquires m::ObjectCore(*) + private fun owner_read_ref_missing(o: Object): address + acquires ObjectCore(*) { - select m::ObjectCore.owner<&m::ObjectCore>(BorrowGlobal(Immutable)(select m::Object.inner(o))) + select 
m::ObjectCore.owner<&ObjectCore>(BorrowGlobal(Immutable)(select m::Object.inner(o))) } private fun will_autoref(): address { - select m::Object.inner(m::make()) + select m::Object.inner(m::make()) } } // end 0x42::m +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct Object has copy, drop { + inner: address, + } + struct ObjectCore has key { + owner: address, + } + fun make(): Object { + abort 0 + } + fun owner_correct(o: Object): address + acquires ObjectCore + { + let addr = o.inner; + borrow_global(addr).owner + } + fun owner_read_ref_missing(o: Object): address + acquires ObjectCore + { + borrow_global(o.inner).owner + } + fun will_autoref(): address { + make().inner + } +} + ============ initial bytecode ================ [variant baseline] -fun m::make(): m::Object { - var $t0: m::Object +fun m::make(): 0x42::m::Object { + var $t0: 0x42::m::Object var $t1: u64 0: $t1 := 0 1: abort($t1) @@ -40,36 +67,36 @@ fun m::make(): m::Object { [variant baseline] -fun m::owner_correct($t0: m::Object): address { +fun m::owner_correct($t0: 0x42::m::Object): address { var $t1: address var $t2: address - var $t3: &m::Object + var $t3: &0x42::m::Object var $t4: &address - var $t5: &m::ObjectCore + var $t5: &0x42::m::ObjectCore var $t6: &address 0: $t3 := borrow_local($t0) - 1: $t4 := borrow_field.inner($t3) + 1: $t4 := borrow_field<0x42::m::Object>.inner($t3) 2: $t2 := read_ref($t4) - 3: $t5 := borrow_global($t2) - 4: $t6 := borrow_field.owner($t5) + 3: $t5 := borrow_global<0x42::m::ObjectCore>($t2) + 4: $t6 := borrow_field<0x42::m::ObjectCore>.owner($t5) 5: $t1 := read_ref($t6) 6: return $t1 } [variant baseline] -fun m::owner_read_ref_missing($t0: m::Object): address { +fun m::owner_read_ref_missing($t0: 0x42::m::Object): address { var $t1: address - var $t2: &m::ObjectCore + var $t2: &0x42::m::ObjectCore var $t3: address - var $t4: &m::Object + var $t4: &0x42::m::Object var $t5: &address var $t6: &address 0: $t4 := borrow_local($t0) - 1: $t5 := 
borrow_field.inner($t4) + 1: $t5 := borrow_field<0x42::m::Object>.inner($t4) 2: $t3 := read_ref($t5) - 3: $t2 := borrow_global($t3) - 4: $t6 := borrow_field.owner($t2) + 3: $t2 := borrow_global<0x42::m::ObjectCore>($t3) + 4: $t6 := borrow_field<0x42::m::ObjectCore>.owner($t2) 5: $t1 := read_ref($t6) 6: return $t1 } @@ -78,12 +105,12 @@ fun m::owner_read_ref_missing($t0: m::Object): address { [variant baseline] fun m::will_autoref(): address { var $t0: address - var $t1: m::Object - var $t2: &m::Object + var $t1: 0x42::m::Object + var $t2: &0x42::m::Object var $t3: &address 0: $t1 := m::make() 1: $t2 := borrow_local($t1) - 2: $t3 := borrow_field.inner($t2) + 2: $t3 := borrow_field<0x42::m::Object>.inner($t2) 3: $t0 := read_ref($t3) 4: return $t0 } diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/fields.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/fields.exp index 9a0e87baab823..72ad2c417ca09 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/fields.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/fields.exp @@ -3,130 +3,180 @@ module 0x42::fields { struct T { h: u64, } - struct G { - f: #0, + struct G { + f: X, } struct S { f: u64, - g: fields::T, + g: T, } - private fun read_generic_val(x: fields::G): u64 { - select fields::G.f>(x) + private fun read_generic_val(x: G): u64 { + select fields::G.f>(x) } - private fun read_ref(x: &fields::S): u64 { - select fields::T.h(select fields::S.g<&fields::S>(x)) + private fun read_ref(x: &S): u64 { + select fields::T.h(select fields::S.g<&S>(x)) } - private fun read_val(x: fields::S): u64 { - select fields::T.h(select fields::S.g(x)) + private fun read_val(x: S): u64 { + select fields::T.h(select fields::S.g(x)) } - private fun write_generic_val(x: &mut fields::G,v: u64) { - select fields::G.f<&mut fields::G>(x) = v + private fun write_generic_val(x: &mut G,v: u64) { + select fields::G.f<&mut G>(x) = v } - private fun write_local_direct(): 
fields::S { + private fun write_local_direct(): S { { - let x: fields::S = pack fields::S(0, pack fields::T(0)); - select fields::T.h(select fields::S.g(x)) = 42; + let x: S = pack fields::S(0, pack fields::T(0)); + select fields::T.h(select fields::S.g(x)) = 42; x } } - private fun write_local_via_ref(): fields::S { + private fun write_local_via_ref(): S { { - let x: fields::S = pack fields::S(0, pack fields::T(0)); + let x: S = pack fields::S(0, pack fields::T(0)); { - let r: &mut fields::S = Borrow(Mutable)(x); - select fields::T.h(select fields::S.g<&mut fields::S>(r)) = 42; + let r: &mut S = Borrow(Mutable)(x); + select fields::T.h(select fields::S.g<&mut S>(r)) = 42; x } } } - private fun write_local_via_ref_2(): fields::S { + private fun write_local_via_ref_2(): S { { - let x: fields::S = pack fields::S(0, pack fields::T(0)); + let x: S = pack fields::S(0, pack fields::T(0)); { - let r: &mut u64 = Borrow(Mutable)(select fields::T.h(select fields::S.g(x))); + let r: &mut u64 = Borrow(Mutable)(select fields::T.h(select fields::S.g(x))); r = 42; x } } } - private fun write_param(x: &mut fields::S) { - select fields::T.h(select fields::S.g<&mut fields::S>(x)) = 42; + private fun write_param(x: &mut S) { + select fields::T.h(select fields::S.g<&mut S>(x)) = 42; Tuple() } - private fun write_val(x: fields::S): fields::S { - select fields::T.h(select fields::S.g(x)) = 42; + private fun write_val(x: S): S { + select fields::T.h(select fields::S.g(x)) = 42; x } } // end 0x42::fields +// -- Sourcified model before bytecode pipeline +module 0x42::fields { + struct T has drop { + h: u64, + } + struct G has drop { + f: X, + } + struct S has drop { + f: u64, + g: T, + } + fun read_generic_val(x: G): u64 { + x.f + } + fun read_ref(x: &S): u64 { + x.g.h + } + fun read_val(x: S): u64 { + x.g.h + } + fun write_generic_val(x: &mut G, v: u64) { + x.f = v + } + fun write_local_direct(): S { + let x = S{f: 0,g: T{h: 0}}; + x.g.h = 42; + x + } + fun write_local_via_ref(): S { + 
let x = S{f: 0,g: T{h: 0}}; + let r = &mut x; + r.g.h = 42; + x + } + fun write_local_via_ref_2(): S { + let x = S{f: 0,g: T{h: 0}}; + let r = &mut x.g.h; + *r = 42; + x + } + fun write_param(x: &mut S) { + x.g.h = 42; + } + fun write_val(x: S): S { + x.g.h = 42; + x + } +} + ============ initial bytecode ================ [variant baseline] -fun fields::read_generic_val($t0: fields::G): u64 { +fun fields::read_generic_val($t0: 0x42::fields::G): u64 { var $t1: u64 - var $t2: &fields::G + var $t2: &0x42::fields::G var $t3: &u64 0: $t2 := borrow_local($t0) - 1: $t3 := borrow_field>.f($t2) + 1: $t3 := borrow_field<0x42::fields::G>.f($t2) 2: $t1 := read_ref($t3) 3: return $t1 } [variant baseline] -fun fields::read_ref($t0: &fields::S): u64 { +fun fields::read_ref($t0: &0x42::fields::S): u64 { var $t1: u64 - var $t2: &fields::T + var $t2: &0x42::fields::T var $t3: &u64 - 0: $t2 := borrow_field.g($t0) - 1: $t3 := borrow_field.h($t2) + 0: $t2 := borrow_field<0x42::fields::S>.g($t0) + 1: $t3 := borrow_field<0x42::fields::T>.h($t2) 2: $t1 := read_ref($t3) 3: return $t1 } [variant baseline] -fun fields::read_val($t0: fields::S): u64 { +fun fields::read_val($t0: 0x42::fields::S): u64 { var $t1: u64 - var $t2: &fields::T - var $t3: &fields::S + var $t2: &0x42::fields::T + var $t3: &0x42::fields::S var $t4: &u64 0: $t3 := borrow_local($t0) - 1: $t2 := borrow_field.g($t3) - 2: $t4 := borrow_field.h($t2) + 1: $t2 := borrow_field<0x42::fields::S>.g($t3) + 2: $t4 := borrow_field<0x42::fields::T>.h($t2) 3: $t1 := read_ref($t4) 4: return $t1 } [variant baseline] -fun fields::write_generic_val($t0: &mut fields::G, $t1: u64) { +fun fields::write_generic_val($t0: &mut 0x42::fields::G, $t1: u64) { var $t2: &mut u64 - 0: $t2 := borrow_field>.f($t0) + 0: $t2 := borrow_field<0x42::fields::G>.f($t0) 1: write_ref($t2, $t1) 2: return () } [variant baseline] -fun fields::write_local_direct(): fields::S { - var $t0: fields::S - var $t1: fields::S +fun fields::write_local_direct(): 0x42::fields::S 
{ + var $t0: 0x42::fields::S + var $t1: 0x42::fields::S var $t2: u64 - var $t3: fields::T + var $t3: 0x42::fields::T var $t4: u64 var $t5: u64 var $t6: &mut u64 - var $t7: &mut fields::T - var $t8: &mut fields::S + var $t7: &mut 0x42::fields::T + var $t8: &mut 0x42::fields::S 0: $t2 := 0 1: $t4 := 0 - 2: $t3 := pack fields::T($t4) - 3: $t1 := pack fields::S($t2, $t3) + 2: $t3 := pack 0x42::fields::T($t4) + 3: $t1 := pack 0x42::fields::S($t2, $t3) 4: $t5 := 42 5: $t8 := borrow_local($t1) - 6: $t7 := borrow_field.g($t8) - 7: $t6 := borrow_field.h($t7) + 6: $t7 := borrow_field<0x42::fields::S>.g($t8) + 7: $t6 := borrow_field<0x42::fields::T>.h($t7) 8: write_ref($t6, $t5) 9: $t0 := infer($t1) 10: return $t0 @@ -134,24 +184,24 @@ fun fields::write_local_direct(): fields::S { [variant baseline] -fun fields::write_local_via_ref(): fields::S { - var $t0: fields::S - var $t1: fields::S +fun fields::write_local_via_ref(): 0x42::fields::S { + var $t0: 0x42::fields::S + var $t1: 0x42::fields::S var $t2: u64 - var $t3: fields::T + var $t3: 0x42::fields::T var $t4: u64 - var $t5: &mut fields::S + var $t5: &mut 0x42::fields::S var $t6: u64 var $t7: &mut u64 - var $t8: &mut fields::T + var $t8: &mut 0x42::fields::T 0: $t2 := 0 1: $t4 := 0 - 2: $t3 := pack fields::T($t4) - 3: $t1 := pack fields::S($t2, $t3) + 2: $t3 := pack 0x42::fields::T($t4) + 3: $t1 := pack 0x42::fields::S($t2, $t3) 4: $t5 := borrow_local($t1) 5: $t6 := 42 - 6: $t8 := borrow_field.g($t5) - 7: $t7 := borrow_field.h($t8) + 6: $t8 := borrow_field<0x42::fields::S>.g($t5) + 7: $t7 := borrow_field<0x42::fields::T>.h($t8) 8: write_ref($t7, $t6) 9: $t0 := infer($t1) 10: return $t0 @@ -159,23 +209,23 @@ fun fields::write_local_via_ref(): fields::S { [variant baseline] -fun fields::write_local_via_ref_2(): fields::S { - var $t0: fields::S - var $t1: fields::S +fun fields::write_local_via_ref_2(): 0x42::fields::S { + var $t0: 0x42::fields::S + var $t1: 0x42::fields::S var $t2: u64 - var $t3: fields::T + var $t3: 
0x42::fields::T var $t4: u64 var $t5: &mut u64 - var $t6: &mut fields::T - var $t7: &mut fields::S + var $t6: &mut 0x42::fields::T + var $t7: &mut 0x42::fields::S var $t8: u64 0: $t2 := 0 1: $t4 := 0 - 2: $t3 := pack fields::T($t4) - 3: $t1 := pack fields::S($t2, $t3) + 2: $t3 := pack 0x42::fields::T($t4) + 3: $t1 := pack 0x42::fields::S($t2, $t3) 4: $t7 := borrow_local($t1) - 5: $t6 := borrow_field.g($t7) - 6: $t5 := borrow_field.h($t6) + 5: $t6 := borrow_field<0x42::fields::S>.g($t7) + 6: $t5 := borrow_field<0x42::fields::T>.h($t6) 7: $t8 := 42 8: write_ref($t5, $t8) 9: $t0 := infer($t1) @@ -184,29 +234,29 @@ fun fields::write_local_via_ref_2(): fields::S { [variant baseline] -fun fields::write_param($t0: &mut fields::S) { +fun fields::write_param($t0: &mut 0x42::fields::S) { var $t1: u64 var $t2: &mut u64 - var $t3: &mut fields::T + var $t3: &mut 0x42::fields::T 0: $t1 := 42 - 1: $t3 := borrow_field.g($t0) - 2: $t2 := borrow_field.h($t3) + 1: $t3 := borrow_field<0x42::fields::S>.g($t0) + 2: $t2 := borrow_field<0x42::fields::T>.h($t3) 3: write_ref($t2, $t1) 4: return () } [variant baseline] -fun fields::write_val($t0: fields::S): fields::S { - var $t1: fields::S +fun fields::write_val($t0: 0x42::fields::S): 0x42::fields::S { + var $t1: 0x42::fields::S var $t2: u64 var $t3: &mut u64 - var $t4: &mut fields::T - var $t5: &mut fields::S + var $t4: &mut 0x42::fields::T + var $t5: &mut 0x42::fields::S 0: $t2 := 42 1: $t5 := borrow_local($t0) - 2: $t4 := borrow_field.g($t5) - 3: $t3 := borrow_field.h($t4) + 2: $t4 := borrow_field<0x42::fields::S>.g($t5) + 3: $t3 := borrow_field<0x42::fields::T>.h($t4) 4: write_ref($t3, $t2) 5: $t1 := infer($t0) 6: return $t1 diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/fields_invalid.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/fields_invalid.exp index b8cd6367f2bae..91fc860a9e2da 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/fields_invalid.exp +++ 
b/third_party/move/move-compiler-v2/tests/bytecode-generator/fields_invalid.exp @@ -5,17 +5,31 @@ module 0x42::fields { } struct S { f: u64, - g: fields::T, + g: T, } - private fun write_ref(x: &fields::S) { - select fields::T.h(select fields::S.g<&fields::S>(x)) = 42; + private fun write_ref(x: &S) { + select fields::T.h(select fields::S.g<&S>(x)) = 42; Tuple() } } // end 0x42::fields +// -- Sourcified model before bytecode pipeline +module 0x42::fields { + struct T { + h: u64, + } + struct S { + f: u64, + g: T, + } + fun write_ref(x: &S) { + x.g.h = 42; + } +} + Diagnostics: -error: expected `&mut` but found `&fields::S` +error: expected `&mut` but found `&S` ┌─ tests/bytecode-generator/fields_invalid.move:13:9 │ 13 │ x.g.h = 42; diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/freeze_mut_ref.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/freeze_mut_ref.exp index 4e123090b3f54..cf24cf3290946 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/freeze_mut_ref.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/freeze_mut_ref.exp @@ -7,20 +7,20 @@ module 0x42::freeze_mut_ref { struct S { dummy_field: bool, } - public fun borrow_mut(map: &mut vector<#0>): � { + public fun borrow_mut(map: &mut vector): &Element { Freeze(false)(vector::borrow_mut(map, 0)) } - public fun borrow_mut2(v: &mut #0): � { + public fun borrow_mut2(v: &mut Element): &Element { Freeze(false)(v) } - public fun borrow_mut3(v1: &mut #0,v2: �): � { + public fun borrow_mut3(v1: &mut Element,v2: &Element): &Element { if true { Freeze(false)(v1) } else { v2 } } - public fun borrow_mut4(v: &mut #0): � { + public fun borrow_mut4(v: &mut Element): &Element { return Freeze(false)(v) } private fun t0() { @@ -30,7 +30,7 @@ module 0x42::freeze_mut_ref { Tuple() } } - private fun t1(s: &mut freeze_mut_ref::S): &freeze_mut_ref::S { + private fun t1(s: &mut S): &S { Freeze(false)(s) } private fun t2(u1: &mut u64,u2: &mut u64): (&u64, 
&mut u64) { @@ -46,14 +46,14 @@ module 0x42::freeze_mut_ref { } } } - public fun t5(s: &mut freeze_mut_ref::G) { + public fun t5(s: &mut G) { { let x: u64 = 0; { - let f: &mut u64 = Borrow(Mutable)(select freeze_mut_ref::G.f<&mut freeze_mut_ref::G>(x: u64 = Add(x, 1); + let f: &mut u64 = Borrow(Mutable)(select freeze_mut_ref::G.f<&mut G>(x: u64 = Add(x, 1); s)); { - let g: &mut u64 = Borrow(Mutable)(select freeze_mut_ref::G.f<&mut freeze_mut_ref::G>(x: u64 = Add(x, 1); + let g: &mut u64 = Borrow(Mutable)(select freeze_mut_ref::G.f<&mut G>(x: u64 = Add(x, 1); s)); { let y: &mut u64 = Borrow(Mutable)(2); @@ -69,21 +69,21 @@ module 0x42::freeze_mut_ref { } } } - private fun t6(cond: bool,s: &mut freeze_mut_ref::S,other: &freeze_mut_ref::S) { + private fun t6(cond: bool,s: &mut S,other: &S) { { - let x: &freeze_mut_ref::S; + let x: &S; if cond { - x: &freeze_mut_ref::S = Freeze(false)(Copy(s)) + x: &S = Freeze(false)(Copy(s)) } else { - x: &freeze_mut_ref::S = other + x: &S = other }; Tuple() } } - private fun t7(cond: bool,s: &mut freeze_mut_ref::S,other: &freeze_mut_ref::S) { + private fun t7(cond: bool,s: &mut S,other: &S) { { - let _x: &freeze_mut_ref::S; - _x: &freeze_mut_ref::S = if cond { + let _x: &S; + _x: &S = if cond { Freeze(false)(s) } else { other @@ -91,9 +91,9 @@ module 0x42::freeze_mut_ref { Tuple() } } - private fun t8(cond: bool,s: &mut freeze_mut_ref::S,other: &freeze_mut_ref::S) { + private fun t8(cond: bool,s: &mut S,other: &S) { { - let _x: &freeze_mut_ref::S = if cond { + let _x: &S = if cond { Freeze(false)(s) } else { other @@ -103,6 +103,71 @@ module 0x42::freeze_mut_ref { } } // end 0x42::freeze_mut_ref +// -- Sourcified model before bytecode pipeline +module 0x42::freeze_mut_ref { + struct G { + f: u64, + } + struct S has drop { + } + public fun borrow_mut(map: &mut vector): &Element { + /*freeze*/0x1::vector::borrow_mut(map, 0) + } + public fun borrow_mut2(v: &mut Element): &Element { + /*freeze*/v + } + public fun borrow_mut3(v1: &mut 
Element, v2: &Element): &Element { + if (true) /*freeze*/v1 else v2 + } + public fun borrow_mut4(v: &mut Element): &Element { + /*freeze*/v + } + fun t0() { + let x = /*freeze*/&mut 0; + x; + } + fun t1(s: &mut S): &S { + /*freeze*/s + } + fun t2(u1: &mut u64, u2: &mut u64): (&u64, &mut u64) { + (/*freeze*/u1, u2) + } + public fun t4() { + let x; + let y; + (x,y) = (/*freeze*/&mut 0, /*freeze*/&mut 0); + } + public fun t5(s: &mut G) { + let x = 0; + let f = &mut { + x = x + 1; + s + }.f; + let g = &mut { + x = x + 1; + s + }.f; + let y = &mut 2; + let z; + *{ + *f = 0; + z = /*freeze*/y; + g + } = 2; + } + fun t6(cond: bool, s: &mut S, other: &S) { + let x; + if (cond) x = /*freeze*/copy s else x = other; + } + fun t7(cond: bool, s: &mut S, other: &S) { + let _x; + _x = if (cond) /*freeze*/s else other; + } + fun t8(cond: bool, s: &mut S, other: &S) { + let _x = if (cond) /*freeze*/s else other; + } +} + ============ initial bytecode ================ [variant baseline] @@ -165,8 +230,8 @@ fun freeze_mut_ref::t0() { [variant baseline] -fun freeze_mut_ref::t1($t0: &mut freeze_mut_ref::S): &freeze_mut_ref::S { - var $t1: &freeze_mut_ref::S +fun freeze_mut_ref::t1($t0: &mut 0x42::freeze_mut_ref::S): &0x42::freeze_mut_ref::S { + var $t1: &0x42::freeze_mut_ref::S 0: $t1 := freeze_ref(implicit)($t0) 1: return $t1 } @@ -205,14 +270,14 @@ public fun freeze_mut_ref::t4() { [variant baseline] -public fun freeze_mut_ref::t5($t0: &mut freeze_mut_ref::G) { +public fun freeze_mut_ref::t5($t0: &mut 0x42::freeze_mut_ref::G) { var $t1: u64 var $t2: &mut u64 - var $t3: &mut freeze_mut_ref::G + var $t3: &mut 0x42::freeze_mut_ref::G var $t4: u64 var $t5: u64 var $t6: &mut u64 - var $t7: &mut freeze_mut_ref::G + var $t7: &mut 0x42::freeze_mut_ref::G var $t8: u64 var $t9: u64 var $t10: &mut u64 @@ -227,12 +292,12 @@ public fun freeze_mut_ref::t5($t0: &mut freeze_mut_ref::G) { 2: $t4 := +($t1, $t5) 3: $t1 := infer($t4) 4: $t3 := infer($t0) - 5: $t2 := borrow_field.f($t3) + 5: $t2 := 
borrow_field<0x42::freeze_mut_ref::G>.f($t3) 6: $t9 := 1 7: $t8 := +($t1, $t9) 8: $t1 := infer($t8) 9: $t7 := infer($t0) - 10: $t6 := borrow_field.f($t7) + 10: $t6 := borrow_field<0x42::freeze_mut_ref::G>.f($t7) 11: $t11 := 2 12: $t10 := borrow_local($t11) 13: $t13 := 2 @@ -247,10 +312,10 @@ public fun freeze_mut_ref::t5($t0: &mut freeze_mut_ref::G) { [variant baseline] -fun freeze_mut_ref::t6($t0: bool, $t1: &mut freeze_mut_ref::S, $t2: &freeze_mut_ref::S) { - var $t3: &freeze_mut_ref::S - var $t4: &freeze_mut_ref::S - var $t5: &mut freeze_mut_ref::S +fun freeze_mut_ref::t6($t0: bool, $t1: &mut 0x42::freeze_mut_ref::S, $t2: &0x42::freeze_mut_ref::S) { + var $t3: &0x42::freeze_mut_ref::S + var $t4: &0x42::freeze_mut_ref::S + var $t5: &mut 0x42::freeze_mut_ref::S 0: if ($t0) goto 1 else goto 6 1: label L0 2: $t5 := copy($t1) @@ -265,9 +330,9 @@ fun freeze_mut_ref::t6($t0: bool, $t1: &mut freeze_mut_ref::S, $t2: &freeze_mut_ [variant baseline] -fun freeze_mut_ref::t7($t0: bool, $t1: &mut freeze_mut_ref::S, $t2: &freeze_mut_ref::S) { - var $t3: &freeze_mut_ref::S - var $t4: &freeze_mut_ref::S +fun freeze_mut_ref::t7($t0: bool, $t1: &mut 0x42::freeze_mut_ref::S, $t2: &0x42::freeze_mut_ref::S) { + var $t3: &0x42::freeze_mut_ref::S + var $t4: &0x42::freeze_mut_ref::S 0: if ($t0) goto 1 else goto 4 1: label L0 2: $t4 := freeze_ref(implicit)($t1) @@ -281,8 +346,8 @@ fun freeze_mut_ref::t7($t0: bool, $t1: &mut freeze_mut_ref::S, $t2: &freeze_mut_ [variant baseline] -fun freeze_mut_ref::t8($t0: bool, $t1: &mut freeze_mut_ref::S, $t2: &freeze_mut_ref::S) { - var $t3: &freeze_mut_ref::S +fun freeze_mut_ref::t8($t0: bool, $t1: &mut 0x42::freeze_mut_ref::S, $t2: &0x42::freeze_mut_ref::S) { + var $t3: &0x42::freeze_mut_ref::S 0: if ($t0) goto 1 else goto 4 1: label L0 2: $t3 := freeze_ref(implicit)($t1) diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/globals.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/globals.exp index 
5015f6fb1c18f..58f17e58316dc 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/globals.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/globals.exp @@ -12,48 +12,74 @@ module 0x42::globals { f: u64, } private fun check(a: address): bool { - exists(a) + exists(a) } private fun publish(s: &signer) { - MoveTo(s, pack globals::R(1)); + MoveTo(s, pack globals::R(1)); Tuple() } private fun read(a: address): u64 - acquires globals::R(*) + acquires R(*) { { - let r: &globals::R = BorrowGlobal(Immutable)(a); - select globals::R.f<&globals::R>(r) + let r: &R = BorrowGlobal(Immutable)(a); + select globals::R.f<&R>(r) } } private fun write(a: address,x: u64): u64 - acquires globals::R(*) + acquires R(*) { { - let r: &mut globals::R = BorrowGlobal(Mutable)(a); - select globals::R.f<&mut globals::R>(r) = 2; + let r: &mut R = BorrowGlobal(Mutable)(a); + select globals::R.f<&mut R>(r) = 2; 9 } } } // end 0x42::globals +// -- Sourcified model before bytecode pipeline +module 0x42::globals { + struct R has store, key { + f: u64, + } + fun check(a: address): bool { + exists(a) + } + fun publish(s: &signer) { + move_to(s, R{f: 1}); + } + fun read(a: address): u64 + acquires R + { + let r = borrow_global(a); + r.f + } + fun write(a: address, x: u64): u64 + acquires R + { + let r = borrow_global_mut(a); + r.f = 2; + 9 + } +} + ============ initial bytecode ================ [variant baseline] fun globals::check($t0: address): bool { var $t1: bool - 0: $t1 := exists($t0) + 0: $t1 := exists<0x42::globals::R>($t0) 1: return $t1 } [variant baseline] fun globals::publish($t0: &signer) { - var $t1: globals::R + var $t1: 0x42::globals::R var $t2: u64 0: $t2 := 1 - 1: $t1 := pack globals::R($t2) - 2: move_to($t0, $t1) + 1: $t1 := pack 0x42::globals::R($t2) + 2: move_to<0x42::globals::R>($t0, $t1) 3: return () } @@ -61,10 +87,10 @@ fun globals::publish($t0: &signer) { [variant baseline] fun globals::read($t0: address): u64 { var $t1: u64 - var $t2: 
&globals::R + var $t2: &0x42::globals::R var $t3: &u64 - 0: $t2 := borrow_global($t0) - 1: $t3 := borrow_field.f($t2) + 0: $t2 := borrow_global<0x42::globals::R>($t0) + 1: $t3 := borrow_field<0x42::globals::R>.f($t2) 2: $t1 := read_ref($t3) 3: return $t1 } @@ -73,12 +99,12 @@ fun globals::read($t0: address): u64 { [variant baseline] fun globals::write($t0: address, $t1: u64): u64 { var $t2: u64 - var $t3: &mut globals::R + var $t3: &mut 0x42::globals::R var $t4: u64 var $t5: &mut u64 - 0: $t3 := borrow_global($t0) + 0: $t3 := borrow_global<0x42::globals::R>($t0) 1: $t4 := 2 - 2: $t5 := borrow_field.f($t3) + 2: $t5 := borrow_field<0x42::globals::R>.f($t3) 3: write_ref($t5, $t4) 4: $t2 := 9 5: return $t2 diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/if_else.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/if_else.exp index b8f50f7ed5fbf..a8faa766b2a7a 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/if_else.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/if_else.exp @@ -20,6 +20,16 @@ module 0x42::if_else { } } // end 0x42::if_else +// -- Sourcified model before bytecode pipeline +module 0x42::if_else { + fun if_else(cond: bool, x: u64): u64 { + if (cond) x + 1 else x - 1 + } + fun if_else_nested(cond: bool, x: u64): u64 { + if ((if (cond) x + 1 else x - 1) > 10) x * 2 else x / 2 + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/inline_specs.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/inline_specs.exp index 523f94760adc5..2f77eb1351425 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/inline_specs.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/inline_specs.exp @@ -20,6 +20,30 @@ module 0x42::inline_specs { } } // end 0x42::inline_specs +// -- Sourcified model before bytecode pipeline +module 0x42::inline_specs { + fun 
specs(): u64 { + let x = 0; + + /* spec { + assert Eq(x, 0); + } + */ + ; + x = succ(x); + + /* spec { + assert Eq(x, 1); + } + */ + ; + x + } + fun succ(x: u64): u64 { + x + 1 + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/loop.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/loop.exp index 7c1cc30de33ce..85bca4397bfc9 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/loop.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/loop.exp @@ -55,6 +55,35 @@ module 0x42::loops { } } // end 0x42::loops +// -- Sourcified model before bytecode pipeline +module 0x42::loops { + fun nested_loop(x: u64): u64 { + while (x > 0) { + while (x > 10) { + x = x - 1; + break; + }; + x = x - 1; + continue; + }; + x + } + fun while_loop(x: u64): u64 { + while (x > 0) { + x = x - 1; + }; + x + } + fun while_loop_with_break_and_continue(x: u64): u64 { + while (x > 0) { + if (x == 42) break; + if (x == 21) continue; + x = x - 1; + }; + x + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/loop_invalid.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/loop_invalid.exp index ba517bb313e92..d5397a3b06b12 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/loop_invalid.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/loop_invalid.exp @@ -24,6 +24,20 @@ module 0x42::loop_invalid { } } // end 0x42::loop_invalid +// -- Sourcified model before bytecode pipeline +module 0x42::loop_invalid { + fun misplaced_break(x: u64): u64 { + while (x > 0) break; + break; + x + } + fun misplaced_continue(x: u64): u64 { + continue; + while (x > 0) continue; + x + } +} + Diagnostics: error: missing enclosing loop statement diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/loop_labels.exp 
b/third_party/move/move-compiler-v2/tests/bytecode-generator/loop_labels.exp new file mode 100644 index 0000000000000..247f1e144dfdb --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/loop_labels.exp @@ -0,0 +1,78 @@ +// -- Model dump before bytecode pipeline +module 0x815::test { + private fun f1() { + loop { + loop { + loop { + if true { + loop { + if false { + continue[3] + } else { + break[1] + }; + break + } + } else { + continue[2] + } + } + }; + break + } + } +} // end 0x815::test + +// -- Sourcified model before bytecode pipeline +module 0x815::test { + fun f1() { + 'l0: loop { + loop 'l1: loop if (true) loop { + if (false) continue 'l0 else break 'l1; + break + } else continue 'l0; + break + } + } +} + +============ initial bytecode ================ + +[variant baseline] +fun test::f1() { + var $t0: bool + var $t1: bool + 0: label L0 + 1: label L2 + 2: label L4 + 3: $t0 := true + 4: if ($t0) goto 5 else goto 19 + 5: label L6 + 6: label L9 + 7: $t1 := false + 8: if ($t1) goto 9 else goto 12 + 9: label L11 + 10: goto 0 + 11: goto 14 + 12: label L12 + 13: goto 23 + 14: label L13 + 15: goto 17 + 16: goto 6 + 17: label L10 + 18: goto 21 + 19: label L7 + 20: goto 0 + 21: label L8 + 22: goto 2 + 23: label L5 + 24: goto 1 + 25: label L3 + 26: goto 28 + 27: goto 0 + 28: label L1 + 29: return () +} + + +============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/loop_labels.move b/third_party/move/move-compiler-v2/tests/bytecode-generator/loop_labels.move new file mode 100644 index 0000000000000..e91763e82ae00 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/loop_labels.move @@ -0,0 +1,14 @@ +module 0x815::test { + fun f1() { + 'outer: loop { + // unlabeled loop, but counts in nesting in AST + loop { + 'inner: loop if (true) loop { + if (false) continue 'outer else break 'inner; + break + } else continue 'outer + }; + break + } + } +} diff 
--git a/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_ability_err.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_ability_err.exp index efb2eb10cc3cd..06001c73b254d 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_ability_err.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_ability_err.exp @@ -15,25 +15,25 @@ module 0xc0ffee::m { enum Outer { None, One { - i: m::Inner, + i: Inner, } Two { - i: m::Inner, - b: m::Box, + i: Inner, + b: Box, } } - public fun condition_requires_copy(o: m::Outer): m::Outer { + public fun condition_requires_copy(o: Outer): Outer { match (o) { m::Outer::One{ i } if m::consume(i) => { pack m::Outer::One(i) } - o: m::Outer => { + o: Outer => { o } } } - private fun consume(self: m::Inner): bool { + private fun consume(self: Inner): bool { match (self) { m::Inner::Inner1{ x: _ } => { Tuple() @@ -45,12 +45,12 @@ module 0xc0ffee::m { ; true } - public fun matched_value_not_consumed(o: m::Outer) { + public fun matched_value_not_consumed(o: Outer) { match (o) { m::Outer::One{ i: _ } => { Tuple() } - _: m::Outer => { + _: Outer => { Tuple() } } @@ -58,29 +58,74 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + struct Box has drop { + x: u64, + } + enum Inner { + Inner1 { + x: u64, + } + Inner2 { + x: u64, + y: u64, + } + } + enum Outer { + None, + One { + i: Inner, + } + Two { + i: Inner, + b: Box, + } + } + public fun condition_requires_copy(o: Outer): Outer { + match (o) { + Outer::One{i: i} if consume(i) => Outer::One{i: i}, + o => o, + } + } + fun consume(self: Inner): bool { + match (self) { + Inner::Inner1{x: _} => (), + Inner::Inner2{x: _,y: _} => (), + }; + true + } + public fun matched_value_not_consumed(o: Outer) { + match (o) { + Outer::One{i: _} => (), + _ => (), + } + } +} + ============ initial bytecode ================ [variant baseline] -public fun 
m::condition_requires_copy($t0: m::Outer): m::Outer { - var $t1: m::Outer - var $t2: &m::Outer +public fun m::condition_requires_copy($t0: 0xc0ffee::m::Outer): 0xc0ffee::m::Outer { + var $t1: 0xc0ffee::m::Outer + var $t2: &0xc0ffee::m::Outer var $t3: bool - var $t4: &m::Inner - var $t5: m::Inner - var $t6: m::Inner - var $t7: m::Outer + var $t4: &0xc0ffee::m::Inner + var $t5: 0xc0ffee::m::Inner + var $t6: 0xc0ffee::m::Inner + var $t7: 0xc0ffee::m::Outer var $t8: u64 0: $t2 := borrow_local($t0) - 1: $t3 := test_variant m::Outer::One($t2) + 1: $t3 := test_variant 0xc0ffee::m::Outer::One($t2) 2: if ($t3) goto 3 else goto 12 3: label L2 - 4: $t4 := borrow_variant_field.i($t2) + 4: $t4 := borrow_variant_field<0xc0ffee::m::Outer::One>.i($t2) 5: $t5 := read_ref($t4) 6: $t3 := m::consume($t5) 7: if ($t3) goto 8 else goto 12 8: label L3 - 9: $t6 := unpack_variant m::Outer::One($t0) - 10: $t1 := pack_variant m::Outer::One($t6) + 9: $t6 := unpack_variant 0xc0ffee::m::Outer::One($t0) + 10: $t1 := pack_variant 0xc0ffee::m::Outer::One($t6) 11: goto 19 12: label L1 13: $t7 := infer($t0) @@ -95,25 +140,25 @@ public fun m::condition_requires_copy($t0: m::Outer): m::Outer { [variant baseline] -fun m::consume($t0: m::Inner): bool { +fun m::consume($t0: 0xc0ffee::m::Inner): bool { var $t1: bool - var $t2: &m::Inner + var $t2: &0xc0ffee::m::Inner var $t3: bool var $t4: u64 var $t5: u64 var $t6: u64 var $t7: u64 0: $t2 := borrow_local($t0) - 1: $t3 := test_variant m::Inner::Inner1($t2) + 1: $t3 := test_variant 0xc0ffee::m::Inner::Inner1($t2) 2: if ($t3) goto 3 else goto 6 3: label L2 - 4: $t4 := unpack_variant m::Inner::Inner1($t0) + 4: $t4 := unpack_variant 0xc0ffee::m::Inner::Inner1($t0) 5: goto 15 6: label L1 - 7: $t3 := test_variant m::Inner::Inner2($t2) + 7: $t3 := test_variant 0xc0ffee::m::Inner::Inner2($t2) 8: if ($t3) goto 9 else goto 12 9: label L4 - 10: ($t5, $t6) := unpack_variant m::Inner::Inner2($t0) + 10: ($t5, $t6) := unpack_variant 0xc0ffee::m::Inner::Inner2($t0) 11: 
goto 15 12: label L3 13: $t7 := 14566554180833181697 @@ -125,17 +170,17 @@ fun m::consume($t0: m::Inner): bool { [variant baseline] -public fun m::matched_value_not_consumed($t0: m::Outer) { - var $t1: &m::Outer +public fun m::matched_value_not_consumed($t0: 0xc0ffee::m::Outer) { + var $t1: &0xc0ffee::m::Outer var $t2: bool - var $t3: m::Inner - var $t4: m::Outer + var $t3: 0xc0ffee::m::Inner + var $t4: 0xc0ffee::m::Outer var $t5: u64 0: $t1 := borrow_local($t0) - 1: $t2 := test_variant m::Outer::One($t1) + 1: $t2 := test_variant 0xc0ffee::m::Outer::One($t1) 2: if ($t2) goto 3 else goto 6 3: label L2 - 4: $t3 := unpack_variant m::Outer::One($t0) + 4: $t3 := unpack_variant 0xc0ffee::m::Outer::One($t0) 5: goto 12 6: label L1 7: $t4 := infer($t0) @@ -149,19 +194,19 @@ public fun m::matched_value_not_consumed($t0: m::Outer) { Diagnostics: -error: value of type `m::Inner` does not have the `drop` ability +error: value of type `Inner` does not have the `drop` ability ┌─ tests/bytecode-generator/matching_ability_err.move:28:13 │ 28 │ One{i: _} => {} │ ^^^^^^^^^ implicitly dropped here since it is no longer used -error: value of type `m::Outer` does not have the `drop` ability +error: value of type `Outer` does not have the `drop` ability ┌─ tests/bytecode-generator/matching_ability_err.move:29:13 │ 29 │ _ => {} │ ^ implicitly dropped here since it is no longer used -error: local `i` of type `m::Inner` does not have the `copy` ability +error: local `i` of type `Inner` does not have the `copy` ability ┌─ tests/bytecode-generator/matching_ability_err.move:35:31 │ 35 │ One{i} if consume(i) => Outer::One{i}, diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_coverage_err.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_coverage_err.exp index bd94845343123..cebf2819486bb 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_coverage_err.exp +++ 
b/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_coverage_err.exp @@ -15,25 +15,25 @@ module 0xc0ffee::m { enum Outer { None, One { - i: m::Inner, + i: Inner, } Two { - i: m::Inner, - b: m::Box, + i: Inner, + b: Box, } } - public fun exhaustive_tuple(i: &m::Inner) { + public fun exhaustive_tuple(i: &Inner) { match (Tuple(i, i)) { - (m::Inner::Inner1{ x: _ }, _: &m::Inner): (&m::Inner, &m::Inner) => { + (m::Inner::Inner1{ x: _ }, _: &Inner): (&Inner, &Inner) => { Tuple() } - (m::Inner::Inner2{ x: _, y: _ }, _: &m::Inner): (&m::Inner, &m::Inner) => { + (m::Inner::Inner2{ x: _, y: _ }, _: &Inner): (&Inner, &Inner) => { Tuple() } } } - public fun exhaustive_via_merge(o: &m::Outer) { + public fun exhaustive_via_merge(o: &Outer) { match (o) { m::Outer::None => { Tuple() @@ -50,7 +50,7 @@ module 0xc0ffee::m { } } - public fun non_exhaustive(o: &m::Outer) { + public fun non_exhaustive(o: &Outer) { match (o) { m::Outer::None => { Tuple() @@ -61,7 +61,7 @@ module 0xc0ffee::m { } } - public fun non_exhaustive_because_of_cond(o: &m::Outer) { + public fun non_exhaustive_because_of_cond(o: &Outer) { match (o) { m::Outer::None => { Tuple() @@ -69,13 +69,13 @@ module 0xc0ffee::m { m::Outer::One{ i: _ } => { Tuple() } - m::Outer::Two{ i: _, b } if Gt(select m::Box.x<&m::Box>(b), 0) => { + m::Outer::Two{ i: _, b } if Gt(select m::Box.x<&Box>(b), 0) => { Tuple() } } } - public fun non_exhaustive_because_of_nested(o: &m::Outer) { + public fun non_exhaustive_because_of_nested(o: &Outer) { match (o) { m::Outer::None => { Tuple() @@ -89,26 +89,26 @@ module 0xc0ffee::m { } } - public fun non_exhaustive_tuple(i: &m::Inner) { + public fun non_exhaustive_tuple(i: &Inner) { match (Tuple(i, i)) { - (m::Inner::Inner1{ x: _ }, _: &m::Inner): (&m::Inner, &m::Inner) => { + (m::Inner::Inner1{ x: _ }, _: &Inner): (&Inner, &Inner) => { Tuple() } } } - public fun non_exhaustive_tuple2(i: &m::Inner) { + public fun non_exhaustive_tuple2(i: &Inner) { match (Tuple(i, i)) { - 
(m::Inner::Inner1{ x: _ }, _: &m::Inner): (&m::Inner, &m::Inner) => { + (m::Inner::Inner1{ x: _ }, _: &Inner): (&Inner, &Inner) => { Tuple() } - (_: &m::Inner, m::Inner::Inner2{ x: _, y: _ }): (&m::Inner, &m::Inner) => { + (_: &Inner, m::Inner::Inner2{ x: _, y: _ }): (&Inner, &Inner) => { Tuple() } } } - public fun unreachable(o: &m::Outer) { + public fun unreachable(o: &Outer) { match (o) { m::Outer::None => { Tuple() @@ -119,13 +119,13 @@ module 0xc0ffee::m { m::Outer::Two{ i: _, b: _ } => { Tuple() } - _: &m::Outer => { + _: &Outer => { Tuple() } } } - public fun unreachable_via_overlaying_pattern(o: &m::Outer) { + public fun unreachable_via_overlaying_pattern(o: &Outer) { match (o) { m::Outer::None => { Tuple() @@ -139,13 +139,13 @@ module 0xc0ffee::m { m::Outer::One{ i: m::Inner::Inner1{ x: _ } } => { Tuple() } - _: &m::Outer => { + _: &Outer => { Tuple() } } } - public fun unreachable_via_repeated_pattern(o: &m::Outer) { + public fun unreachable_via_repeated_pattern(o: &Outer) { match (o) { m::Outer::None => { Tuple() @@ -156,7 +156,7 @@ module 0xc0ffee::m { m::Outer::One{ i: _ } => { Tuple() } - _: &m::Outer => { + _: &Outer => { Tuple() } } @@ -164,6 +164,102 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + struct Box has drop { + x: u64, + } + enum Inner { + Inner1 { + x: u64, + } + Inner2 { + x: u64, + y: u64, + } + } + enum Outer { + None, + One { + i: Inner, + } + Two { + i: Inner, + b: Box, + } + } + public fun exhaustive_tuple(i: &Inner) { + match ((i, i)) { + (Inner::Inner1{x: _},_) => (), + (Inner::Inner2{x: _,y: _},_) => (), + } + } + public fun exhaustive_via_merge(o: &Outer) { + match (o) { + Outer::None{} => (), + Outer::One{i: Inner::Inner1{x: _}} => (), + Outer::One{i: Inner::Inner2{x: _,y: _}} => (), + Outer::Two{i: _,b: _} => (), + } + } + public fun non_exhaustive(o: &Outer) { + match (o) { + Outer::None{} => (), + Outer::One{i: _} => (), + } + } + public fun 
non_exhaustive_because_of_cond(o: &Outer) { + match (o) { + Outer::None{} => (), + Outer::One{i: _} => (), + Outer::Two{i: _,b: b} if b.x > 0 => (), + } + } + public fun non_exhaustive_because_of_nested(o: &Outer) { + match (o) { + Outer::None{} => (), + Outer::One{i: Inner::Inner1{x: _}} => (), + Outer::Two{i: _,b: _} => (), + } + } + public fun non_exhaustive_tuple(i: &Inner) { + match ((i, i)) { + (Inner::Inner1{x: _},_) => (), + } + } + public fun non_exhaustive_tuple2(i: &Inner) { + match ((i, i)) { + (Inner::Inner1{x: _},_) => (), + (_,Inner::Inner2{x: _,y: _}) => (), + } + } + public fun unreachable(o: &Outer) { + match (o) { + Outer::None{} => (), + Outer::One{i: _} => (), + Outer::Two{i: _,b: _} => (), + _ => (), + } + } + public fun unreachable_via_overlaying_pattern(o: &Outer) { + match (o) { + Outer::None{} => (), + Outer::One{i: Inner::Inner1{x: _}} => (), + Outer::One{i: _} => (), + Outer::One{i: Inner::Inner1{x: _}} => (), + _ => (), + } + } + public fun unreachable_via_repeated_pattern(o: &Outer) { + match (o) { + Outer::None{} => (), + Outer::One{i: _} => (), + Outer::One{i: _} => (), + _ => (), + } + } +} + Diagnostics: error: match not exhaustive diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_ok.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_ok.exp index 27c69ad451e84..a49fde8a4b9a3 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_ok.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_ok.exp @@ -39,23 +39,23 @@ module 0xc0ffee::m { y: u64, } } - enum Option { + enum Option { None, Some { - value: #0, + value: A, } } enum Outer { None, One { - i: m::Inner, + i: Inner, } Two { - i: m::Inner, - b: m::Box, + i: Inner, + b: Box, } } - public fun inner_value(self: m::Inner): u64 { + public fun inner_value(self: Inner): u64 { match (self) { m::Inner::Inner1{ x } => { x @@ -66,18 +66,18 @@ module 0xc0ffee::m { } } - public fun 
is_inner1(self: &m::Inner): bool { + public fun is_inner1(self: &Inner): bool { match (self) { m::Inner::Inner1{ x: _ } => { true } - _: &m::Inner => { + _: &Inner => { false } } } - public fun is_some(x: &m::Option<#0>): bool { + public fun is_some(x: &Option): bool { match (x) { m::Option::None => { false @@ -88,32 +88,32 @@ module 0xc0ffee::m { } } - public fun is_some_dropped(x: m::Option<#0>): bool { + public fun is_some_dropped(x: Option): bool { match (x) { m::Option::None => { false } - _: m::Option => { + _: Option => { true } } } - public fun is_some_specialized(x: &m::Option>): bool { + public fun is_some_specialized(x: &Option>): bool { match (x) { - m::Option::None> => { + m::Option::None> => { false } - m::Option::Some>{ value: m::Option::None } => { + m::Option::Some>{ value: m::Option::None } => { false } - m::Option::Some>{ value: m::Option::Some{ value: _ } } => { + m::Option::Some>{ value: m::Option::Some{ value: _ } } => { true } } } - public fun outer_value(o: m::Outer): u64 { + public fun outer_value(o: Outer): u64 { match (o) { m::Outer::None => { 0 @@ -122,12 +122,12 @@ module 0xc0ffee::m { m::inner_value(i) } m::Outer::Two{ i, b } => { - Add(m::inner_value(i), select m::Box.x(b)) + Add(m::inner_value(i), select m::Box.x(b)) } } } - public fun outer_value_nested(o: m::Outer): u64 { + public fun outer_value_nested(o: Outer): u64 { match (o) { m::Outer::None => { 0 @@ -139,12 +139,12 @@ module 0xc0ffee::m { m::inner_value(i) } m::Outer::Two{ i, b } => { - Add(m::inner_value(i), select m::Box.x(b)) + Add(m::inner_value(i), select m::Box.x(b)) } } } - public fun outer_value_with_cond(o: m::Outer): u64 { + public fun outer_value_with_cond(o: Outer): u64 { match (o) { m::Outer::None => { 0 @@ -156,12 +156,12 @@ module 0xc0ffee::m { m::inner_value(i) } m::Outer::Two{ i, b } => { - Add(m::inner_value(i), select m::Box.x(b)) + Add(m::inner_value(i), select m::Box.x(b)) } } } - public fun outer_value_with_cond_ref(o: &m::Outer): bool { + public fun 
outer_value_with_cond_ref(o: &Outer): bool { match (o) { m::Outer::None => { false @@ -178,8 +178,8 @@ module 0xc0ffee::m { } } - private fun select_common_fields(s: m::CommonFields): u64 { - Add(select_variants m::CommonFields.Foo.x|m::CommonFields.Bar.x(s), match (s) { + private fun select_common_fields(s: CommonFields): u64 { + Add(select_variants m::CommonFields.Foo.x|m::CommonFields.Bar.x(s), match (s) { m::CommonFields::Foo{ x: _, y } => { y } @@ -189,40 +189,176 @@ module 0xc0ffee::m { } ) } - private fun select_common_fields_different_offset(s: m::CommonFieldsAtDifferentOffset): u64 { - select_variants m::CommonFieldsAtDifferentOffset.Bar.z|m::CommonFieldsAtDifferentOffset.Baz.z|m::CommonFieldsAtDifferentOffset.Balt.z(s) + private fun select_common_fields_different_offset(s: CommonFieldsAtDifferentOffset): u64 { + select_variants m::CommonFieldsAtDifferentOffset.Bar.z|m::CommonFieldsAtDifferentOffset.Baz.z|m::CommonFieldsAtDifferentOffset.Balt.z(s) } - private fun test_common(s: m::CommonFields): bool { + private fun test_common(s: CommonFields): bool { test_variants m::CommonFields::Foo|Bar(s) } - private fun test_common_ref(s: &m::CommonFields): bool { + private fun test_common_ref(s: &CommonFields): bool { test_variants m::CommonFields::Foo|Bar(s) } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + struct Box has drop { + x: u64, + } + enum CommonFields { + Foo { + x: u64, + y: u64, + } + Bar { + x: u64, + z: u64, + } + } + enum CommonFieldsAtDifferentOffset has drop { + Foo { + x: u64, + y: u64, + } + Bar { + x: u64, + z: u64, + } + Baz { + z: u64, + } + Balt { + foo: u8, + z: u64, + } + } + enum Inner { + Inner1 { + x: u64, + } + Inner2 { + x: u64, + y: u64, + } + } + enum Option has drop { + None, + Some { + value: A, + } + } + enum Outer { + None, + One { + i: Inner, + } + Two { + i: Inner, + b: Box, + } + } + public fun inner_value(self: Inner): u64 { + match (self) { + Inner::Inner1{x: x} => x, + 
Inner::Inner2{x: x,y: y} => x + y, + } + } + public fun is_inner1(self: &Inner): bool { + match (self) { + Inner::Inner1{x: _} => true, + _ => false, + } + } + public fun is_some(x: &Option): bool { + match (x) { + Option::None{} => false, + Option::Some{value: _} => true, + } + } + public fun is_some_dropped(x: Option): bool { + match (x) { + Option::None{} => false, + _ => true, + } + } + public fun is_some_specialized(x: &Option>): bool { + match (x) { + Option::None>{} => false, + Option::Some>{value: Option::None{}} => false, + Option::Some>{value: Option::Some{value: _}} => true, + } + } + public fun outer_value(o: Outer): u64 { + match (o) { + Outer::None{} => 0, + Outer::One{i: i} => inner_value(i), + Outer::Two{i: i,b: b} => inner_value(i) + b.x, + } + } + public fun outer_value_nested(o: Outer): u64 { + match (o) { + Outer::None{} => 0, + Outer::One{i: Inner::Inner1{x: x}} => x, + Outer::One{i: i} => inner_value(i), + Outer::Two{i: i,b: b} => inner_value(i) + b.x, + } + } + public fun outer_value_with_cond(o: Outer): u64 { + match (o) { + Outer::None{} => 0, + Outer::One{i: i} if is_inner1(&i) => inner_value(i) % 2, + Outer::One{i: i} => inner_value(i), + Outer::Two{i: i,b: b} => inner_value(i) + b.x, + } + } + public fun outer_value_with_cond_ref(o: &Outer): bool { + match (o) { + Outer::None{} => false, + Outer::One{i: i} if is_inner1(i) => true, + Outer::One{i: i} => is_inner1(i), + Outer::Two{i: i,b: _} => is_inner1(i), + } + } + fun select_common_fields(s: CommonFields): u64 { + s.Foo.x + (match (s) { + CommonFields::Foo{x: _,y: y} => y, + CommonFields::Bar{x: _,z: z} => z, + }) + } + fun select_common_fields_different_offset(s: CommonFieldsAtDifferentOffset): u64 { + s.Bar.z + } + fun test_common(s: CommonFields): bool { + s is Foo | Bar + } + fun test_common_ref(s: &CommonFields): bool { + s is Foo | Bar + } +} + ============ initial bytecode ================ [variant baseline] -public fun m::inner_value($t0: m::Inner): u64 { +public fun 
m::inner_value($t0: 0xc0ffee::m::Inner): u64 { var $t1: u64 - var $t2: &m::Inner + var $t2: &0xc0ffee::m::Inner var $t3: bool var $t4: u64 var $t5: u64 var $t6: u64 var $t7: u64 0: $t2 := borrow_local($t0) - 1: $t3 := test_variant m::Inner::Inner1($t2) + 1: $t3 := test_variant 0xc0ffee::m::Inner::Inner1($t2) 2: if ($t3) goto 3 else goto 7 3: label L2 - 4: $t4 := unpack_variant m::Inner::Inner1($t0) + 4: $t4 := unpack_variant 0xc0ffee::m::Inner::Inner1($t0) 5: $t1 := infer($t4) 6: goto 17 7: label L1 - 8: $t3 := test_variant m::Inner::Inner2($t2) + 8: $t3 := test_variant 0xc0ffee::m::Inner::Inner2($t2) 9: if ($t3) goto 10 else goto 14 10: label L4 - 11: ($t5, $t6) := unpack_variant m::Inner::Inner2($t0) + 11: ($t5, $t6) := unpack_variant 0xc0ffee::m::Inner::Inner2($t0) 12: $t1 := +($t5, $t6) 13: goto 17 14: label L3 @@ -234,12 +370,12 @@ public fun m::inner_value($t0: m::Inner): u64 { [variant baseline] -public fun m::is_inner1($t0: &m::Inner): bool { +public fun m::is_inner1($t0: &0xc0ffee::m::Inner): bool { var $t1: bool var $t2: bool - var $t3: &m::Inner + var $t3: &0xc0ffee::m::Inner var $t4: u64 - 0: $t2 := test_variant m::Inner::Inner1($t0) + 0: $t2 := test_variant 0xc0ffee::m::Inner::Inner1($t0) 1: if ($t2) goto 2 else goto 5 2: label L2 3: $t1 := true @@ -257,17 +393,17 @@ public fun m::is_inner1($t0: &m::Inner): bool { [variant baseline] -public fun m::is_some<#0>($t0: &m::Option<#0>): bool { +public fun m::is_some<#0>($t0: &0xc0ffee::m::Option<#0>): bool { var $t1: bool var $t2: bool var $t3: u64 - 0: $t2 := test_variant m::Option<#0>::None($t0) + 0: $t2 := test_variant 0xc0ffee::m::Option<#0>::None($t0) 1: if ($t2) goto 2 else goto 5 2: label L2 3: $t1 := false 4: goto 14 5: label L1 - 6: $t2 := test_variant m::Option<#0>::Some($t0) + 6: $t2 := test_variant 0xc0ffee::m::Option<#0>::Some($t0) 7: if ($t2) goto 8 else goto 11 8: label L4 9: $t1 := true @@ -281,17 +417,17 @@ public fun m::is_some<#0>($t0: &m::Option<#0>): bool { [variant baseline] -public fun 
m::is_some_dropped<#0>($t0: m::Option<#0>): bool { +public fun m::is_some_dropped<#0>($t0: 0xc0ffee::m::Option<#0>): bool { var $t1: bool - var $t2: &m::Option<#0> + var $t2: &0xc0ffee::m::Option<#0> var $t3: bool - var $t4: m::Option<#0> + var $t4: 0xc0ffee::m::Option<#0> var $t5: u64 0: $t2 := borrow_local($t0) - 1: $t3 := test_variant m::Option<#0>::None($t2) + 1: $t3 := test_variant 0xc0ffee::m::Option<#0>::None($t2) 2: if ($t3) goto 3 else goto 7 3: label L2 - 4: unpack_variant m::Option<#0>::None($t0) + 4: unpack_variant 0xc0ffee::m::Option<#0>::None($t0) 5: $t1 := false 6: goto 14 7: label L1 @@ -307,33 +443,33 @@ public fun m::is_some_dropped<#0>($t0: m::Option<#0>): bool { [variant baseline] -public fun m::is_some_specialized($t0: &m::Option>): bool { +public fun m::is_some_specialized($t0: &0xc0ffee::m::Option<0xc0ffee::m::Option>): bool { var $t1: bool var $t2: bool - var $t3: &m::Option - var $t4: &m::Option + var $t3: &0xc0ffee::m::Option + var $t4: &0xc0ffee::m::Option var $t5: u64 - 0: $t2 := test_variant m::Option>::None($t0) + 0: $t2 := test_variant 0xc0ffee::m::Option<0xc0ffee::m::Option>::None($t0) 1: if ($t2) goto 2 else goto 5 2: label L2 3: $t1 := false 4: goto 28 5: label L1 - 6: $t2 := test_variant m::Option>::Some($t0) + 6: $t2 := test_variant 0xc0ffee::m::Option<0xc0ffee::m::Option>::Some($t0) 7: if ($t2) goto 8 else goto 15 8: label L4 - 9: $t3 := borrow_variant_field>::Some>.value($t0) - 10: $t2 := test_variant m::Option::None($t3) + 9: $t3 := borrow_variant_field<0xc0ffee::m::Option<0xc0ffee::m::Option>::Some>.value($t0) + 10: $t2 := test_variant 0xc0ffee::m::Option::None($t3) 11: if ($t2) goto 12 else goto 15 12: label L5 13: $t1 := false 14: goto 28 15: label L3 - 16: $t2 := test_variant m::Option>::Some($t0) + 16: $t2 := test_variant 0xc0ffee::m::Option<0xc0ffee::m::Option>::Some($t0) 17: if ($t2) goto 18 else goto 25 18: label L7 - 19: $t4 := borrow_variant_field>::Some>.value($t0) - 20: $t2 := test_variant m::Option::Some($t4) + 
19: $t4 := borrow_variant_field<0xc0ffee::m::Option<0xc0ffee::m::Option>::Some>.value($t0) + 20: $t2 := test_variant 0xc0ffee::m::Option::Some($t4) 21: if ($t2) goto 22 else goto 25 22: label L8 23: $t1 := true @@ -347,40 +483,40 @@ public fun m::is_some_specialized($t0: &m::Option>): bool { [variant baseline] -public fun m::outer_value($t0: m::Outer): u64 { +public fun m::outer_value($t0: 0xc0ffee::m::Outer): u64 { var $t1: u64 - var $t2: &m::Outer + var $t2: &0xc0ffee::m::Outer var $t3: bool - var $t4: m::Inner - var $t5: m::Inner - var $t6: m::Box + var $t4: 0xc0ffee::m::Inner + var $t5: 0xc0ffee::m::Inner + var $t6: 0xc0ffee::m::Box var $t7: u64 var $t8: u64 - var $t9: &m::Box + var $t9: &0xc0ffee::m::Box var $t10: &u64 var $t11: u64 0: $t2 := borrow_local($t0) - 1: $t3 := test_variant m::Outer::None($t2) + 1: $t3 := test_variant 0xc0ffee::m::Outer::None($t2) 2: if ($t3) goto 3 else goto 7 3: label L2 - 4: unpack_variant m::Outer::None($t0) + 4: unpack_variant 0xc0ffee::m::Outer::None($t0) 5: $t1 := 0 6: goto 28 7: label L1 - 8: $t3 := test_variant m::Outer::One($t2) + 8: $t3 := test_variant 0xc0ffee::m::Outer::One($t2) 9: if ($t3) goto 10 else goto 14 10: label L4 - 11: $t4 := unpack_variant m::Outer::One($t0) + 11: $t4 := unpack_variant 0xc0ffee::m::Outer::One($t0) 12: $t1 := m::inner_value($t4) 13: goto 28 14: label L3 - 15: $t3 := test_variant m::Outer::Two($t2) + 15: $t3 := test_variant 0xc0ffee::m::Outer::Two($t2) 16: if ($t3) goto 17 else goto 25 17: label L6 - 18: ($t5, $t6) := unpack_variant m::Outer::Two($t0) + 18: ($t5, $t6) := unpack_variant 0xc0ffee::m::Outer::Two($t0) 19: $t7 := m::inner_value($t5) 20: $t9 := borrow_local($t6) - 21: $t10 := borrow_field.x($t9) + 21: $t10 := borrow_field<0xc0ffee::m::Box>.x($t9) 22: $t8 := read_ref($t10) 23: $t1 := +($t7, $t8) 24: goto 28 @@ -393,55 +529,55 @@ public fun m::outer_value($t0: m::Outer): u64 { [variant baseline] -public fun m::outer_value_nested($t0: m::Outer): u64 { +public fun 
m::outer_value_nested($t0: 0xc0ffee::m::Outer): u64 { var $t1: u64 - var $t2: &m::Outer + var $t2: &0xc0ffee::m::Outer var $t3: bool - var $t4: &m::Inner + var $t4: &0xc0ffee::m::Inner var $t5: u64 - var $t6: m::Inner - var $t7: m::Inner - var $t8: m::Inner - var $t9: m::Box + var $t6: 0xc0ffee::m::Inner + var $t7: 0xc0ffee::m::Inner + var $t8: 0xc0ffee::m::Inner + var $t9: 0xc0ffee::m::Box var $t10: u64 var $t11: u64 - var $t12: &m::Box + var $t12: &0xc0ffee::m::Box var $t13: &u64 var $t14: u64 0: $t2 := borrow_local($t0) - 1: $t3 := test_variant m::Outer::None($t2) + 1: $t3 := test_variant 0xc0ffee::m::Outer::None($t2) 2: if ($t3) goto 3 else goto 7 3: label L2 - 4: unpack_variant m::Outer::None($t0) + 4: unpack_variant 0xc0ffee::m::Outer::None($t0) 5: $t1 := 0 6: goto 40 7: label L1 - 8: $t3 := test_variant m::Outer::One($t2) + 8: $t3 := test_variant 0xc0ffee::m::Outer::One($t2) 9: if ($t3) goto 10 else goto 19 10: label L4 - 11: $t4 := borrow_variant_field.i($t2) - 12: $t3 := test_variant m::Inner::Inner1($t4) + 11: $t4 := borrow_variant_field<0xc0ffee::m::Outer::One>.i($t2) + 12: $t3 := test_variant 0xc0ffee::m::Inner::Inner1($t4) 13: if ($t3) goto 14 else goto 19 14: label L5 - 15: $t6 := unpack_variant m::Outer::One($t0) - 16: $t5 := unpack_variant m::Inner::Inner1($t6) + 15: $t6 := unpack_variant 0xc0ffee::m::Outer::One($t0) + 16: $t5 := unpack_variant 0xc0ffee::m::Inner::Inner1($t6) 17: $t1 := infer($t5) 18: goto 40 19: label L3 - 20: $t3 := test_variant m::Outer::One($t2) + 20: $t3 := test_variant 0xc0ffee::m::Outer::One($t2) 21: if ($t3) goto 22 else goto 26 22: label L7 - 23: $t7 := unpack_variant m::Outer::One($t0) + 23: $t7 := unpack_variant 0xc0ffee::m::Outer::One($t0) 24: $t1 := m::inner_value($t7) 25: goto 40 26: label L6 - 27: $t3 := test_variant m::Outer::Two($t2) + 27: $t3 := test_variant 0xc0ffee::m::Outer::Two($t2) 28: if ($t3) goto 29 else goto 37 29: label L9 - 30: ($t8, $t9) := unpack_variant m::Outer::Two($t0) + 30: ($t8, $t9) := 
unpack_variant 0xc0ffee::m::Outer::Two($t0) 31: $t10 := m::inner_value($t8) 32: $t12 := borrow_local($t9) - 33: $t13 := borrow_field.x($t12) + 33: $t13 := borrow_field<0xc0ffee::m::Box>.x($t12) 34: $t11 := read_ref($t13) 35: $t1 := +($t10, $t11) 36: goto 40 @@ -454,59 +590,59 @@ public fun m::outer_value_nested($t0: m::Outer): u64 { [variant baseline] -public fun m::outer_value_with_cond($t0: m::Outer): u64 { +public fun m::outer_value_with_cond($t0: 0xc0ffee::m::Outer): u64 { var $t1: u64 - var $t2: &m::Outer + var $t2: &0xc0ffee::m::Outer var $t3: bool - var $t4: &m::Inner - var $t5: &m::Inner - var $t6: m::Inner + var $t4: &0xc0ffee::m::Inner + var $t5: &0xc0ffee::m::Inner + var $t6: 0xc0ffee::m::Inner var $t7: u64 var $t8: u64 - var $t9: m::Inner - var $t10: m::Inner - var $t11: m::Box + var $t9: 0xc0ffee::m::Inner + var $t10: 0xc0ffee::m::Inner + var $t11: 0xc0ffee::m::Box var $t12: u64 var $t13: u64 - var $t14: &m::Box + var $t14: &0xc0ffee::m::Box var $t15: &u64 var $t16: u64 0: $t2 := borrow_local($t0) - 1: $t3 := test_variant m::Outer::None($t2) + 1: $t3 := test_variant 0xc0ffee::m::Outer::None($t2) 2: if ($t3) goto 3 else goto 7 3: label L2 - 4: unpack_variant m::Outer::None($t0) + 4: unpack_variant 0xc0ffee::m::Outer::None($t0) 5: $t1 := 0 6: goto 42 7: label L1 - 8: $t3 := test_variant m::Outer::One($t2) + 8: $t3 := test_variant 0xc0ffee::m::Outer::One($t2) 9: if ($t3) goto 10 else goto 21 10: label L4 - 11: $t4 := borrow_variant_field.i($t2) + 11: $t4 := borrow_variant_field<0xc0ffee::m::Outer::One>.i($t2) 12: $t5 := infer($t4) 13: $t3 := m::is_inner1($t5) 14: if ($t3) goto 15 else goto 21 15: label L5 - 16: $t6 := unpack_variant m::Outer::One($t0) + 16: $t6 := unpack_variant 0xc0ffee::m::Outer::One($t0) 17: $t7 := m::inner_value($t6) 18: $t8 := 2 19: $t1 := %($t7, $t8) 20: goto 42 21: label L3 - 22: $t3 := test_variant m::Outer::One($t2) + 22: $t3 := test_variant 0xc0ffee::m::Outer::One($t2) 23: if ($t3) goto 24 else goto 28 24: label L7 - 25: $t9 := 
unpack_variant m::Outer::One($t0) + 25: $t9 := unpack_variant 0xc0ffee::m::Outer::One($t0) 26: $t1 := m::inner_value($t9) 27: goto 42 28: label L6 - 29: $t3 := test_variant m::Outer::Two($t2) + 29: $t3 := test_variant 0xc0ffee::m::Outer::Two($t2) 30: if ($t3) goto 31 else goto 39 31: label L9 - 32: ($t10, $t11) := unpack_variant m::Outer::Two($t0) + 32: ($t10, $t11) := unpack_variant 0xc0ffee::m::Outer::Two($t0) 33: $t12 := m::inner_value($t10) 34: $t14 := borrow_local($t11) - 35: $t15 := borrow_field.x($t14) + 35: $t15 := borrow_field<0xc0ffee::m::Box>.x($t14) 36: $t13 := read_ref($t15) 37: $t1 := +($t12, $t13) 38: goto 42 @@ -519,40 +655,40 @@ public fun m::outer_value_with_cond($t0: m::Outer): u64 { [variant baseline] -public fun m::outer_value_with_cond_ref($t0: &m::Outer): bool { +public fun m::outer_value_with_cond_ref($t0: &0xc0ffee::m::Outer): bool { var $t1: bool var $t2: bool - var $t3: &m::Inner - var $t4: &m::Inner - var $t5: &m::Inner + var $t3: &0xc0ffee::m::Inner + var $t4: &0xc0ffee::m::Inner + var $t5: &0xc0ffee::m::Inner var $t6: u64 - 0: $t2 := test_variant m::Outer::None($t0) + 0: $t2 := test_variant 0xc0ffee::m::Outer::None($t0) 1: if ($t2) goto 2 else goto 5 2: label L2 3: $t1 := false 4: goto 32 5: label L1 - 6: $t2 := test_variant m::Outer::One($t0) + 6: $t2 := test_variant 0xc0ffee::m::Outer::One($t0) 7: if ($t2) goto 8 else goto 15 8: label L4 - 9: $t3 := borrow_variant_field.i($t0) + 9: $t3 := borrow_variant_field<0xc0ffee::m::Outer::One>.i($t0) 10: $t2 := m::is_inner1($t3) 11: if ($t2) goto 12 else goto 15 12: label L5 13: $t1 := true 14: goto 32 15: label L3 - 16: $t2 := test_variant m::Outer::One($t0) + 16: $t2 := test_variant 0xc0ffee::m::Outer::One($t0) 17: if ($t2) goto 18 else goto 22 18: label L7 - 19: $t4 := borrow_variant_field.i($t0) + 19: $t4 := borrow_variant_field<0xc0ffee::m::Outer::One>.i($t0) 20: $t1 := m::is_inner1($t4) 21: goto 32 22: label L6 - 23: $t2 := test_variant m::Outer::Two($t0) + 23: $t2 := test_variant 
0xc0ffee::m::Outer::Two($t0) 24: if ($t2) goto 25 else goto 29 25: label L9 - 26: $t5 := borrow_variant_field.i($t0) + 26: $t5 := borrow_variant_field<0xc0ffee::m::Outer::Two>.i($t0) 27: $t1 := m::is_inner1($t5) 28: goto 32 29: label L8 @@ -564,13 +700,13 @@ public fun m::outer_value_with_cond_ref($t0: &m::Outer): bool { [variant baseline] -fun m::select_common_fields($t0: m::CommonFields): u64 { +fun m::select_common_fields($t0: 0xc0ffee::m::CommonFields): u64 { var $t1: u64 var $t2: u64 - var $t3: &m::CommonFields + var $t3: &0xc0ffee::m::CommonFields var $t4: &u64 var $t5: u64 - var $t6: &m::CommonFields + var $t6: &0xc0ffee::m::CommonFields var $t7: bool var $t8: u64 var $t9: u64 @@ -578,20 +714,20 @@ fun m::select_common_fields($t0: m::CommonFields): u64 { var $t11: u64 var $t12: u64 0: $t3 := borrow_local($t0) - 1: $t4 := borrow_variant_field.x($t3) + 1: $t4 := borrow_variant_field<0xc0ffee::m::CommonFields::Foo|Bar>.x($t3) 2: $t2 := read_ref($t4) 3: $t6 := borrow_local($t0) - 4: $t7 := test_variant m::CommonFields::Foo($t6) + 4: $t7 := test_variant 0xc0ffee::m::CommonFields::Foo($t6) 5: if ($t7) goto 6 else goto 10 6: label L2 - 7: ($t9, $t8) := unpack_variant m::CommonFields::Foo($t0) + 7: ($t9, $t8) := unpack_variant 0xc0ffee::m::CommonFields::Foo($t0) 8: $t5 := infer($t8) 9: goto 20 10: label L1 - 11: $t7 := test_variant m::CommonFields::Bar($t6) + 11: $t7 := test_variant 0xc0ffee::m::CommonFields::Bar($t6) 12: if ($t7) goto 13 else goto 17 13: label L4 - 14: ($t11, $t10) := unpack_variant m::CommonFields::Bar($t0) + 14: ($t11, $t10) := unpack_variant 0xc0ffee::m::CommonFields::Bar($t0) 15: $t5 := infer($t10) 16: goto 20 17: label L3 @@ -604,24 +740,24 @@ fun m::select_common_fields($t0: m::CommonFields): u64 { [variant baseline] -fun m::select_common_fields_different_offset($t0: m::CommonFieldsAtDifferentOffset): u64 { +fun m::select_common_fields_different_offset($t0: 0xc0ffee::m::CommonFieldsAtDifferentOffset): u64 { var $t1: u64 - var $t2: 
&m::CommonFieldsAtDifferentOffset + var $t2: &0xc0ffee::m::CommonFieldsAtDifferentOffset var $t3: &u64 var $t4: bool 0: $t2 := borrow_local($t0) - 1: $t4 := test_variant m::CommonFieldsAtDifferentOffset::Bar($t2) + 1: $t4 := test_variant 0xc0ffee::m::CommonFieldsAtDifferentOffset::Bar($t2) 2: if ($t4) goto 8 else goto 3 3: label L3 - 4: $t4 := test_variant m::CommonFieldsAtDifferentOffset::Balt($t2) + 4: $t4 := test_variant 0xc0ffee::m::CommonFieldsAtDifferentOffset::Balt($t2) 5: if ($t4) goto 8 else goto 6 6: label L4 7: goto 11 8: label L2 - 9: $t3 := borrow_variant_field.z($t2) + 9: $t3 := borrow_variant_field<0xc0ffee::m::CommonFieldsAtDifferentOffset::Bar|Balt>.z($t2) 10: goto 13 11: label L1 - 12: $t3 := borrow_variant_field.z($t2) + 12: $t3 := borrow_variant_field<0xc0ffee::m::CommonFieldsAtDifferentOffset::Baz>.z($t2) 13: label L0 14: $t1 := read_ref($t3) 15: return $t1 @@ -629,14 +765,14 @@ fun m::select_common_fields_different_offset($t0: m::CommonFieldsAtDifferentOffs [variant baseline] -fun m::test_common($t0: m::CommonFields): bool { +fun m::test_common($t0: 0xc0ffee::m::CommonFields): bool { var $t1: bool - var $t2: &m::CommonFields + var $t2: &0xc0ffee::m::CommonFields 0: $t2 := borrow_local($t0) - 1: $t1 := test_variant m::CommonFields::Foo($t2) + 1: $t1 := test_variant 0xc0ffee::m::CommonFields::Foo($t2) 2: if ($t1) goto 7 else goto 3 3: label L1 - 4: $t1 := test_variant m::CommonFields::Bar($t2) + 4: $t1 := test_variant 0xc0ffee::m::CommonFields::Bar($t2) 5: if ($t1) goto 7 else goto 6 6: label L2 7: label L0 @@ -645,12 +781,12 @@ fun m::test_common($t0: m::CommonFields): bool { [variant baseline] -fun m::test_common_ref($t0: &m::CommonFields): bool { +fun m::test_common_ref($t0: &0xc0ffee::m::CommonFields): bool { var $t1: bool - 0: $t1 := test_variant m::CommonFields::Foo($t0) + 0: $t1 := test_variant 0xc0ffee::m::CommonFields::Foo($t0) 1: if ($t1) goto 6 else goto 2 2: label L1 - 3: $t1 := test_variant m::CommonFields::Bar($t0) + 3: $t1 := 
test_variant 0xc0ffee::m::CommonFields::Bar($t0) 4: if ($t1) goto 6 else goto 5 5: label L2 6: label L0 @@ -659,7 +795,7 @@ fun m::test_common_ref($t0: &m::CommonFields): bool { Diagnostics: -error: local `s` of type `m::CommonFields` does not have the `drop` ability +error: local `s` of type `CommonFields` does not have the `drop` ability ┌─ tests/bytecode-generator/matching_ok.move:128:10 │ 128 │ (s is Foo|Bar) diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_refutable_err.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_refutable_err.exp index 2072c014526ea..c930a13a552b1 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_refutable_err.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/matching_refutable_err.exp @@ -6,7 +6,7 @@ module 0x815::m { 0: u64, } } - private fun t(self: m::E): u64 { + private fun t(self: E): u64 { { let m::E::Some{ 0: x } = self; x @@ -14,13 +14,27 @@ module 0x815::m { } } // end 0x815::m +// -- Sourcified model before bytecode pipeline +module 0x815::m { + enum E { + None, + Some { + 0: u64, + } + } + fun t(self: E): u64 { + let E::Some(x) = self; + x + } +} + ============ initial bytecode ================ [variant baseline] -fun m::t($t0: m::E): u64 { +fun m::t($t0: 0x815::m::E): u64 { var $t1: u64 var $t2: u64 - 0: $t2 := unpack_variant m::E::Some($t0) + 0: $t2 := unpack_variant 0x815::m::E::Some($t0) 1: $t1 := infer($t2) 2: return $t1 } diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/moved_var_not_simplified3.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/moved_var_not_simplified3.exp index fa669c34cd583..b46e5b602e42e 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/moved_var_not_simplified3.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/moved_var_not_simplified3.exp @@ -14,6 +14,16 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- 
Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test(): u8 { + let x = 40u8; + let y = move x; + let _ = x; + y + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/mutate_immutable_cmp.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/mutate_immutable_cmp.exp index 7acf2de5d5ae7..bd21055b8ac2e 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/mutate_immutable_cmp.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/mutate_immutable_cmp.exp @@ -1,7 +1,7 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { struct T { - s: M::S, + s: S, } struct G { dummy_field: bool, @@ -9,7 +9,7 @@ module 0x8675309::M { struct S { f: u64, } - private fun t0(s: &mut M::S) { + private fun t0(s: &mut S) { s = pack M::S(2); s = pack M::S(0); Borrow(Immutable)(0) = 1; @@ -21,20 +21,20 @@ module 0x8675309::M { let x_ref: &u64 = Freeze(false)(x_ref); x_ref = 0; { - let g: M::S = pack M::S(0); + let g: S = pack M::S(0); { - let g_ref: &mut M::S = Borrow(Mutable)(g); + let g_ref: &mut S = Borrow(Mutable)(g); g_ref = pack M::S(2); { - let t: M::T = pack M::T(g); + let t: T = pack M::T(g); { - let t_ref: &mut M::T = Borrow(Mutable)(t); + let t_ref: &mut T = Borrow(Mutable)(t); { - let g: M::S = pack M::S(2); - select M::T.s<&mut M::T>(t_ref) = g; + let g: S = pack M::S(2); + select M::T.s<&mut T>(t_ref) = g; { - let g: M::S = pack M::S(3); - select M::T.s<&M::T>(t_ref) = g; + let g: S = pack M::S(3); + select M::T.s<&T>(t_ref) = g; Tuple() } } @@ -55,9 +55,43 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct T has drop { + s: S, + } + struct G has drop, key { + } + struct S has drop { + f: u64, + } + fun t0(s: &mut S) { + *s = S{f: 2}; + *s = S{f: 0}; + *&0 = 1; + let x = 0; + let x_ref = &mut x; + let x_ref = /*freeze*/x_ref; 
+ *x_ref = 0; + let g = S{f: 0}; + let g_ref = &mut g; + *g_ref = S{f: 2}; + let t = T{s: g}; + let t_ref = &mut t; + let g = S{f: 2}; + t_ref.s = g; + let g = S{f: 3}; + t_ref.s = g; + } + fun t1() { + let x = 3; + *&mut x = 5; + } +} + Diagnostics: -error: expected `&mut` but found `&M::S` +error: expected `&mut` but found `&S` ┌─ tests/bytecode-generator/mutate_immutable_cmp.move:7:11 │ 7 │ *(s: &S) = S { f: 0 }; // this is not OK @@ -75,13 +109,13 @@ error: expected `&mut` but found `&u64` 12 │ *x_ref = 0; │ ^^^^^ -error: expected `&mut` but found `&M::S` +error: expected `&mut` but found `&S` ┌─ tests/bytecode-generator/mutate_immutable_cmp.move:15:11 │ 15 │ *(g_ref: &S) = S {f : 2}; │ ^^^^^ -error: expected `&mut` but found `&M::T` +error: expected `&mut` but found `&T` ┌─ tests/bytecode-generator/mutate_immutable_cmp.move:21:10 │ 21 │ (t_ref: &T).s = g; // this is not OK diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/operators.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/operators.exp index 0dad714bc08b3..3260942440feb 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/operators.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/operators.exp @@ -9,10 +9,10 @@ module 0x42::operators { private fun bools(x: bool,y: bool): bool { Or(Or(Or(And(x, y), And(x, Not(y))), And(Not(x), y)), And(Not(x), Not(y))) } - private fun equality(x: #0,y: #0): bool { + private fun equality(x: T,y: T): bool { Eq(x, y) } - private fun inequality(x: #0,y: #0): bool { + private fun inequality(x: T,y: T): bool { Neq(x, y) } private fun order(x: u64,y: u64): bool { @@ -20,6 +20,28 @@ module 0x42::operators { } } // end 0x42::operators +// -- Sourcified model before bytecode pipeline +module 0x42::operators { + fun arithm(x: u64, y: u64): u64 { + x + y / (x - y) * y % x + } + fun bits(x: u64, y: u8): u64 { + x << y & x + } + fun bools(x: bool, y: bool): bool { + x && y || x && !y || !x && y || !x && !y + 
} + fun equality(x: T, y: T): bool { + x == y + } + fun inequality(x: T, y: T): bool { + x != y + } + fun order(x: u64, y: u64): bool { + x < y && x <= y && !(x > y) && !(x >= y) + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/pack_order.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/pack_order.exp index 813c92392ef60..52cea517af73a 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/pack_order.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/pack_order.exp @@ -5,10 +5,10 @@ module 0x42::pack_unpack { f2: u8, f3: u8, } - private fun pack1(x: u8,y: u8,z: u8): pack_unpack::S { + private fun pack1(x: u8,y: u8,z: u8): S { pack pack_unpack::S(x, y, z) } - private fun pack2(x: u8,y: u8,z: u8): pack_unpack::S { + private fun pack2(x: u8,y: u8,z: u8): S { { let $f1: u8 = x; { @@ -17,13 +17,13 @@ module 0x42::pack_unpack { } } } - private fun pack3(x: u8,y: u8,z: u8): pack_unpack::S { + private fun pack3(x: u8,y: u8,z: u8): S { { let $f2: u8 = x; pack pack_unpack::S(y, $f2, z) } } - private fun pack4(x: u8,y: u8,z: u8): pack_unpack::S { + private fun pack4(x: u8,y: u8,z: u8): S { { let $f2: u8 = x; { @@ -32,13 +32,13 @@ module 0x42::pack_unpack { } } } - private fun pack5(x: u8,y: u8,z: u8): pack_unpack::S { + private fun pack5(x: u8,y: u8,z: u8): S { { let $f3: u8 = x; pack pack_unpack::S(y, z, $f3) } } - private fun pack6(x: u8,y: u8,z: u8): pack_unpack::S { + private fun pack6(x: u8,y: u8,z: u8): S { { let $f3: u8 = x; { @@ -49,68 +49,103 @@ module 0x42::pack_unpack { } } // end 0x42::pack_unpack +// -- Sourcified model before bytecode pipeline +module 0x42::pack_unpack { + struct S { + f1: u8, + f2: u8, + f3: u8, + } + fun pack1(x: u8, y: u8, z: u8): S { + S{f1: x,f2: y,f3: z} + } + fun pack2(x: u8, y: u8, z: u8): S { + let $f1 = x; + let $f3 = y; + S{f1: $f1,f2: z,f3: $f3} + } + fun pack3(x: u8, y: u8, z: u8): S { 
+ let $f2 = x; + S{f1: y,f2: $f2,f3: z} + } + fun pack4(x: u8, y: u8, z: u8): S { + let $f2 = x; + let $f3 = y; + S{f1: z,f2: $f2,f3: $f3} + } + fun pack5(x: u8, y: u8, z: u8): S { + let $f3 = x; + S{f1: y,f2: z,f3: $f3} + } + fun pack6(x: u8, y: u8, z: u8): S { + let $f3 = x; + let $f2 = y; + S{f1: z,f2: $f2,f3: $f3} + } +} + ============ initial bytecode ================ [variant baseline] -fun pack_unpack::pack1($t0: u8, $t1: u8, $t2: u8): pack_unpack::S { - var $t3: pack_unpack::S - 0: $t3 := pack pack_unpack::S($t0, $t1, $t2) +fun pack_unpack::pack1($t0: u8, $t1: u8, $t2: u8): 0x42::pack_unpack::S { + var $t3: 0x42::pack_unpack::S + 0: $t3 := pack 0x42::pack_unpack::S($t0, $t1, $t2) 1: return $t3 } [variant baseline] -fun pack_unpack::pack2($t0: u8, $t1: u8, $t2: u8): pack_unpack::S { - var $t3: pack_unpack::S +fun pack_unpack::pack2($t0: u8, $t1: u8, $t2: u8): 0x42::pack_unpack::S { + var $t3: 0x42::pack_unpack::S var $t4: u8 var $t5: u8 0: $t4 := infer($t0) 1: $t5 := infer($t1) - 2: $t3 := pack pack_unpack::S($t4, $t2, $t5) + 2: $t3 := pack 0x42::pack_unpack::S($t4, $t2, $t5) 3: return $t3 } [variant baseline] -fun pack_unpack::pack3($t0: u8, $t1: u8, $t2: u8): pack_unpack::S { - var $t3: pack_unpack::S +fun pack_unpack::pack3($t0: u8, $t1: u8, $t2: u8): 0x42::pack_unpack::S { + var $t3: 0x42::pack_unpack::S var $t4: u8 0: $t4 := infer($t0) - 1: $t3 := pack pack_unpack::S($t1, $t4, $t2) + 1: $t3 := pack 0x42::pack_unpack::S($t1, $t4, $t2) 2: return $t3 } [variant baseline] -fun pack_unpack::pack4($t0: u8, $t1: u8, $t2: u8): pack_unpack::S { - var $t3: pack_unpack::S +fun pack_unpack::pack4($t0: u8, $t1: u8, $t2: u8): 0x42::pack_unpack::S { + var $t3: 0x42::pack_unpack::S var $t4: u8 var $t5: u8 0: $t4 := infer($t0) 1: $t5 := infer($t1) - 2: $t3 := pack pack_unpack::S($t2, $t4, $t5) + 2: $t3 := pack 0x42::pack_unpack::S($t2, $t4, $t5) 3: return $t3 } [variant baseline] -fun pack_unpack::pack5($t0: u8, $t1: u8, $t2: u8): pack_unpack::S { - var $t3: 
pack_unpack::S +fun pack_unpack::pack5($t0: u8, $t1: u8, $t2: u8): 0x42::pack_unpack::S { + var $t3: 0x42::pack_unpack::S var $t4: u8 0: $t4 := infer($t0) - 1: $t3 := pack pack_unpack::S($t1, $t2, $t4) + 1: $t3 := pack 0x42::pack_unpack::S($t1, $t2, $t4) 2: return $t3 } [variant baseline] -fun pack_unpack::pack6($t0: u8, $t1: u8, $t2: u8): pack_unpack::S { - var $t3: pack_unpack::S +fun pack_unpack::pack6($t0: u8, $t1: u8, $t2: u8): 0x42::pack_unpack::S { + var $t3: 0x42::pack_unpack::S var $t4: u8 var $t5: u8 0: $t4 := infer($t0) 1: $t5 := infer($t1) - 2: $t3 := pack pack_unpack::S($t2, $t5, $t4) + 2: $t3 := pack 0x42::pack_unpack::S($t2, $t5, $t4) 3: return $t3 } diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/pack_unpack.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/pack_unpack.exp index 2bb77b7ee5e8e..17e454f6177ee 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/pack_unpack.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/pack_unpack.exp @@ -5,12 +5,12 @@ module 0x42::pack_unpack { } struct S { f: u64, - g: pack_unpack::T, + g: T, } - private fun pack(x: u64,y: u64): pack_unpack::S { + private fun pack(x: u64,y: u64): S { pack pack_unpack::S(x, pack pack_unpack::T(y)) } - private fun unpack(s: pack_unpack::S): (u64, u64) { + private fun unpack(s: S): (u64, u64) { { let pack_unpack::S{ f, g: pack_unpack::T{ h } } = s; Tuple(f, h) @@ -18,27 +18,45 @@ module 0x42::pack_unpack { } } // end 0x42::pack_unpack +// -- Sourcified model before bytecode pipeline +module 0x42::pack_unpack { + struct T { + h: u64, + } + struct S { + f: u64, + g: T, + } + fun pack(x: u64, y: u64): S { + S{f: x,g: T{h: y}} + } + fun unpack(s: S): (u64, u64) { + let S{f: f,g: T{h: h}} = s; + (f, h) + } +} + ============ initial bytecode ================ [variant baseline] -fun pack_unpack::pack($t0: u64, $t1: u64): pack_unpack::S { - var $t2: pack_unpack::S - var $t3: pack_unpack::T - 0: $t3 := pack 
pack_unpack::T($t1) - 1: $t2 := pack pack_unpack::S($t0, $t3) +fun pack_unpack::pack($t0: u64, $t1: u64): 0x42::pack_unpack::S { + var $t2: 0x42::pack_unpack::S + var $t3: 0x42::pack_unpack::T + 0: $t3 := pack 0x42::pack_unpack::T($t1) + 1: $t2 := pack 0x42::pack_unpack::S($t0, $t3) 2: return $t2 } [variant baseline] -fun pack_unpack::unpack($t0: pack_unpack::S): (u64, u64) { +fun pack_unpack::unpack($t0: 0x42::pack_unpack::S): (u64, u64) { var $t1: u64 var $t2: u64 var $t3: u64 var $t4: u64 - var $t5: pack_unpack::T - 0: ($t3, $t5) := unpack pack_unpack::S($t0) - 1: $t4 := unpack pack_unpack::T($t5) + var $t5: 0x42::pack_unpack::T + 0: ($t3, $t5) := unpack 0x42::pack_unpack::S($t0) + 1: $t4 := unpack 0x42::pack_unpack::T($t5) 2: $t1 := infer($t3) 3: $t2 := infer($t4) 4: return ($t1, $t2) diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/reference_conversion.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/reference_conversion.exp index 406be0d625315..587b47b31883a 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/reference_conversion.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/reference_conversion.exp @@ -15,6 +15,19 @@ module 0x42::reference_conversion { } } // end 0x42::reference_conversion +// -- Sourcified model before bytecode pipeline +module 0x42::reference_conversion { + fun deref(r: &u64): u64 { + *r + } + fun use_it(): u64 { + let x = 42; + let r = &mut x; + *r = 43; + deref(/*freeze*/r) + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/spec_construct.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/spec_construct.exp index d304b793db035..6108d5abdaa96 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/spec_construct.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/spec_construct.exp @@ -5,26 +5,39 @@ module 0x42::m { 
k: u8, } struct S { - data: vector, + data: vector, } - public fun foo(v: &m::S): u8 { - select m::E.k<&m::E>(vector::borrow(Borrow(Immutable)(select m::S.data<&m::S>(v)), 0)) + public fun foo(v: &S): u8 { + select m::E.k<&E>(vector::borrow(Borrow(Immutable)(select m::S.data<&S>(v)), 0)) } } // end 0x42::m +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct E { + k: u8, + } + struct S { + data: vector, + } + public fun foo(v: &S): u8 { + 0x1::vector::borrow(&v.data, 0).k + } +} + ============ initial bytecode ================ [variant baseline] -public fun m::foo($t0: &m::S): u8 { +public fun m::foo($t0: &0x42::m::S): u8 { var $t1: u8 - var $t2: &m::E - var $t3: &vector + var $t2: &0x42::m::E + var $t3: &vector<0x42::m::E> var $t4: u64 var $t5: &u8 - 0: $t3 := borrow_field.data($t0) + 0: $t3 := borrow_field<0x42::m::S>.data($t0) 1: $t4 := 0 - 2: $t2 := vector::borrow($t3, $t4) - 3: $t5 := borrow_field.k($t2) + 2: $t2 := vector::borrow<0x42::m::E>($t3, $t4) + 3: $t5 := borrow_field<0x42::m::E>.k($t2) 4: $t1 := read_ref($t5) 5: return $t1 } diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop.exp index 65d9bf167c6fb..30aff0255e225 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop.exp @@ -5,6 +5,13 @@ module _0 { } } // end _0 +// -- Sourcified model before bytecode pipeline +script { + fun main() { + break + } +} + Diagnostics: error: missing enclosing loop statement diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop_in_else.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop_in_else.exp index fd3592ca4a473..d527f8cbda14d 100644 --- 
a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop_in_else.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop_in_else.exp @@ -10,6 +10,13 @@ module _0 { } } // end _0 +// -- Sourcified model before bytecode pipeline +script { + fun main() { + if (false) () else break; + } +} + Diagnostics: error: missing enclosing loop statement diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop_in_if.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop_in_if.exp index 4ba118a1a902b..37b5c4c56ce10 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop_in_if.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/break_outside_loop_in_if.exp @@ -9,6 +9,13 @@ module _0 { } } // end _0 +// -- Sourcified model before bytecode pipeline +script { + fun main() { + if (true) break + } +} + Diagnostics: error: missing enclosing loop statement diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/continue_outside_loop.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/continue_outside_loop.exp index dd49daa2df427..53b0d1c9d22b9 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/continue_outside_loop.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/continue_outside_loop.exp @@ -5,6 +5,13 @@ module _0 { } } // end _0 +// -- Sourcified model before bytecode pipeline +script { + fun main() { + continue + } +} + Diagnostics: error: missing enclosing loop statement diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/continue_outside_loop_in_if.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/continue_outside_loop_in_if.exp index 9bca0da5da53c..5d59d11e23937 100644 
--- a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/continue_outside_loop_in_if.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-commands/continue_outside_loop_in_if.exp @@ -10,6 +10,13 @@ module _0 { } } // end _0 +// -- Sourcified model before bytecode pipeline +script { + fun main() { + if (true) continue; + } +} + Diagnostics: error: missing enclosing loop statement diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-typing/global_invalid.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-typing/global_invalid.exp index 21c2baac2a0de..34f06097556ec 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-typing/global_invalid.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-typing/global_invalid.exp @@ -14,6 +14,15 @@ module 0x42::m { } } // end 0x42::m +// -- Sourcified model before bytecode pipeline +module 0x42::m { + fun invalid(addr: address) { + if (exists(addr)) () else abort 0; + let _ = borrow_global(addr); + move_from(addr); + } +} + Diagnostics: error: Expected a struct type. Global storage operations are restricted to struct types declared in the current module. 
Found: 'T' diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-typing/mutate_immutable.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-typing/mutate_immutable.exp index 6f70d7b27924b..232c30beb914d 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-typing/mutate_immutable.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/v1-typing/mutate_immutable.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct S { f: u64, } - private fun t0(s: &mut M::S) { + private fun t0(s: &mut S) { s = pack M::S(0); Borrow(Immutable)(0) = 1; { @@ -20,9 +20,24 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has drop { + f: u64, + } + fun t0(s: &mut S) { + *s = S{f: 0}; + *&0 = 1; + let x = 0; + let x_ref = &mut x; + let x_ref = /*freeze*/x_ref; + *x_ref = 0; + } +} + Diagnostics: -error: expected `&mut` but found `&M::S` +error: expected `&mut` but found `&S` ┌─ tests/bytecode-generator/v1-typing/mutate_immutable.move:5:11 │ 5 │ *(s: &S) = S { f: 0 }; diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/vector.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/vector.exp index 278b6ca0e9eda..8843ffc5a4ac7 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/vector.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/vector.exp @@ -5,6 +5,13 @@ module 0x42::vector { } } // end 0x42::vector +// -- Sourcified model before bytecode pipeline +module 0x42::vector { + fun create(): vector { + vector[1, 2, 3] + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard2.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard2.exp index dd31701d7fac8..ac8b896a9e622 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard2.exp +++ 
b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard2.exp @@ -11,6 +11,14 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun baz() { + let x; + let _ = x; + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard3.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard3.exp index 8b17531c4b9e8..d88443f618863 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard3.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard3.exp @@ -5,51 +5,64 @@ module 0xc0ffee::m { } public fun bar() { { - let s: m::S = pack m::S(false); + let s: S = pack m::S(false); { - let _: m::S = s; + let _: S = s; Tuple() } } } - public fun foo(s: m::S) { + public fun foo(s: S) { { - let _: m::S = s; + let _: S = s; Tuple() } } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + struct S { + } + public fun bar() { + let s = S{}; + let _ = s; + } + public fun foo(s: S) { + let _ = s; + } +} + ============ initial bytecode ================ [variant baseline] public fun m::bar() { - var $t0: m::S + var $t0: 0xc0ffee::m::S var $t1: bool - var $t2: m::S + var $t2: 0xc0ffee::m::S 0: $t1 := false - 1: $t0 := pack m::S($t1) + 1: $t0 := pack 0xc0ffee::m::S($t1) 2: $t2 := infer($t0) 3: return () } [variant baseline] -public fun m::foo($t0: m::S) { - var $t1: m::S +public fun m::foo($t0: 0xc0ffee::m::S) { + var $t1: 0xc0ffee::m::S 0: $t1 := infer($t0) 1: return () } Diagnostics: -error: value of type `m::S` does not have the `drop` ability +error: value of type `S` does not have the `drop` ability ┌─ tests/bytecode-generator/wildcard3.move:5:13 │ 5 │ let _ = s; │ ^ implicitly dropped here since it is no longer used -error: value of type `m::S` does not have the `drop` ability +error: value of type `S` does 
not have the `drop` ability ┌─ tests/bytecode-generator/wildcard3.move:10:13 │ 10 │ let _ = s; diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard4.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard4.exp index 10580ab9b060f..60d6ee1b6c7c2 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard4.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard4.exp @@ -18,6 +18,17 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + fun test() { + let x = 3; + let r = &mut x; + let y = &mut x; + let _ = /*freeze*/y; + *r = 4; + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard5.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard5.exp index 1608427e72ff7..44dca347d8756 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard5.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard5.exp @@ -6,7 +6,7 @@ module 0xc0ffee::m { } public fun test() { { - let s: m::S = pack m::S(3, 4); + let s: S = pack m::S(3, 4); { let m::S{ x: _, y: _ } = s; Tuple() @@ -15,19 +15,31 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + struct S { + x: u64, + y: u64, + } + public fun test() { + let s = S{x: 3,y: 4}; + let S{x: _,y: _} = s; + } +} + ============ initial bytecode ================ [variant baseline] public fun m::test() { - var $t0: m::S + var $t0: 0xc0ffee::m::S var $t1: u64 var $t2: u64 var $t3: u64 var $t4: u64 0: $t1 := 3 1: $t2 := 4 - 2: $t0 := pack m::S($t1, $t2) - 3: ($t3, $t4) := unpack m::S($t0) + 2: $t0 := pack 0xc0ffee::m::S($t1, $t2) + 3: ($t3, $t4) := unpack 0xc0ffee::m::S($t0) 4: return () } diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard6.exp 
b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard6.exp index a7e13399a8a0e..da81e9dba809e 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard6.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard6.exp @@ -11,6 +11,15 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test2(): u64 { + let x = 40; + let (y,_) = (move x, x); + y + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard7.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard7.exp index 86431acca3905..255518c657e76 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard7.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard7.exp @@ -22,6 +22,16 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test(): u8 { + let x = 40u8; + let y = move x; + let (_,q) = (x, 30); + y + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard8.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard8.exp index 5b6d62f20943e..f564b6003d986 100644 --- a/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard8.exp +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/wildcard8.exp @@ -11,6 +11,14 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test() { + let x; + let (_,_) = (x, x); + } +} + ============ initial bytecode ================ [variant baseline] diff --git a/third_party/move/move-compiler-v2/tests/checking-lang-v1/entry_inline_err_no_report.exp 
b/third_party/move/move-compiler-v2/tests/checking-lang-v1/entry_inline_err_no_report.exp index 65d74465d64d0..3483724ff69a6 100644 --- a/third_party/move/move-compiler-v2/tests/checking-lang-v1/entry_inline_err_no_report.exp +++ b/third_party/move/move-compiler-v2/tests/checking-lang-v1/entry_inline_err_no_report.exp @@ -12,3 +12,16 @@ module 0x123::b { b::a() } } // end 0x123::b + +// -- Sourcified model before bytecode pipeline +module 0x123::a { + friend entry fun a() { + } +} +module 0x123::b { + entry fun a() { + } + fun b() { + a() + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking-lang-v1/eq_inline_typed.exp b/third_party/move/move-compiler-v2/tests/checking-lang-v1/eq_inline_typed.exp index 7eecb2c8d0891..13aa78ff14597 100644 --- a/third_party/move/move-compiler-v2/tests/checking-lang-v1/eq_inline_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking-lang-v1/eq_inline_typed.exp @@ -16,3 +16,12 @@ module 0x42::m { Tuple() } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + inline fun foo(f: |&u64|) { + } + fun g() { + (); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking-lang-v1/loop_labels.exp b/third_party/move/move-compiler-v2/tests/checking-lang-v1/loop_labels.exp new file mode 100644 index 0000000000000..ca660b6e4e138 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking-lang-v1/loop_labels.exp @@ -0,0 +1,31 @@ + +Diagnostics: +error: unsupported language construct + ┌─ tests/checking-lang-v1/loop_labels.move:3:9 + │ +3 │ 'outer: loop { + │ ^^^^^^ loop labels are not enabled before version 2.1 + +error: unsupported language construct + ┌─ tests/checking-lang-v1/loop_labels.move:6:17 + │ +6 │ 'inner: loop if (true) loop { + │ ^^^^^^ loop labels are not enabled before version 2.1 + +error: unsupported language construct + ┌─ tests/checking-lang-v1/loop_labels.move:7:41 + │ +7 │ if (false) continue 'outer else break 'inner; + │ ^^^^^^ loop labels are not 
enabled before version 2.1 + +error: unsupported language construct + ┌─ tests/checking-lang-v1/loop_labels.move:7:59 + │ +7 │ if (false) continue 'outer else break 'inner; + │ ^^^^^^ loop labels are not enabled before version 2.1 + +error: unsupported language construct + ┌─ tests/checking-lang-v1/loop_labels.move:9:33 + │ +9 │ } else continue 'outer + │ ^^^^^^ loop labels are not enabled before version 2.1 diff --git a/third_party/move/move-compiler-v2/tests/checking-lang-v1/loop_labels.move b/third_party/move/move-compiler-v2/tests/checking-lang-v1/loop_labels.move new file mode 100644 index 0000000000000..e91763e82ae00 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking-lang-v1/loop_labels.move @@ -0,0 +1,14 @@ +module 0x815::test { + fun f1() { + 'outer: loop { + // unlabeled loop, but counts in nesting in AST + loop { + 'inner: loop if (true) loop { + if (false) continue 'outer else break 'inner; + break + } else continue 'outer + }; + break + } + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking-lang-v1/use_struct_overlap_with_module.exp b/third_party/move/move-compiler-v2/tests/checking-lang-v1/use_struct_overlap_with_module.exp index 8b717c666f02b..e554327c1f4f0 100644 --- a/third_party/move/move-compiler-v2/tests/checking-lang-v1/use_struct_overlap_with_module.exp +++ b/third_party/move/move-compiler-v2/tests/checking-lang-v1/use_struct_overlap_with_module.exp @@ -11,3 +11,16 @@ module 0x2::M { f2: X::S, } } // end 0x2::M + +// -- Sourcified model before bytecode pipeline +module 0x2::X { + struct S { + } +} +module 0x2::M { + use 0x2::X; + struct A { + f1: X::S, + f2: X::S, + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/abilities/tuple.exp b/third_party/move/move-compiler-v2/tests/checking/abilities/tuple.exp index 132846113b36a..2b347473d4478 100644 --- a/third_party/move/move-compiler-v2/tests/checking/abilities/tuple.exp +++ b/third_party/move/move-compiler-v2/tests/checking/abilities/tuple.exp 
@@ -3,13 +3,27 @@ module 0x42::tuple { struct S { f: u64, } - private fun tuple(x: u64): (u64, tuple::S) { + private fun tuple(x: u64): (u64, S) { Tuple(x, pack tuple::S(Add(x, 1))) } private fun use_tuple(x: u64): u64 { { - let (x: u64, tuple::S{ f: y }): (u64, tuple::S) = tuple::tuple(x); + let (x: u64, tuple::S{ f: y }): (u64, S) = tuple::tuple(x); Add(x, y) } } } // end 0x42::tuple + +// -- Sourcified model before bytecode pipeline +module 0x42::tuple { + struct S { + f: u64, + } + fun tuple(x: u64): (u64, S) { + (x, S{f: x + 1}) + } + fun use_tuple(x: u64): u64 { + let (x,S{f: y}) = tuple(x); + x + y + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/abilities/v1/ability_constraints.exp b/third_party/move/move-compiler-v2/tests/checking/abilities/v1/ability_constraints.exp index 93bcfd12e634f..f749653a7f251 100644 --- a/third_party/move/move-compiler-v2/tests/checking/abilities/v1/ability_constraints.exp +++ b/third_party/move/move-compiler-v2/tests/checking/abilities/v1/ability_constraints.exp @@ -1,11 +1,11 @@ // -- Model dump before bytecode pipeline module 0x42::M { - struct Box { - f: #0, + struct Box { + f: T, } - struct Pair { - f1: #0, - f2: #1, + struct Pair { + f1: T1, + f2: T2, } struct R { dummy_field: bool, @@ -13,22 +13,22 @@ module 0x42::M { struct S { dummy_field: bool, } - struct Sc { + struct Sc { dummy_field: bool, } - struct Scds { + struct Scds { dummy_field: bool, } - struct Sd { + struct Sd { dummy_field: bool, } - struct Sk { + struct Sk { dummy_field: bool, } - struct Ss { + struct Ss { dummy_field: bool, } - struct Ssk { + struct Ssk { dummy_field: bool, } private fun c() { @@ -125,75 +125,75 @@ module 0x42::M { let M::Ssk{ dummy_field: _ } = pack M::Ssk(false); { let M::Scds{ dummy_field: _ } = pack M::Scds(false); - M::c(); - M::c>(); - M::c, M::S>>(); - M::d(); - M::d>(); - M::d, M::S>>(); - M::s(); - M::s(); - M::s>(); - M::s>(); - M::s, M::S>>(); - M::s>>>(); - M::k(); - M::k>(); - M::k>>(); - M::k>>>(); - 
M::sk(); - M::sk>(); - M::sk>>(); - M::sk>>>(); - M::cds(); - M::cds>(); - M::cds, M::S>>(); + M::c(); + M::c>(); + M::c, S>>(); + M::d(); + M::d>(); + M::d, S>>(); + M::s(); + M::s(); + M::s>(); + M::s>(); + M::s, S>>(); + M::s>>>(); + M::k(); + M::k>(); + M::k>>(); + M::k>>>(); + M::sk(); + M::sk>(); + M::sk>>(); + M::sk>>>(); + M::cds(); + M::cds>(); + M::cds, S>>(); { - let M::Sc{ dummy_field: _ } = pack M::Sc(false); + let M::Sc{ dummy_field: _ } = pack M::Sc(false); { - let M::Sc>{ dummy_field: _ } = pack M::Sc>(false); + let M::Sc>{ dummy_field: _ } = pack M::Sc>(false); { - let M::Sc, M::S>>{ dummy_field: _ } = pack M::Sc, M::S>>(false); + let M::Sc, S>>{ dummy_field: _ } = pack M::Sc, S>>(false); { - let M::Sd{ dummy_field: _ } = pack M::Sd(false); + let M::Sd{ dummy_field: _ } = pack M::Sd(false); { - let M::Sd>{ dummy_field: _ } = pack M::Sd>(false); + let M::Sd>{ dummy_field: _ } = pack M::Sd>(false); { - let M::Sd, M::S>>{ dummy_field: _ } = pack M::Sd, M::S>>(false); + let M::Sd, S>>{ dummy_field: _ } = pack M::Sd, S>>(false); { - let M::Ss{ dummy_field: _ } = pack M::Ss(false); + let M::Ss{ dummy_field: _ } = pack M::Ss(false); { - let M::Ss{ dummy_field: _ } = pack M::Ss(false); + let M::Ss{ dummy_field: _ } = pack M::Ss(false); { - let M::Ss>{ dummy_field: _ } = pack M::Ss>(false); + let M::Ss>{ dummy_field: _ } = pack M::Ss>(false); { - let M::Ss>{ dummy_field: _ } = pack M::Ss>(false); + let M::Ss>{ dummy_field: _ } = pack M::Ss>(false); { - let M::Ss, M::S>>{ dummy_field: _ } = pack M::Ss, M::S>>(false); + let M::Ss, S>>{ dummy_field: _ } = pack M::Ss, S>>(false); { - let M::Ss>>>{ dummy_field: _ } = pack M::Ss>>>(false); + let M::Ss>>>{ dummy_field: _ } = pack M::Ss>>>(false); { - let M::Sk{ dummy_field: _ } = pack M::Sk(false); + let M::Sk{ dummy_field: _ } = pack M::Sk(false); { - let M::Sk>{ dummy_field: _ } = pack M::Sk>(false); + let M::Sk>{ dummy_field: _ } = pack M::Sk>(false); { - let M::Sk>>{ dummy_field: _ } = pack M::Sk>>(false); + 
let M::Sk>>{ dummy_field: _ } = pack M::Sk>>(false); { - let M::Sk>>>{ dummy_field: _ } = pack M::Sk>>>(false); + let M::Sk>>>{ dummy_field: _ } = pack M::Sk>>>(false); { - let M::Ssk{ dummy_field: _ } = pack M::Ssk(false); + let M::Ssk{ dummy_field: _ } = pack M::Ssk(false); { - let M::Ssk>{ dummy_field: _ } = pack M::Ssk>(false); + let M::Ssk>{ dummy_field: _ } = pack M::Ssk>(false); { - let M::Ssk>>{ dummy_field: _ } = pack M::Ssk>>(false); + let M::Ssk>>{ dummy_field: _ } = pack M::Ssk>>(false); { - let M::Ssk>>>{ dummy_field: _ } = pack M::Ssk>>>(false); + let M::Ssk>>>{ dummy_field: _ } = pack M::Ssk>>>(false); { - let M::Scds{ dummy_field: _ } = pack M::Scds(false); + let M::Scds{ dummy_field: _ } = pack M::Scds(false); { - let M::Scds>{ dummy_field: _ } = pack M::Scds>(false); + let M::Scds>{ dummy_field: _ } = pack M::Scds>(false); { - let M::Scds, M::S>>{ dummy_field: _ } = pack M::Scds, M::S>>(false); + let M::Scds, S>>{ dummy_field: _ } = pack M::Scds, S>>(false); Tuple() } } @@ -245,3 +245,140 @@ module 0x42::M { } } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct Box has copy, drop, store, key { + f: T, + } + struct Pair has copy, drop, store, key { + f1: T1, + f2: T2, + } + struct R has store, key { + } + struct S has copy, drop, store, key { + } + struct Sc { + } + struct Scds { + } + struct Sd { + } + struct Sk { + } + struct Ss { + } + struct Ssk { + } + fun c() { + } + fun cds() { + } + fun d() { + } + fun k() { + } + fun s() { + } + fun sk() { + } + fun t1() { + c(); + d(); + s(); + cds
(); + c>(); + d>(); + s>(); + cds>(); + let Sc{} = Sc{}; + let Sd{} = Sd{}; + let Ss{} = Ss{}; + let Scds
{} = Scds
{}; + let Sc>{} = Sc>{}; + let Sd>{} = Sd>{}; + let Ss>{} = Ss>{}; + let Scds>{} = Scds>{}; + c(); + c(); + c(); + c(); + d(); + d(); + d(); + d(); + s(); + s(); + s(); + s(); + s(); + k(); + k(); + sk(); + cds(); + let Sc{} = Sc{}; + let Sc{} = Sc{}; + let Sc{} = Sc{}; + let Sc{} = Sc{}; + let Sd{} = Sd{}; + let Sd{} = Sd{}; + let Sd{} = Sd{}; + let Sd{} = Sd{}; + let Ss{} = Ss{}; + let Ss{} = Ss{}; + let Ss{} = Ss{}; + let Ss{} = Ss{}; + let Ss{} = Ss{}; + let Sk{} = Sk{}; + let Sk{} = Sk{}; + let Ssk{} = Ssk{}; + let Scds{} = Scds{}; + c(); + c>(); + c, S>>(); + d(); + d>(); + d, S>>(); + s(); + s(); + s>(); + s>(); + s, S>>(); + s>>>(); + k(); + k>(); + k>>(); + k>>>(); + sk(); + sk>(); + sk>>(); + sk>>>(); + cds(); + cds>(); + cds, S>>(); + let Sc{} = Sc{}; + let Sc>{} = Sc>{}; + let Sc, S>>{} = Sc, S>>{}; + let Sd{} = Sd{}; + let Sd>{} = Sd>{}; + let Sd, S>>{} = Sd, S>>{}; + let Ss{} = Ss{}; + let Ss{} = Ss{}; + let Ss>{} = Ss>{}; + let Ss>{} = Ss>{}; + let Ss, S>>{} = Ss, S>>{}; + let Ss>>>{} = Ss>>>{}; + let Sk{} = Sk{}; + let Sk>{} = Sk>{}; + let Sk>>{} = Sk>>{}; + let Sk>>>{} = Sk>>>{}; + let Ssk{} = Ssk{}; + let Ssk>{} = Ssk>{}; + let Ssk>>{} = Ssk>>{}; + let Ssk>>>{} = Ssk>>>{}; + let Scds{} = Scds{}; + let Scds>{} = Scds>{}; + let Scds, S>>{} = Scds, S>>{}; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_param_op_abilities.exp b/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_param_op_abilities.exp index 438f27a7322ab..0e83fb52e54e8 100644 --- a/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_param_op_abilities.exp +++ b/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_param_op_abilities.exp @@ -1,50 +1,96 @@ // -- Model dump before bytecode pipeline module 0x42::M { - struct HasCopy { - a: #1, + struct HasCopy { + a: T2, } - struct HasDrop { - a: #1, + struct HasDrop { + a: T2, } - struct HasKey { - a: #1, + struct HasKey { + a: T2, } - struct 
HasStore { - a: #1, + struct HasStore { + a: T2, } struct NoAbilities { dummy_field: bool, } - struct RequireStore { - a: #0, + struct RequireStore { + a: T, } - private fun f1(ref: &mut M::HasDrop) { - ref = pack M::HasDrop(1); + private fun f1(ref: &mut HasDrop) { + ref = pack M::HasDrop(1); Tuple() } private fun f2() { - _: M::HasDrop = pack M::HasDrop(1); + _: HasDrop = pack M::HasDrop(1); Tuple() } - private fun f3(_x: M::HasDrop) { + private fun f3(_x: HasDrop) { Tuple() } - private fun f4(x: M::HasCopy): (M::HasCopy, M::HasCopy) { + private fun f4(x: HasCopy): (HasCopy, HasCopy) { Tuple(Copy(x), x) } - private fun f5(s: &signer,x: M::HasKey) { - MoveTo>(s, x); + private fun f5(s: &signer,x: HasKey) { + MoveTo>(s, x); Tuple() } - private fun f6(): M::HasKey - acquires M::HasKey(*) + private fun f6(): HasKey + acquires HasKey(*) { - MoveFrom>(0x0) + MoveFrom>(0x0) } private fun f7(): bool { - exists>(0x0) + exists>(0x0) } - private fun f8(): M::RequireStore> { - pack M::RequireStore>(pack M::HasStore(1)) + private fun f8(): RequireStore> { + pack M::RequireStore>(pack M::HasStore(1)) } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct HasCopy has copy { + a: T2, + } + struct HasDrop has drop { + a: T2, + } + struct HasKey has key { + a: T2, + } + struct HasStore has store { + a: T2, + } + struct NoAbilities { + } + struct RequireStore { + a: T, + } + fun f1(ref: &mut HasDrop) { + *ref = HasDrop{a: 1}; + } + fun f2() { + _ = HasDrop{a: 1}; + } + fun f3(_x: HasDrop) { + } + fun f4(x: HasCopy): (HasCopy, HasCopy) { + (copy x, x) + } + fun f5(s: &signer, x: HasKey) { + move_to>(s, x); + } + fun f6(): HasKey + acquires HasKey + { + move_from>(0x0) + } + fun f7(): bool { + exists>(0x0) + } + fun f8(): RequireStore> { + RequireStore>{a: HasStore{a: 1}} + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_params_constraint_abilities.exp 
b/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_params_constraint_abilities.exp index cbc230acd5142..ff67e3db96804 100644 --- a/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_params_constraint_abilities.exp +++ b/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_params_constraint_abilities.exp @@ -1,50 +1,97 @@ // -- Model dump before bytecode pipeline module 0x42::M { - struct HasAbilities { - a: #1, + struct HasAbilities { + a: T2, } - struct HasCopy { - a: #1, + struct HasCopy { + a: T2, } - struct HasDrop { - a: #1, + struct HasDrop { + a: T2, } - struct HasKey { - a: #1, + struct HasKey { + a: T2, } - struct HasStore { - a: #1, + struct HasStore { + a: T2, } struct NoAbilities { a: bool, } - struct S1 { - a: #0, + struct S1 { + a: T, } struct S2 { - a: M::S1>, + a: S1>, } - struct S3 { - a: #0, - b: #1, - c: #2, - d: #3, + struct S3 { + a: T1, + b: T2, + c: T3, + d: T4, } struct S4 { - a: M::S3, M::HasCopy, M::HasStore, M::HasKey>, + a: S3, HasCopy, HasStore, HasKey>, } private fun f1() { Tuple() } private fun f2() { - M::f1>(); + M::f1>(); Tuple() } private fun f3() { Tuple() } private fun f4() { - M::f3, M::HasCopy, M::HasStore, M::HasKey>(); + M::f3, HasCopy, HasStore, HasKey>(); Tuple() } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct HasAbilities has copy, drop, store, key { + a: T2, + } + struct HasCopy has copy { + a: T2, + } + struct HasDrop has drop { + a: T2, + } + struct HasKey has key { + a: T2, + } + struct HasStore has store { + a: T2, + } + struct NoAbilities { + a: bool, + } + struct S1 { + a: T, + } + struct S2 { + a: S1>, + } + struct S3 { + a: T1, + b: T2, + c: T3, + d: T4, + } + struct S4 { + a: S3, HasCopy, HasStore, HasKey>, + } + fun f1() { + } + fun f2() { + f1>(); + } + fun f3() { + } + fun f4() { + f3,HasCopy,HasStore,HasKey>(); + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_params_field_abilities.exp b/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_params_field_abilities.exp index bfa5c712d716a..b7bc1372321de 100644 --- a/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_params_field_abilities.exp +++ b/third_party/move/move-compiler-v2/tests/checking/abilities/v1/phantom_params_field_abilities.exp @@ -1,30 +1,60 @@ // -- Model dump before bytecode pipeline module 0x42::M { - struct HasCopy { - a: #1, + struct HasCopy { + a: T2, } - struct HasDrop { - a: #1, + struct HasDrop { + a: T2, } - struct HasKey { - a: #1, + struct HasKey { + a: T2, } - struct HasStore { - a: #1, + struct HasStore { + a: T2, } struct NoAbilities { dummy_field: bool, } struct S1 { - a: M::HasDrop, + a: HasDrop, } struct S2 { - a: M::HasCopy, + a: HasCopy, } struct S3 { - a: M::HasStore, + a: HasStore, } struct S4 { - a: M::HasStore, + a: HasStore, } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct HasCopy has copy { + a: T2, + } + struct HasDrop has drop { + a: T2, + } + struct HasKey has key { + a: T2, + } + struct HasStore has store { + a: T2, + } + struct NoAbilities { + } + struct S1 has drop { + a: HasDrop, + } + struct S2 has copy { + a: HasCopy, + } + struct S3 has store { + a: HasStore, + } + struct S4 has key { + a: HasStore, + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/access_specifiers/access_ok.exp b/third_party/move/move-compiler-v2/tests/checking/access_specifiers/access_ok.exp index b2e3bc2231280..8298da9baae10 100644 --- a/third_party/move/move-compiler-v2/tests/checking/access_specifiers/access_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/access_specifiers/access_ok.exp @@ -23,7 +23,7 @@ module 0x42::m { struct T { dummy_field: bool, } - struct G { + struct G { dummy_field: bool, } struct R { @@ -33,7 +33,7 @@ module 0x42::m { dummy_field: bool, 
} private fun f1() - acquires m::S(*) + acquires S(*) { Tuple() } @@ -53,17 +53,17 @@ module 0x42::m { Tuple() } private fun f2() - reads m::S(*) + reads S(*) { Tuple() } private fun f3() - writes m::S(*) + writes S(*) { Tuple() } private fun f4() - acquires m::S(*) + acquires S(*) { Tuple() } @@ -93,11 +93,11 @@ module 0x42::m { Tuple() } private fun f_multiple() - acquires m::R(*) - reads m::R(*) - writes m::T(*) - writes m::S(*) - reads m::G(*) + acquires R(*) + reads R(*) + writes T(*) + writes S(*) + reads G(*) { Tuple() } @@ -105,3 +105,75 @@ module 0x42::m { 0x42 } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct T has store { + } + struct G has store { + } + struct R has store { + } + struct S has store { + } + fun f1() + acquires S + { + } + fun f10(x: u64) + acquires *(m::make_up_address(x)) + + { + } + fun f11() + !reads *(0x42) + !reads *(0x43) + + { + } + fun f12() + + { + } + fun f2() + reads S + { + } + fun f3() + writes S + { + } + fun f4() + acquires S + { + } + fun f5() + acquires 0x42::* + { + } + fun f6() + acquires 0x42::m::* + { + } + fun f7() + acquires * + { + } + fun f8() + acquires *(0x42) + + { + } + fun f9(a: address) + acquires *(a) + + { + } + fun f_multiple() + acquires Rreads Rwrites Twrites Sreads G + { + } + fun make_up_address(x: u64): address { + 0x42 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/access_specifiers/acquires_list_generic.exp b/third_party/move/move-compiler-v2/tests/checking/access_specifiers/acquires_list_generic.exp index fa0d50a8ea13d..82e8a78a2518a 100644 --- a/third_party/move/move-compiler-v2/tests/checking/access_specifiers/acquires_list_generic.exp +++ b/third_party/move/move-compiler-v2/tests/checking/access_specifiers/acquires_list_generic.exp @@ -1,17 +1,32 @@ // -- Model dump before bytecode pipeline module 0x42::M { - struct B { + struct B { dummy_field: bool, } - struct CupC { + struct CupC { dummy_field: bool, } struct R { 
dummy_field: bool, } private fun foo() - acquires M::B>(*) + acquires B>(*) { Abort(0) } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct B { + } + struct CupC { + } + struct R { + } + fun foo() + acquires B> + { + abort 0 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/attributes/aptos_stdlib_attributes2.exp b/third_party/move/move-compiler-v2/tests/checking/attributes/aptos_stdlib_attributes2.exp index 094f58d19cec5..0b0e0375c67f8 100644 --- a/third_party/move/move-compiler-v2/tests/checking/attributes/aptos_stdlib_attributes2.exp +++ b/third_party/move/move-compiler-v2/tests/checking/attributes/aptos_stdlib_attributes2.exp @@ -15,3 +15,11 @@ module 0x1::M { Tuple() } } // end 0x1::M + +// -- Sourcified model before bytecode pipeline +module 0x1::M { + fun bar() { + } + fun foo() { + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/attributes/attribute_placement.exp b/third_party/move/move-compiler-v2/tests/checking/attributes/attribute_placement.exp index 9798299691c27..082f863b09a34 100644 --- a/third_party/move/move-compiler-v2/tests/checking/attributes/attribute_placement.exp +++ b/third_party/move/move-compiler-v2/tests/checking/attributes/attribute_placement.exp @@ -106,3 +106,24 @@ module _0 { Tuple() } } // end _0 + +// -- Sourcified model before bytecode pipeline +module 0x42::N { + friend 0x42::M; + public fun bar() { + } +} +module 0x42::M { + use 0x42::N; + struct S { + } + public fun foo() { + N::bar() + } +} +script { + use 0x42::M; + fun main() { + M::foo(); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/attributes/attribute_variants.exp b/third_party/move/move-compiler-v2/tests/checking/attributes/attribute_variants.exp index c8f499f0d0cbe..9f6a73399eca2 100644 --- a/third_party/move/move-compiler-v2/tests/checking/attributes/attribute_variants.exp +++ b/third_party/move/move-compiler-v2/tests/checking/attributes/attribute_variants.exp @@ 
-63,3 +63,7 @@ warning: unknown attribute // -- Model dump before bytecode pipeline module 0x42::M { } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { +} diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_after_loop.exp b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_after_loop.exp index 08bf31d7ac079..74054ff1f182e 100644 --- a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_after_loop.exp +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_after_loop.exp @@ -9,3 +9,11 @@ module _0 { } } } // end _0 + +// -- Sourcified model before bytecode pipeline +script { + fun main() { + loop break; + loop () + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_err.exp b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_err.exp new file mode 100644 index 0000000000000..fd0b9ddf3ef67 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_err.exp @@ -0,0 +1,21 @@ + +Diagnostics: +error: label `'outer` undefined + ┌─ tests/checking/control_flow/loop_labels_check_err.move:3:15 + │ +3 │ break 'outer; + │ ^^^^^^ + +error: label `'inner` undefined + ┌─ tests/checking/control_flow/loop_labels_check_err.move:5:19 + │ +5 │ break 'inner + │ ^^^^^^ + +error: label `'l1` already used by outer loop + ┌─ tests/checking/control_flow/loop_labels_check_err.move:11:19 + │ +11 │ 'l1: loop 'l1: loop {}; + │ --- ^^^ + │ │ + │ outer definition of label diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_err.move b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_err.move new file mode 100644 index 0000000000000..6a1a1f616934e --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_err.move @@ -0,0 +1,14 @@ +module 0x815::test { + fun 
undefined_label() { + break 'outer; + 'outer: loop { + break 'inner + } + } + + fun duplicate_label() { + 'l1: loop {}; + 'l1: loop 'l1: loop {}; + 'l1: loop {} + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_ok.exp b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_ok.exp new file mode 100644 index 0000000000000..3bf1a2816123f --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_ok.exp @@ -0,0 +1,37 @@ +// -- Model dump before bytecode pipeline +module 0x815::test { + private fun f1() { + loop { + loop { + loop { + if true { + loop { + if false { + continue[3] + } else { + break[1] + }; + break + } + } else { + continue[2] + } + } + }; + break + } + } +} // end 0x815::test + +// -- Sourcified model before bytecode pipeline +module 0x815::test { + fun f1() { + 'l0: loop { + loop 'l1: loop if (true) loop { + if (false) continue 'l0 else break 'l1; + break + } else continue 'l0; + break + } + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_ok.move b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_ok.move new file mode 100644 index 0000000000000..e91763e82ae00 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_check_ok.move @@ -0,0 +1,14 @@ +module 0x815::test { + fun f1() { + 'outer: loop { + // unlabeled loop, but counts in nesting in AST + loop { + 'inner: loop if (true) loop { + if (false) continue 'outer else break 'inner; + break + } else continue 'outer + }; + break + } + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err1.exp b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err1.exp new file mode 100644 index 0000000000000..5da0a89107cbc --- /dev/null +++ 
b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err1.exp @@ -0,0 +1,10 @@ + +Diagnostics: +error: unexpected token + ┌─ tests/checking/control_flow/loop_labels_parse_err1.move:3:13 + │ +3 │ 'a: if (true) false else true + │ ^^ + │ │ + │ Unexpected 'if' + │ Expected one of: `while` or `loop` diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err1.move b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err1.move new file mode 100644 index 0000000000000..319268dd01537 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err1.move @@ -0,0 +1,5 @@ +module 0x815::test { + fun f1(): bool { + 'a: if (true) false else true + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err2.exp b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err2.exp new file mode 100644 index 0000000000000..f2275ffd0e61e --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err2.exp @@ -0,0 +1,10 @@ + +Diagnostics: +error: unexpected token + ┌─ tests/checking/control_flow/loop_labels_parse_err2.move:3:13 + │ +3 │ 'a: if (true) false else true + │ ^^ + │ │ + │ Unexpected 'if' + │ Expected one of: `while` or `loop` diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err2.move b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err2.move new file mode 100644 index 0000000000000..319268dd01537 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err2.move @@ -0,0 +1,5 @@ +module 0x815::test { + fun f1(): bool { + 'a: if (true) false else true + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err3.exp 
b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err3.exp new file mode 100644 index 0000000000000..59cf17aac2daa --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err3.exp @@ -0,0 +1,7 @@ + +Diagnostics: +error: invalid character + ┌─ tests/checking/control_flow/loop_labels_parse_err3.move:3:10 + │ +3 │ ': if (true) false else true + │ ^ Label quote must be followed by 'A-Z', `a-z', or '_' diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err3.move b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err3.move new file mode 100644 index 0000000000000..150322d37c6b9 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err3.move @@ -0,0 +1,9 @@ +module 0x815::test { + fun f1(): bool { + ': if (true) false else true + } + + fun f1(): bool { + '0x: if (true) false else true + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err4.exp b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err4.exp new file mode 100644 index 0000000000000..b8bfb04974e35 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err4.exp @@ -0,0 +1,7 @@ + +Diagnostics: +error: invalid character + ┌─ tests/checking/control_flow/loop_labels_parse_err4.move:3:10 + │ +3 │ '0x: if (true) false else true + │ ^ Label quote must be followed by 'A-Z', `a-z', or '_' diff --git a/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err4.move b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err4.move new file mode 100644 index 0000000000000..4ec39b2125b2d --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/control_flow/loop_labels_parse_err4.move @@ -0,0 +1,5 @@ +module 0x815::test { + fun f1(): bool { + '0x: if 
(true) false else true + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/dotdot/dotdot_valid.exp b/third_party/move/move-compiler-v2/tests/checking/dotdot/dotdot_valid.exp index cd061f4b76d74..6b25517efe372 100644 --- a/third_party/move/move-compiler-v2/tests/checking/dotdot/dotdot_valid.exp +++ b/third_party/move/move-compiler-v2/tests/checking/dotdot/dotdot_valid.exp @@ -10,7 +10,7 @@ module 0x42::test { } C { x: u8, - y: test::S1, + y: S1, } } struct S0 { @@ -21,23 +21,23 @@ module 0x42::test { } struct S2 { 0: bool, - 1: test::S0, + 1: S0, } struct S3 { x: bool, y: u8, } - struct S4 { - x: #0, - y: test::S3, + struct S4 { + x: T, + y: S3, } - struct S5 { - 0: #0, - 1: #1, + struct S5 { + 0: T, + 1: U, } - struct S6 { - x: #0, - y: #1, + struct S6 { + x: T, + y: U, } struct S7 { 0: u8, @@ -45,13 +45,13 @@ module 0x42::test { 2: u32, 3: u64, } - private inline fun lambda_param(f: |test::S2|bool): bool { + private inline fun lambda_param(f: |S2|bool): bool { { - let x: test::S2 = pack test::S2(true, pack test::S0(false)); + let x: S2 = pack test::S2(true, pack test::S0(false)); (f)(x) } } - private fun nested1(x: test::S4) { + private fun nested1(x: S4) { { let test::S4{ x: _x, y: _ } = x; { @@ -75,7 +75,7 @@ module 0x42::test { } } } - private fun nested1_ref(x: &test::S4) { + private fun nested1_ref(x: &S4) { { let test::S4{ x: _x, y: _ } = x; { @@ -99,67 +99,67 @@ module 0x42::test { } } } - private fun nested2(x: test::S5) { + private fun nested2(x: S5) { { - let test::S5{ 0: _, 1: test::S1{ 0: _ } } = x; + let test::S5{ 0: _, 1: test::S1{ 0: _ } } = x; Tuple() } } - private fun nested2_ref(x: &test::S5) { + private fun nested2_ref(x: &S5) { { - let test::S5{ 0: _, 1: test::S1{ 0: _ } } = x; + let test::S5{ 0: _, 1: test::S1{ 0: _ } } = x; Tuple() } } - private fun nested3(x: test::S5>) { + private fun nested3(x: S5>) { { - let test::S5>{ 0: _, 1: test::S4{ x: _, y: _ } } = x; + let test::S5>{ 0: _, 1: test::S4{ x: _, y: _ } } = x; Tuple() } 
} - private fun nested3_ref(x: &test::S5>) { + private fun nested3_ref(x: &S5>) { { - let test::S5>{ 0: _, 1: test::S4{ x: _, y: _ } } = x; + let test::S5>{ 0: _, 1: test::S4{ x: _, y: _ } } = x; Tuple() } } - private fun nested4(x: test::S4) { + private fun nested4(x: S4) { { - let test::S4{ x: test::S1{ 0: _ }, y: _ } = x; + let test::S4{ x: test::S1{ 0: _ }, y: _ } = x; Tuple() } } - private fun nested4_ref(x: &test::S4) { + private fun nested4_ref(x: &S4) { { - let test::S4{ x: test::S1{ 0: _ }, y: _ } = x; + let test::S4{ x: test::S1{ 0: _ }, y: _ } = x; Tuple() } } - private fun simple_0(x: test::S0) { + private fun simple_0(x: S0) { { let test::S0{ dummy_field: _ } = x; Tuple() } } - private fun simple_0_ref(x: &test::S0) { + private fun simple_0_ref(x: &S0) { { let test::S0{ dummy_field: _ } = x; Tuple() } } - private fun simple_1(x: test::S1) { + private fun simple_1(x: S1) { { let test::S1{ 0: _ } = x; Tuple() } } - private fun simple_1_ref(x: &mut test::S1) { + private fun simple_1_ref(x: &mut S1) { { let test::S1{ 0: _ } = x; Tuple() } } - private fun simple_2(x: test::S2) { + private fun simple_2(x: S2) { { let test::S2{ 0: _, 1: _ } = x; { @@ -186,7 +186,7 @@ module 0x42::test { } } } - private fun simple_2_ref(x: &test::S2) { + private fun simple_2_ref(x: &S2) { { let test::S2{ 0: _, 1: _ } = x; { @@ -213,7 +213,7 @@ module 0x42::test { } } } - private fun simple_3(x: test::S3) { + private fun simple_3(x: S3) { { let test::S3{ x: _, y: _ } = x; { @@ -225,7 +225,7 @@ module 0x42::test { } } } - private fun simple_3_ref(x: test::S3) { + private fun simple_3_ref(x: S3) { { let test::S3{ x: _, y: _ } = x; { @@ -237,7 +237,7 @@ module 0x42::test { } } } - private fun simple_4(x: test::E1): u8 { + private fun simple_4(x: E1): u8 { match (x) { test::E1::A{ 0: x, 1: _ } => { x @@ -251,7 +251,7 @@ module 0x42::test { } } - private fun simple_4_ref(x: &test::E1): &u8 { + private fun simple_4_ref(x: &E1): &u8 { match (x) { test::E1::A{ 0: x, 1: _ } => { x @@ 
-262,7 +262,7 @@ module 0x42::test { } } - private fun simple_5(x: test::E1): u8 { + private fun simple_5(x: E1): u8 { match (x) { test::E1::A{ 0: _, 1: y } => { if y { @@ -280,7 +280,7 @@ module 0x42::test { } } - private fun simple_6(x: &test::S7) { + private fun simple_6(x: &S7) { { let test::S7{ 0: _w, 1: _, 2: _, 3: _z } = x; { @@ -291,15 +291,174 @@ module 0x42::test { } private fun test_lambda_param(): bool { { - let x: test::S2 = pack test::S2(true, pack test::S0(false)); + let x: S2 = pack test::S2(true, pack test::S0(false)); { - let (test::S2{ 0: x, 1: _ }): (test::S2) = Tuple(x); + let (test::S2{ 0: x, 1: _ }): (S2) = Tuple(x); x } } } } // end 0x42::test +// -- Sourcified model before bytecode pipeline +module 0x42::test { + enum E1 has drop { + A { + 0: u8, + 1: bool, + } + B { + 0: u8, + } + C { + x: u8, + y: S1, + } + } + struct S0 has copy { + } + struct S1 has copy, drop { + 0: u8, + } + struct S2 has copy { + 0: bool, + 1: S0, + } + struct S3 has copy { + x: bool, + y: u8, + } + struct S4 has copy { + x: T, + y: S3, + } + struct S5 { + 0: T, + 1: U, + } + struct S6 { + x: T, + y: U, + } + struct S7 { + 0: u8, + 1: u16, + 2: u32, + 3: u64, + } + inline fun lambda_param(f: |S2|bool): bool { + let x = S2(true,S0{}); + f(x) + } + fun nested1(x: S4) { + let S4{x: _x,y: _} = x; + let S4{x: _,y: _y} = x; + let S4{x: _,y: S3{x: _,y: _}} = x; + let S4{x: _,y: S3{x: _x,y: _}} = x; + let S4{x: _x2,y: S3{x: _x1,y: _}} = x; + let S4{x: _,y: S3{x: _,y: _y}} = x; + let S4{x: _x2,y: S3{x: _x1,y: _}} = x; + } + fun nested1_ref(x: &S4) { + let S4{x: _x,y: _} = x; + let S4{x: _,y: _y} = x; + let S4{x: _,y: S3{x: _,y: _}} = x; + let S4{x: _,y: S3{x: _x,y: _}} = x; + let S4{x: _x2,y: S3{x: _x1,y: _}} = x; + let S4{x: _,y: S3{x: _,y: _y}} = x; + let S4{x: _x2,y: S3{x: _x1,y: _}} = x; + } + fun nested2(x: S5) { + let S5(_,S1(_)) = x; + } + fun nested2_ref(x: &S5) { + let S5(_,S1(_)) = x; + } + fun nested3(x: S5>) { + let S5>(_,S4{x: _,y: _}) = x; + } + fun 
nested3_ref(x: &S5>) { + let S5>(_,S4{x: _,y: _}) = x; + } + fun nested4(x: S4) { + let S4{x: S1(_),y: _} = x; + } + fun nested4_ref(x: &S4) { + let S4{x: S1(_),y: _} = x; + } + fun simple_0(x: S0) { + let S0{} = x; + } + fun simple_0_ref(x: &S0) { + let S0{} = x; + } + fun simple_1(x: S1) { + let S1(_) = x; + } + fun simple_1_ref(x: &mut S1) { + let S1(_) = x; + } + fun simple_2(x: S2) { + let S2(_,_) = x; + let S2(_x,_) = x; + let S2(_,_x) = x; + let S2(_,_) = x; + let S2(_,_) = x; + let S2(_x,_y) = x; + let S2(_x,_y) = x; + let S2(_x,_y) = x; + } + fun simple_2_ref(x: &S2) { + let S2(_,_) = x; + let S2(_x,_) = x; + let S2(_,_x) = x; + let S2(_,_) = x; + let S2(_,_) = x; + let S2(_x,_y) = x; + let S2(_x,_y) = x; + let S2(_x,_y) = x; + } + fun simple_3(x: S3) { + let S3{x: _,y: _} = x; + let S3{x: _x,y: _} = x; + let S3{x: _,y: _y} = x; + } + fun simple_3_ref(x: S3) { + let S3{x: _,y: _} = x; + let S3{x: _x,y: _} = x; + let S3{x: _,y: _y} = x; + } + fun simple_4(x: E1): u8 { + match (x) { + E1::A(x,_) => x, + E1::B(x) => x, + E1::C{x: x,y: _} => x, + } + } + fun simple_4_ref(x: &E1): &u8 { + match (x) { + E1::A(x,_) => x, + E1::B(x) => x, + } + } + fun simple_5(x: E1): u8 { + match (x) { + E1::A(_,y) => if (y) 1u8 else 0u8, + E1::B(x) => x, + E1::C{x: _,y: S1(x)} => x, + } + } + fun simple_6(x: &S7) { + let S7(_w,_,_,_z) = x; + let S7(_w,_x,_y,_z) = x; + } + fun test_lambda_param(): bool { + let x = S2(true,S0{}); + let (S2(x,_)) = (x); + x + } +} + Diagnostics: error: match not exhaustive diff --git a/third_party/move/move-compiler-v2/tests/checking/dotdot/extra_dotdot.exp b/third_party/move/move-compiler-v2/tests/checking/dotdot/extra_dotdot.exp index d772036e3fe4c..cf7a808320552 100644 --- a/third_party/move/move-compiler-v2/tests/checking/dotdot/extra_dotdot.exp +++ b/third_party/move/move-compiler-v2/tests/checking/dotdot/extra_dotdot.exp @@ -10,7 +10,7 @@ module 0x42::test { 1: u8, 2: address, } - private fun extra_dotdot(x: test::S,y: test::T) { + private 
fun extra_dotdot(x: S,y: T) { { let test::S{ 0: _x, 1: _, 2: _ } = x; { @@ -26,3 +26,23 @@ module 0x42::test { } } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + struct T { + x: bool, + y: u8, + z: address, + } + struct S { + 0: bool, + 1: u8, + 2: address, + } + fun extra_dotdot(x: S, y: T) { + let S(_x,_,_) = x; + let S(_,_,_) = x; + let S(_,_,_) = x; + let T{x: _,y: _,z: _} = y; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/indexing/examples_book.exp b/third_party/move/move-compiler-v2/tests/checking/indexing/examples_book.exp index db8a5d33f66f5..979ec749c6216 100644 --- a/third_party/move/move-compiler-v2/tests/checking/indexing/examples_book.exp +++ b/third_party/move/move-compiler-v2/tests/checking/indexing/examples_book.exp @@ -4,18 +4,18 @@ module 0x1::m { value: bool, } private fun f1() - acquires m::R(*) + acquires R(*) { { - let x: &mut m::R = BorrowGlobal(Mutable)(0x1); - select m::R.value<&mut m::R>(x) = false; - if Eq(select m::R.value(BorrowGlobal(Immutable)(0x1)), false) { + let x: &mut R = BorrowGlobal(Mutable)(0x1); + select m::R.value<&mut R>(x) = false; + if Eq(select m::R.value(BorrowGlobal(Immutable)(0x1)), false) { Tuple() } else { Abort(1) }; - select m::R.value(BorrowGlobal(Mutable)(0x1)) = true; - if Eq(select m::R.value(BorrowGlobal(Immutable)(0x1)), true) { + select m::R.value(BorrowGlobal(Mutable)(0x1)) = true; + if Eq(select m::R.value(BorrowGlobal(Immutable)(0x1)), true) { Tuple() } else { Abort(2) @@ -24,3 +24,19 @@ module 0x1::m { } } } // end 0x1::m + +// -- Sourcified model before bytecode pipeline +module 0x1::m { + struct R has drop, key { + value: bool, + } + fun f1() + acquires R + { + let x = borrow_global_mut(0x1); + x.value = false; + if (borrow_global(0x1).value == false) () else abort 1; + borrow_global_mut(0x1).value = true; + if (borrow_global(0x1).value == true) () else abort 2; + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/inlining/bug_11112.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/bug_11112.exp index 2d26e677fd493..fb79d81744afa 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/bug_11112.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/bug_11112.exp @@ -35,5 +35,26 @@ module 0xcafe::vectors { } } // end 0xcafe::vectors +// -- Sourcified model before bytecode pipeline +module 0xcafe::vectors { + fun test_for_each_mut() { + let v = vector[1, 2, 3]; + let s = 2; + { + let (v) = (&mut v); + let i = 0; + while (i < 0x1::vector::length(/*freeze*/v)) { + { + let (e) = (0x1::vector::borrow_mut(v, i)); + *e = s; + s = s + 1 + }; + i = i + 1 + } + }; + if (v == vector[2, 3, 4]) () else abort 0; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/bug_11223.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/bug_11223.exp index 88ecb0c6d4f29..cd772a8c8663b 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/bug_11223.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/bug_11223.exp @@ -18,5 +18,16 @@ module 0xcafe::vectors { } } // end 0xcafe::vectors +// -- Sourcified model before bytecode pipeline +module 0xcafe::vectors { + public entry fun guess_flips_break2(flips: vector): u64 { + let flipsref5 = &flips; + let _v = copy flips; + let _v2 = flips; + let x = flipsref5; + 0x1::vector::length(x) + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/bug_9717.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/bug_9717.exp index 6f66dccff2e22..5462c607e72ad 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/bug_9717.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/bug_9717.exp @@ -169,5 +169,80 @@ module 0xcafe::vectors { } } // 
end 0xcafe::vectors +// -- Sourcified model before bytecode pipeline +module 0xcafe::vectors { + public entry fun guess_flips(flips: vector) { + { + let (flips) = (&flips); + let i = 0; + while (i < 0x1::vector::length(flips)) { + if (*0x1::vector::borrow(flips, i) != 0u8) break; + i = i + 1; + }; + }; + let _v = copy flips; + let _v2 = flips; + } + public entry fun guess_flips_directly(flips: vector) { + let i = 0; + while (i < 0x1::vector::length(&flips)) { + if (*0x1::vector::borrow(&flips, i) != 0u8) break; + i = i + 1; + }; + let _v = copy flips; + let _v2 = flips; + } + public entry fun guess_with_break_without_inline(flips: vector) { + loops_with_break_no_inline(&flips); + let _v = copy flips; + let _v2 = flips; + } + public entry fun guess_without_break_with_inline(flips: vector) { + { + let (flips) = (&flips); + let i = 0; + while (i < 0x1::vector::length(flips)) { + if (*0x1::vector::borrow(flips, i) == 0u8) () else abort 3; + i = i + 1; + }; + }; + let _v = flips; + let _v2 = copy flips; + } + inline fun loops_with_break(flips: &vector) { + let i = 0; + while (i < 0x1::vector::length(flips)) { + if (*0x1::vector::borrow(flips, i) != 0u8) break; + i = i + 1; + }; + } + fun loops_with_break_no_inline(flips: &vector) { + let i = 0; + while (i < 0x1::vector::length(flips)) { + if (*0x1::vector::borrow(flips, i) != 0u8) break; + i = i + 1; + }; + } + inline fun loops_without_break(flips: &vector) { + let i = 0; + while (i < 0x1::vector::length(flips)) { + if (*0x1::vector::borrow(flips, i) == 0u8) () else abort 3; + i = i + 1; + }; + } + fun test_guess_directly() { + guess_flips_directly(vector[0u8, 0u8, 0u8, 0u8]); + } + fun test_guess_with_break_no_inline() { + guess_with_break_without_inline(vector[0u8, 0u8, 0u8, 0u8]); + } + fun test_guess_with_inline_break() { + guess_flips(vector[0u8, 0u8, 0u8, 0u8]); + } + fun test_guess_without_break() { + guess_without_break_with_inline(vector[0u8, 0u8, 0u8, 0u8]); + } +} + ============ bytecode verification 
succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/bug_9717_looponly.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/bug_9717_looponly.exp index 0e16c76cedfe5..a0ccb34192e63 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/bug_9717_looponly.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/bug_9717_looponly.exp @@ -39,5 +39,22 @@ module 0xcafe::vectors { } } // end 0xcafe::vectors +// -- Sourcified model before bytecode pipeline +module 0xcafe::vectors { + public entry fun guess_flips_break2(flips: vector): u64 { + let i = 0; + let flipsref5 = &flips; + while (i < 0x1::vector::length(flipsref5)) { + if (*0x1::vector::borrow(flipsref5, i) != 0u8) break; + i = i + 1; + if (*0x1::vector::borrow(flipsref5, i) == 5u8) break; + }; + let _v = copy flips; + let _v2 = flips; + let x = flipsref5; + 0x1::vector::length(x) + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/continue_without_loop.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/continue_without_loop.exp index 858b6714484a5..881149b8ba3ce 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/continue_without_loop.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/continue_without_loop.exp @@ -6,6 +6,13 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + fun continued() { + continue; + } +} + Diagnostics: error: missing enclosing loop statement diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/deep_exp.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/deep_exp.exp index 5651a5e32c294..9d4e0ede67812 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/deep_exp.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/deep_exp.exp @@ -2930,5 +2930,2549 @@ module 
0x42::Test { } } // end 0x42::Test +// -- Sourcified model before bytecode pipeline +module 0x42::Test { + inline fun f1(x: u64): u64 { + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let 
(x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + 
}); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); 
+ x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x 
+ 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 
+ }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + 
}); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); 
+ x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) 
= ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + 
}); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + 
let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + } + inline fun f2(x: u64): u64 { + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + 
x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = 
({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + } + inline fun f3(x: u64): u64 { + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + } + inline fun f4(x: u64): u64 { + let (x) = ({ + let (x) = ({ + let (x) = ({ + let 
(x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + } + inline fun f5(x: u64): u64 { + x + 1 + } + public fun test(): u64 { + 625 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/double_nesting.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/double_nesting.exp index 704e33333decc..013e348b7560a 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/double_nesting.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/double_nesting.exp @@ -22,5 +22,24 @@ module 0x42::test { } } // end 0x42::test +// -- Sourcified model before bytecode pipeline +module 0x42::mathtest2 { + public inline fun fun2(a: u64, b: u64, c: u64): u64 { + 7u128 * (a as u128) + 11u128 * (b as u128) + 13u128 * (c as u128) as u64 + } +} +module 0x42::mathtest { + public inline fun fun1(a: u64, b: u64, c: u64): u64 { + 2u128 * (a as u128) + 3u128 * (b as u128) + 5u128 * (c as u128) as u64 + } +} +module 0x42::test { + use 0x42::mathtest2; + use 0x42::mathtest; + fun test_nested_fun1() { + if (true) () else abort 0; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/inline_accessing_constant.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/inline_accessing_constant.exp index 92bb26104541b..c7758a90c4508 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/inline_accessing_constant.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/inline_accessing_constant.exp @@ -10,5 +10,17 @@ module 0xc0ffee::dummy2 { } } // end 0xc0ffee::dummy2 +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::dummy1 { + public inline fun expose(): u64 { + 1 + } +} +module 0xc0ffee::dummy2 { + public fun main(): u64 { + 1 + } +} + ============ bytecode verification succeeded ======== diff --git 
a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda.exp index 11a8de528f5cb..b9080d09fb545 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda.exp @@ -16,7 +16,7 @@ module 0x42::LambdaTest1 { module 0x42::LambdaTest2 { use 0x42::LambdaTest1; // resolved as: 0x42::LambdaTest1 use std::vector; - public inline fun foreach(v: &vector<#0>,action: |�|) { + public inline fun foreach(v: &vector,action: |&T|) { { let i: u64 = 0; loop { @@ -94,5 +94,74 @@ module 0x42::LambdaTest { } } // end 0x42::LambdaTest +// -- Sourcified model before bytecode pipeline +module 0x42::LambdaTest1 { + public inline fun inline_apply(f: |u64|u64, b: u64): u64 { + f(b) + } + public inline fun inline_apply1(f: |u64|u64, b: u64): u64 { + let (a,b) = (f(b) + 1, 12); + a * 12 + } + public inline fun inline_mul(a: u64, b: u64): u64 { + a * b + } +} +module 0x42::LambdaTest2 { + use 0x42::LambdaTest1; + public inline fun foreach(v: &vector, action: |&T|) { + let i = 0; + while (i < 0x1::vector::length(v)) { + action(0x1::vector::borrow(v, i)); + i = i + 1; + } + } + public inline fun inline_apply2(g: |u64|u64, c: u64): u64 { + { + let (b) = (g({ + let (a,b) = (c, 3); + a * 3 + })); + let (a,b) = ({ + let (z) = (b); + z + } + 1, 12); + a * 12 + } + 2 + } + public inline fun inline_apply3(g: |u64|u64, c: u64): u64 { + LambdaTest1::inline_apply1(g, LambdaTest1::inline_mul(c, LambdaTest1::inline_apply(|x| LambdaTest1::inline_apply(|y| y, x), 3))) + 4 + } + public fun test_inline_lambda() { + let product = 1; + { + let (v) = (&vector[1, 2, 3]); + let i = 0; + while (i < 0x1::vector::length(v)) { + { + let (e) = (0x1::vector::borrow(v, i)); + product = { + let (a,b) = (product, *e); + a * b + } + }; + i = i + 1; + } + }; + } +} +module 0x42::LambdaTest { + use 0x42::LambdaTest2; + public inline fun inline_apply(f: 
|u64|u64, b: u64): u64 { + f(b) + } + public inline fun inline_apply_test(): u64 { + 1120 + } + fun test_lambda() { + if (false) () else abort 0; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda3.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda3.exp new file mode 100644 index 0000000000000..7d3fd078fd386 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda3.exp @@ -0,0 +1,24 @@ +// -- Model dump before bytecode pipeline +module 0x8675309::M { + public fun lambda_not_allowed() { + { + let _x: |u64|u64 = |i: u64| Add(i, 1); + Tuple() + } + } +} // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + public fun lambda_not_allowed() { + let _x = |i| i + 1; + } +} + + +Diagnostics: +error: Function-typed values not yet supported except as parameters to calls to inline functions + ┌─ tests/checking/inlining/lambda3.move:77:18 + │ +77 │ let _x = |i| i + 1; // expected lambda not allowed + │ ^^^^^^^^^ diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda3.move b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda3.move new file mode 100644 index 0000000000000..5450bac87c1cb --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda3.move @@ -0,0 +1,99 @@ +module 0x8675309::M { + // use 0x1::XVector; + + // public inline fun foreach(v: &vector, action: |&T|) { // expected to be not implemented + // let i = 0; + // while (i < XVector::length(v)) { + // action(XVector::borrow(v, i)); + // i = i + 1; + // } + // } + + // public inline fun reduce(v: vector, accu: R, reducer: |T, R|R): R { + // while (!XVector::is_empty(&v)) { + // accu = reducer(XVector::pop_back(&mut v), accu); + // }; + // accu + // } + + + // public fun correct_foreach() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| sum = sum + *e) // 
expected to be not implemented + // } + + // public fun correct_reduce(): u64 { + // let v = vector[1, 2, 3]; + // reduce(v, 0, |t, r| t + r) + // } + + // public fun corrected_nested() { + // let v = vector[vector[1,2], vector[3]]; + // let sum = 0; + // foreach(&v, |e| sum = sum + reduce!(*e, 0, |t, r| t + r)); + // } + + // public inline fun wrong_local_call_arg_count(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // action(XVector::borrow(v, i), i); // expected to have wrong argument count + // i = i + 1; + // } + // } + + // public inline fun wrong_local_call_arg_type(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // action(i); // expected to have wrong argument type + // i = i + 1; + // } + // } + + // public inline fun wrong_local_call_result_type(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // i = i + action(XVector::borrow(v, i)); // expected to have wrong result type + // } + // } + + // public fun wrong_local_call_no_fun(x: u64) { + // x(1) // expected to be not a function + // } + + // public fun wrong_lambda_inferred_type() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| sum = sum + e) // expected to cannot infer type + // } + + // public fun wrong_lambda_result_type() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| { sum = sum + *e; *e }) // expected to have wrong result type of lambda + // } + + public fun lambda_not_allowed() { + let _x = |i| i + 1; // expected lambda not allowed + } + + // struct FieldFunNotAllowed { + // f: |u64|u64, // expected lambda not allowed + // } + + // public fun fun_arg_lambda_not_allowed(x: |u64|) {} // expected lambda not allowed + + // public inline fun macro_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + // abort (1) + // } + // public fun fun_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + // abort (1) + // } +} + 
+// module 0x1::XVector { +// public fun length(v: &vector): u64 { abort(1) } +// public fun is_empty(v: &vector): bool { abort(1) } +// public fun borrow(v: &vector, i: u64): &T { abort(1) } +// public fun pop_back(v: &mut vector): T { abort(1) } +// } diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda4.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda4.exp new file mode 100644 index 0000000000000..14e896bb69ca5 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda4.exp @@ -0,0 +1,13 @@ + +Diagnostics: +error: Functions may not return function-typed values, but function `M::macro_result_lambda_not_allowed` return type is the function type `|u64|`: + ┌─ tests/checking/inlining/lambda4.move:86:58 + │ +86 │ public inline fun macro_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + │ ^^^^^ + +error: Functions may not return function-typed values, but function `M::fun_result_lambda_not_allowed` return type is the function type `|u64|`: + ┌─ tests/checking/inlining/lambda4.move:89:49 + │ +89 │ public fun fun_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + │ ^^^^^ diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda4.move b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda4.move new file mode 100644 index 0000000000000..4a378b7e14b10 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda4.move @@ -0,0 +1,99 @@ +module 0x8675309::M { + // use 0x1::XVector; + + // public inline fun foreach(v: &vector, action: |&T|) { // expected to be not implemented + // let i = 0; + // while (i < XVector::length(v)) { + // action(XVector::borrow(v, i)); + // i = i + 1; + // } + // } + + // public inline fun reduce(v: vector, accu: R, reducer: |T, R|R): R { + // while (!XVector::is_empty(&v)) { + // accu = reducer(XVector::pop_back(&mut v), accu); + // }; + // accu + // } + + + // public fun 
correct_foreach() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| sum = sum + *e) // expected to be not implemented + // } + + // public fun correct_reduce(): u64 { + // let v = vector[1, 2, 3]; + // reduce(v, 0, |t, r| t + r) + // } + + // public fun corrected_nested() { + // let v = vector[vector[1,2], vector[3]]; + // let sum = 0; + // foreach(&v, |e| sum = sum + reduce!(*e, 0, |t, r| t + r)); + // } + + // public inline fun wrong_local_call_arg_count(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // action(XVector::borrow(v, i), i); // expected to have wrong argument count + // i = i + 1; + // } + // } + + // public inline fun wrong_local_call_arg_type(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // action(i); // expected to have wrong argument type + // i = i + 1; + // } + // } + + // public inline fun wrong_local_call_result_type(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // i = i + action(XVector::borrow(v, i)); // expected to have wrong result type + // } + // } + + // public fun wrong_local_call_no_fun(x: u64) { + // x(1) // expected to be not a function + // } + + // public fun wrong_lambda_inferred_type() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| sum = sum + e) // expected to cannot infer type + // } + + // public fun wrong_lambda_result_type() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| { sum = sum + *e; *e }) // expected to have wrong result type of lambda + // } + + // public fun lambda_not_allowed() { + // let _x = |i| i + 1; // expected lambda not allowed + // } + + // struct FieldFunNotAllowed { + // f: |u64|u64, // expected lambda not allowed + // } + + // public fun fun_arg_lambda_not_allowed(x: |u64|) {} // expected lambda not allowed + + public inline fun macro_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + abort (1) + } + public 
fun fun_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + abort (1) + } +} + +// module 0x1::XVector { +// public fun length(v: &vector): u64 { abort(1) } +// public fun is_empty(v: &vector): bool { abort(1) } +// public fun borrow(v: &vector, i: u64): &T { abort(1) } +// public fun pop_back(v: &mut vector): T { abort(1) } +// } diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda5.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda5.exp new file mode 100644 index 0000000000000..78669969275c4 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda5.exp @@ -0,0 +1,7 @@ + +Diagnostics: +error: Functions may not return function-typed values, but function `M::macro_result_lambda_not_allowed` return type is the function type `|u64|`: + ┌─ tests/checking/inlining/lambda5.move:86:58 + │ +86 │ public inline fun macro_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + │ ^^^^^ diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda5.move b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda5.move new file mode 100644 index 0000000000000..291c8e5cab61f --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda5.move @@ -0,0 +1,99 @@ +module 0x8675309::M { + // use 0x1::XVector; + + // public inline fun foreach(v: &vector, action: |&T|) { // expected to be not implemented + // let i = 0; + // while (i < XVector::length(v)) { + // action(XVector::borrow(v, i)); + // i = i + 1; + // } + // } + + // public inline fun reduce(v: vector, accu: R, reducer: |T, R|R): R { + // while (!XVector::is_empty(&v)) { + // accu = reducer(XVector::pop_back(&mut v), accu); + // }; + // accu + // } + + + // public fun correct_foreach() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| sum = sum + *e) // expected to be not implemented + // } + + // public fun correct_reduce(): u64 { + // let v = vector[1, 2, 
3]; + // reduce(v, 0, |t, r| t + r) + // } + + // public fun corrected_nested() { + // let v = vector[vector[1,2], vector[3]]; + // let sum = 0; + // foreach(&v, |e| sum = sum + reduce!(*e, 0, |t, r| t + r)); + // } + + // public inline fun wrong_local_call_arg_count(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // action(XVector::borrow(v, i), i); // expected to have wrong argument count + // i = i + 1; + // } + // } + + // public inline fun wrong_local_call_arg_type(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // action(i); // expected to have wrong argument type + // i = i + 1; + // } + // } + + // public inline fun wrong_local_call_result_type(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // i = i + action(XVector::borrow(v, i)); // expected to have wrong result type + // } + // } + + // public fun wrong_local_call_no_fun(x: u64) { + // x(1) // expected to be not a function + // } + + // public fun wrong_lambda_inferred_type() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| sum = sum + e) // expected to cannot infer type + // } + + // public fun wrong_lambda_result_type() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| { sum = sum + *e; *e }) // expected to have wrong result type of lambda + // } + + // public fun lambda_not_allowed() { + // let _x = |i| i + 1; // expected lambda not allowed + // } + + // struct FieldFunNotAllowed { + // f: |u64|u64, // expected lambda not allowed + // } + + // public fun fun_arg_lambda_not_allowed(x: |u64|) {} // expected lambda not allowed + + public inline fun macro_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + abort (1) + } + // public fun fun_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + // abort (1) + // } +} + +// module 0x1::XVector { +// public fun length(v: &vector): u64 { abort(1) } +// public fun is_empty(v: 
&vector): bool { abort(1) } +// public fun borrow(v: &vector, i: u64): &T { abort(1) } +// public fun pop_back(v: &mut vector): T { abort(1) } +// } diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda_cast.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda_cast.exp index 57cee1446037e..68c24b3325227 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda_cast.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda_cast.exp @@ -59,5 +59,45 @@ module 0x12391283::M { } } // end 0x12391283::M +// -- Sourcified model before bytecode pipeline +module 0x12391283::M { + fun test_1(): u64 { + let accu = 0; + { + let (v) = (vector[115u8, 115u8, 95u8, 112u8, 97u8, 99u8, 107u8, 101u8, 100u8, 32u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8]); + 0x1::vector::reverse(&mut v); + while (!0x1::vector::is_empty(&v)) { + let e = 0x1::vector::pop_back(&mut v); + { + let (elem) = (e); + accu = { + let (sum,addend) = (accu, elem); + sum + (addend as u64) + } + }; + }; + }; + accu + } + fun test_2(): u64 { + let accu = 0; + { + let (v) = (vector[115u8, 115u8, 95u8, 112u8, 97u8, 99u8, 107u8, 101u8, 100u8, 32u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8]); + 0x1::vector::reverse(&mut v); + while (!0x1::vector::is_empty(&v)) { + let e = 0x1::vector::pop_back(&mut v); + { + let (elem) = (e); + accu = { + let (sum,addend) = (accu, elem); + sum + (addend as u64) + } + }; + }; + }; + accu + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda_typed.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda_typed.exp index 11a8de528f5cb..b9080d09fb545 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/lambda_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/lambda_typed.exp @@ -16,7 +16,7 @@ module 0x42::LambdaTest1 { module 0x42::LambdaTest2 { use 0x42::LambdaTest1; // resolved as: 
0x42::LambdaTest1 use std::vector; - public inline fun foreach(v: &vector<#0>,action: |�|) { + public inline fun foreach(v: &vector,action: |&T|) { { let i: u64 = 0; loop { @@ -94,5 +94,74 @@ module 0x42::LambdaTest { } } // end 0x42::LambdaTest +// -- Sourcified model before bytecode pipeline +module 0x42::LambdaTest1 { + public inline fun inline_apply(f: |u64|u64, b: u64): u64 { + f(b) + } + public inline fun inline_apply1(f: |u64|u64, b: u64): u64 { + let (a,b) = (f(b) + 1, 12); + a * 12 + } + public inline fun inline_mul(a: u64, b: u64): u64 { + a * b + } +} +module 0x42::LambdaTest2 { + use 0x42::LambdaTest1; + public inline fun foreach(v: &vector, action: |&T|) { + let i = 0; + while (i < 0x1::vector::length(v)) { + action(0x1::vector::borrow(v, i)); + i = i + 1; + } + } + public inline fun inline_apply2(g: |u64|u64, c: u64): u64 { + { + let (b) = (g({ + let (a,b) = (c, 3); + a * 3 + })); + let (a,b) = ({ + let (z) = (b); + z + } + 1, 12); + a * 12 + } + 2 + } + public inline fun inline_apply3(g: |u64|u64, c: u64): u64 { + LambdaTest1::inline_apply1(g, LambdaTest1::inline_mul(c, LambdaTest1::inline_apply(|x| LambdaTest1::inline_apply(|y| y, x), 3))) + 4 + } + public fun test_inline_lambda() { + let product = 1; + { + let (v) = (&vector[1, 2, 3]); + let i = 0; + while (i < 0x1::vector::length(v)) { + { + let (e) = (0x1::vector::borrow(v, i)); + product = { + let (a,b) = (product, *e); + a * b + } + }; + i = i + 1; + } + }; + } +} +module 0x42::LambdaTest { + use 0x42::LambdaTest2; + public inline fun inline_apply(f: |u64|u64, b: u64): u64 { + f(b) + } + public inline fun inline_apply_test(): u64 { + 1120 + } + fun test_lambda() { + if (false) () else abort 0; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/nested_mul.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/nested_mul.exp index d2a9c2da6684a..fd17730d179b6 100644 --- 
a/third_party/move/move-compiler-v2/tests/checking/inlining/nested_mul.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/nested_mul.exp @@ -16,5 +16,18 @@ module 0x42::test { } } // end 0x42::test +// -- Sourcified model before bytecode pipeline +module 0x42::mathtest { + public inline fun mul_div(a: u64, b: u64, c: u64): u64 { + (a as u128) * (b as u128) / (c as u128) as u64 + } +} +module 0x42::test { + use 0x42::mathtest; + fun test_nested_mul_div() { + if (true) () else abort 0; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/non_lambda_arg.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/non_lambda_arg.exp index aa83fbc21a0bd..88cfe01e12e08 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/non_lambda_arg.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/non_lambda_arg.exp @@ -4,10 +4,10 @@ error: Only inline functions may have function-typed parameters, but non-inline ┌─ tests/checking/inlining/non_lambda_arg.move:4:16 │ 4 │ public fun incorrect_sort(arr: &mut vector, a_less_b: |T, T| bool) { - │ ^^^^^^^^^^^^^^ -------- Parameter `a_less_b` has a function type. + │ ^^^^^^^^^^^^^^ -------- Parameter `a_less_b` has function-valued type `|(T, T)|bool`. error: Only inline functions may have function-typed parameters, but non-inline function `sort::incorrect_sort_recursive` has a function parameter: ┌─ tests/checking/inlining/non_lambda_arg.move:9:16 │ 9 │ public fun incorrect_sort_recursive(arr: &mut vector, low: u64, high: u64, a_less_b: |T, T| bool) { - │ ^^^^^^^^^^^^^^^^^^^^^^^^ -------- Parameter `a_less_b` has a function type. + │ ^^^^^^^^^^^^^^^^^^^^^^^^ -------- Parameter `a_less_b` has function-valued type `|(T, T)|bool`. 
diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/order_sensitive.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/order_sensitive.exp index 5db0ee1deb70d..e6febebcd74cb 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/order_sensitive.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/order_sensitive.exp @@ -92,5 +92,99 @@ module 0x42::OrderSensitiveTest3 { } } // end 0x42::OrderSensitiveTest3 +// -- Sourcified model before bytecode pipeline +module 0x42::OrderSensitiveTest1 { + public inline fun inline_fun1(a: u64, b: u64): u64 { + a * b + } + public inline fun inline_fun2(a: u64, b: u64): u64 { + { + let (a,b) = (a, b); + a * b + } + 2 * { + let (a,b) = (a, b); + a * b + 2 + } + } + public inline fun inline_fun3(a: u64, b: u64): u64 { + a * b + 2 + } +} +module 0x42::OrderSensitiveTest2 { + use 0x42::OrderSensitiveTest1; + public inline fun inline_fun1(a: u64, b: u64): u64 { + a * b + 3 + } + public inline fun inline_fun2(a: u64, b: u64): u64 { + { + let (a,b) = ({ + let (a,b) = (a, b); + a * b + 3 + }, { + let (a,b) = (a, b); + a * b + 4 + }); + { + let (a,b) = (a, b); + a * b + } + 2 * { + let (a,b) = (a, b); + a * b + 2 + } + } + 3 * { + let (a,b) = (a, b); + a * b + 3 + } + 5 * { + let (a,b) = (a, b); + a * b + 4 + } + } + public inline fun inline_fun3(a: u64, b: u64): u64 { + a * b + 4 + } +} +module 0x42::OrderSensitiveTest3 { + use 0x42::OrderSensitiveTest2; + public inline fun fun1(a: u64, b: u64): u64 { + a * b + 5 + } + public fun fun2(a: u64, b: u64): u64 { + { + let (a,b) = (7 * { + let (a,b) = (a, b); + a * b + 5 + }, b); + { + let (a,b) = ({ + let (a,b) = (a, b); + a * b + 3 + }, { + let (a,b) = (a, b); + a * b + 4 + }); + { + let (a,b) = (a, b); + a * b + } + 2 * { + let (a,b) = (a, b); + a * b + 2 + } + } + 3 * { + let (a,b) = (a, b); + a * b + 3 + } + 5 * { + let (a,b) = (a, b); + a * b + 4 + } + } + 9 * { + let (a,b) = (a, b); + a * b + 6 + } + } + public inline fun 
fun3(a: u64, b: u64): u64 { + a * b + 6 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/recursive_nesting.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/recursive_nesting.exp index a7c00dc82c2a1..be3551312c2b4 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/recursive_nesting.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/recursive_nesting.exp @@ -39,5 +39,35 @@ module 0x42::test { } } // end 0x42::test +// -- Sourcified model before bytecode pipeline +module 0x42::mathtest { + public inline fun mul_div(a: u64, b: u64, c: u64): u64 { + (a as u128) * (b as u128) / (c as u128) as u64 + } +} +module 0x42::mathtest2 { + use 0x42::mathtest; + public inline fun mul_div2(a: u64, b: u64, c: u64): u64 { + let (a,b,c) = (b, a, c); + (a as u128) * (b as u128) / (c as u128) as u64 + } +} +module 0x42::mathtest3 { + use 0x42::mathtest2; + public inline fun mul_div3(a: u64, b: u64, c: u64): u64 { + let (a,b,c) = (b, a, c); + let (a,b,c) = (b, a, c); + (a as u128) * (b as u128) / (c as u128) as u64 + } +} +module 0x42::test { + use 0x42::mathtest; + use 0x42::mathtest2; + use 0x42::mathtest3; + fun test_nested_mul_div() { + if (true) () else abort 0; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/resources_valid.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/resources_valid.exp index cf71f9d1297c0..36dc07fcb1f05 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/resources_valid.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/resources_valid.exp @@ -1,12 +1,12 @@ // -- Model dump before bytecode pipeline module 0x42::objects { - struct ReaderRef { + struct ReaderRef { addr: address, } - public fun get_addr(ref: &objects::ReaderRef<#0>): address { - select 
objects::ReaderRef.addr<&objects::ReaderRef>(ref) + public fun get_addr(ref: &ReaderRef): address { + select objects::ReaderRef.addr<&ReaderRef>(ref) } - public inline fun reader(ref: &objects::ReaderRef<#0>): � { + public inline fun reader(ref: &ReaderRef): &T { BorrowGlobal(Immutable)(objects::get_addr(ref)) } } // end 0x42::objects @@ -15,15 +15,42 @@ module 0x42::token { struct Token { val: u64, } - public fun get_value(ref: &objects::ReaderRef): u64 - acquires token::Token(*) + public fun get_value(ref: &objects::ReaderRef): u64 + acquires Token(*) { - select token::Token.val<&token::Token>({ - let (ref: &objects::ReaderRef): (&objects::ReaderRef) = Tuple(ref); - BorrowGlobal(Immutable)(objects::get_addr(ref)) + select token::Token.val<&Token>({ + let (ref: &objects::ReaderRef): (&objects::ReaderRef) = Tuple(ref); + BorrowGlobal(Immutable)(objects::get_addr(ref)) }) } } // end 0x42::token +// -- Sourcified model before bytecode pipeline +module 0x42::objects { + struct ReaderRef has store { + addr: address, + } + public fun get_addr(ref: &ReaderRef): address { + ref.addr + } + public inline fun reader(ref: &ReaderRef): &T { + borrow_global(get_addr(ref)) + } +} +module 0x42::token { + use 0x42::objects; + struct Token has key { + val: u64, + } + public fun get_value(ref: &objects::ReaderRef): u64 + acquires Token + { + { + let (ref) = (ref); + borrow_global(objects::get_addr(ref)) + }.val + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused.exp index 2c3a1c4b158bd..5eb27791a5827 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused.exp @@ -39,5 +39,34 @@ module 0x42::Test { } } // end 0x42::Test +// -- Sourcified model before bytecode pipeline +module 0x42::Test { + 
public inline fun foo(f: |(u64, u64)|, z: u64) { + { + let (_z) = (z); + f(3, 5); + }; + } + public inline fun quux(f: |(u64, u64)|, _z: u64) { + f(3, 5); + } + public fun test_shadowing() { + let _x = 1; + { + { + _x = 3; + }; + }; + if (_x == 3) () else abort 0 + } + public fun test_shadowing2() { + let _x = 1; + { + _x = 3; + }; + if (_x == 3) () else abort 0 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_nodecl.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_nodecl.exp index 6a990cf6bcf0c..96b8d2c461add 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_nodecl.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_nodecl.exp @@ -47,5 +47,34 @@ module 0x42::Test { } } // end 0x42::Test +// -- Sourcified model before bytecode pipeline +module 0x42::Test { + public inline fun foo(f: |(u64, u64)|, z: u64) { + { + let (z) = (z); + f(3, 5); + }; + } + public inline fun quux(f: |(u64, u64)|, z: u64) { + f(3, 5); + } + public fun test_shadowing() { + let _x = 1; + { + { + _x = 3; + }; + }; + if (_x == 3) () else abort 0 + } + public fun test_shadowing2() { + let _x = 1; + { + _x = 3; + }; + if (_x == 3) () else abort 0 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_nodecl_typed.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_nodecl_typed.exp index 4f7f74661aa59..9dc677d49a25e 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_nodecl_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_nodecl_typed.exp @@ -47,5 +47,34 @@ module 0x42::Test { } } // end 0x42::Test +// -- Sourcified model before bytecode pipeline +module 0x42::Test { + public inline fun foo(f: 
|(u64, u64)|, z: u64) { + { + let (z) = (z); + f(3, 5); + }; + } + public inline fun quux(f: |(u64, u64)|, z: u64) { + f(3, 5); + } + public fun test_shadowing() { + let _x = 1; + { + { + _x = 3; + }; + }; + if (_x == 3) () else abort 0 + } + public fun test_shadowing2() { + let _x = 1; + { + _x = 3; + }; + if (_x == 3) () else abort 0 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_typed.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_typed.exp index 2c3a1c4b158bd..5eb27791a5827 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/shadowing_unused_typed.exp @@ -39,5 +39,34 @@ module 0x42::Test { } } // end 0x42::Test +// -- Sourcified model before bytecode pipeline +module 0x42::Test { + public inline fun foo(f: |(u64, u64)|, z: u64) { + { + let (_z) = (z); + f(3, 5); + }; + } + public inline fun quux(f: |(u64, u64)|, _z: u64) { + f(3, 5); + } + public fun test_shadowing() { + let _x = 1; + { + { + _x = 3; + }; + }; + if (_x == 3) () else abort 0 + } + public fun test_shadowing2() { + let _x = 1; + { + _x = 3; + }; + if (_x == 3) () else abort 0 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/spec_inlining.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/spec_inlining.exp index f5a5f2de0f028..c63cdd3aaa435 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/spec_inlining.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/spec_inlining.exp @@ -56,5 +56,56 @@ module 0x42::Test { } } // end 0x42::Test +// -- Sourcified model before bytecode pipeline +module 0x42::Test { + inline fun apply(v: u64, predicate: |u64|bool): bool { + + /* spec { + assert Ge($t0, 0); + } + */ + ; + predicate(v) + } 
+ public fun test_apply(x: u64) { + let r1 = { + let (v) = (x); + + /* spec { + assert Ge(v, 0); + } + */ + ; + let (v) = (v); + v >= 0 + }; + + /* spec { + assert r1; + } + */ + ; + if (r1) () else abort 1; + let r2 = { + let (v) = (x); + + /* spec { + assert Ge(v, 0); + } + */ + ; + let (v) = (v); + v != 0 + }; + + /* spec { + assert r2; + } + */ + ; + if (r2) () else abort 2; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/spec_inlining_typed.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/spec_inlining_typed.exp index f5a5f2de0f028..c63cdd3aaa435 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/spec_inlining_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/spec_inlining_typed.exp @@ -56,5 +56,56 @@ module 0x42::Test { } } // end 0x42::Test +// -- Sourcified model before bytecode pipeline +module 0x42::Test { + inline fun apply(v: u64, predicate: |u64|bool): bool { + + /* spec { + assert Ge($t0, 0); + } + */ + ; + predicate(v) + } + public fun test_apply(x: u64) { + let r1 = { + let (v) = (x); + + /* spec { + assert Ge(v, 0); + } + */ + ; + let (v) = (v); + v >= 0 + }; + + /* spec { + assert r1; + } + */ + ; + if (r1) () else abort 1; + let r2 = { + let (v) = (x); + + /* spec { + assert Ge(v, 0); + } + */ + ; + let (v) = (v); + v != 0 + }; + + /* spec { + assert r2; + } + */ + ; + if (r2) () else abort 2; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/temp_shadowing.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/temp_shadowing.exp index 74d14cbaa3f32..21b28b8d8ae5c 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/temp_shadowing.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/temp_shadowing.exp @@ -57,5 +57,37 @@ module 0x42::Test { } } // end 0x42::Test +// -- Sourcified 
model before bytecode pipeline +module 0x42::Test { + public fun other(a: u64, b: u64): u64 { + let sum = 0; + while (a < b) { + a = a + 1; + sum = { + let (a,b) = (a, b); + let sum = 0; + while (a < b) { + a = a + 1; + sum = sum + a; + }; + sum + } + sum; + }; + sum + } + public inline fun nested(a: u64, b: u64): u64 { + let sum = 0; + while (a < b) { + a = a + 1; + sum = sum + a; + }; + sum + } + public fun test_shadowing() { + let z = other(1, 4); + if (z == 10) () else abort z + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/inlining/test_12670.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/test_12670.exp index e110f2e38ae4f..8122d025a34bb 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/test_12670.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/test_12670.exp @@ -4,18 +4,18 @@ module 0x1::Test { struct S { x: u8, } - private fun foo(xs: vector) { + private fun foo(xs: vector) { { let sum: u8 = 0; { - let (v: &vector): (&vector) = Tuple(Borrow(Immutable)(xs)); + let (v: &vector): (&vector) = Tuple(Borrow(Immutable)(xs)); { let i: u64 = 0; loop { - if Lt(i, vector::length(v)) { + if Lt(i, vector::length(v)) { { - let (e: &Test::S): (&Test::S) = Tuple(vector::borrow(v, i)); - sum: u8 = Add(sum, select Test::S.x<&Test::S>(e)); + let (e: &S): (&S) = Tuple(vector::borrow(v, i)); + sum: u8 = Add(sum, select Test::S.x<&S>(e)); Tuple() }; i: u64 = Add(i, 1) @@ -30,5 +30,26 @@ module 0x1::Test { } } // end 0x1::Test +// -- Sourcified model before bytecode pipeline +module 0x1::Test { + struct S has drop { + x: u8, + } + fun foo(xs: vector) { + let sum = 0u8; + { + let (v) = (&xs); + let i = 0; + while (i < 0x1::vector::length(v)) { + { + let (e) = (0x1::vector::borrow(v, i)); + sum = sum + e.x; + }; + i = i + 1 + } + }; + } +} + ============ bytecode verification succeeded ======== diff --git 
a/third_party/move/move-compiler-v2/tests/checking/inlining/unused_inline.exp b/third_party/move/move-compiler-v2/tests/checking/inlining/unused_inline.exp index 2418f6a234d5d..ba49606523aa6 100644 --- a/third_party/move/move-compiler-v2/tests/checking/inlining/unused_inline.exp +++ b/third_party/move/move-compiler-v2/tests/checking/inlining/unused_inline.exp @@ -40,5 +40,29 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun bar(): u64 { + let i = 0; + while (i < 10) { + i = i + 1; + if (i == 5) { + break; + } + }; + i + } + inline fun foo(): u64 { + let i = 0; + while (i < 10) { + i = i + 1; + if (i == 5) { + break; + } + }; + i + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/checking/naming/duplicate_acquires_list_item.exp b/third_party/move/move-compiler-v2/tests/checking/naming/duplicate_acquires_list_item.exp index c1969b53e490a..3a1b5c04d51a4 100644 --- a/third_party/move/move-compiler-v2/tests/checking/naming/duplicate_acquires_list_item.exp +++ b/third_party/move/move-compiler-v2/tests/checking/naming/duplicate_acquires_list_item.exp @@ -7,23 +7,43 @@ module 0x8675309::M { dummy_field: bool, } private fun t0() - acquires M::R(*) - acquires M::X(*) - acquires M::R(*) + acquires R(*) + acquires X(*) + acquires R(*) { - BorrowGlobal(Mutable)(0x1); - BorrowGlobal(Mutable)(0x1); + BorrowGlobal(Mutable)(0x1); + BorrowGlobal(Mutable)(0x1); Tuple() } private fun t1() - acquires M::R(*) - acquires M::X(*) - acquires M::R(*) - acquires M::R(*) - acquires M::R(*) + acquires R(*) + acquires X(*) + acquires R(*) + acquires R(*) + acquires R(*) { - BorrowGlobal(Mutable)(0x1); - BorrowGlobal(Mutable)(0x1); + BorrowGlobal(Mutable)(0x1); + BorrowGlobal(Mutable)(0x1); Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R has key { + } + struct X has key { + } + fun 
t0() + acquires Racquires Xacquires R + { + borrow_global_mut(0x1); + borrow_global_mut(0x1); + } + fun t1() + acquires Racquires Xacquires Racquires Racquires R + { + borrow_global_mut(0x1); + borrow_global_mut(0x1); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/naming/generics_shadowing.exp b/third_party/move/move-compiler-v2/tests/checking/naming/generics_shadowing.exp index 7d025d34595a6..8edd453e33526 100644 --- a/third_party/move/move-compiler-v2/tests/checking/naming/generics_shadowing.exp +++ b/third_party/move/move-compiler-v2/tests/checking/naming/generics_shadowing.exp @@ -3,7 +3,7 @@ module 0x2::M { struct S { dummy_field: bool, } - private fun foo(s: #0): #0 { + private fun foo(s: S): S { { let s: S = s; { @@ -13,3 +13,14 @@ module 0x2::M { } } } // end 0x2::M + +// -- Sourcified model before bytecode pipeline +module 0x2::M { + struct S { + } + fun foo(s: S): S { + let s = s; + let s = s; + s + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/naming/global_builtin_one_type_argument.exp b/third_party/move/move-compiler-v2/tests/checking/naming/global_builtin_one_type_argument.exp index 441e25f4f2b3b..b2fecf533bafc 100644 --- a/third_party/move/move-compiler-v2/tests/checking/naming/global_builtin_one_type_argument.exp +++ b/third_party/move/move-compiler-v2/tests/checking/naming/global_builtin_one_type_argument.exp @@ -4,18 +4,18 @@ module 0x8675309::M { dummy_field: bool, } private fun t(account: &signer) - acquires M::R(*) + acquires R(*) { { - let _: bool = exists(0x0); + let _: bool = exists(0x0); { - let (): () = MoveTo(account, pack M::R(false)); + let (): () = MoveTo(account, pack M::R(false)); { - let _: &M::R = BorrowGlobal(Immutable)(0x0); + let _: &R = BorrowGlobal(Immutable)(0x0); { - let _: &mut M::R = BorrowGlobal(Mutable)(0x0); + let _: &mut R = BorrowGlobal(Mutable)(0x0); { - let M::R{ dummy_field: _ } = MoveFrom(0x0); + let M::R{ dummy_field: _ } = MoveFrom(0x0); Tuple() } } @@ -24,3 +24,18 @@ 
module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R has key { + } + fun t(account: &signer) + acquires R + { + let _ = exists(0x0); + let () = move_to(account, R{}); + let _ = borrow_global(0x0); + let _ = borrow_global_mut(0x0); + let R{} = move_from(0x0); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/naming/struct_in_current_module.exp b/third_party/move/move-compiler-v2/tests/checking/naming/struct_in_current_module.exp index 8e0fcca2100b4..981ba43a7d97e 100644 --- a/third_party/move/move-compiler-v2/tests/checking/naming/struct_in_current_module.exp +++ b/third_party/move/move-compiler-v2/tests/checking/naming/struct_in_current_module.exp @@ -8,7 +8,7 @@ module 0x8675309::M { } private fun foo() { { - let _: M::S = pack M::S(0); + let _: S = pack M::S(0); { let M::R{ f: _ } = pack M::R(0); Tuple() @@ -16,3 +16,17 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + struct S has drop { + f: u64, + } + fun foo() { + let _ = S{f: 0}; + let R{f: _} = R{f: 0}; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/naming/unused_type_parameter_struct.exp b/third_party/move/move-compiler-v2/tests/checking/naming/unused_type_parameter_struct.exp index bc74029181400..68d6effb7bd3f 100644 --- a/third_party/move/move-compiler-v2/tests/checking/naming/unused_type_parameter_struct.exp +++ b/third_party/move/move-compiler-v2/tests/checking/naming/unused_type_parameter_struct.exp @@ -42,23 +42,43 @@ warning: unused type parameter // -- Model dump before bytecode pipeline module 0x42::test { - struct S0 { + struct S0 { dummy_field: bool, } - struct S1 { + struct S1 { dummy_field: bool, } - struct S2 { - f: test::S3<#1>, + struct S2 { + f: S3, } - struct S3 { + struct S3 { dummy_field: bool, } - struct S4 { - f: vector<#0>, + struct S4 { + f: vector, } - struct 
S5 { - f: vector<#0>, - g: vector<#1>, + struct S5 { + f: vector, + g: vector, } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + struct S0 { + } + struct S1 { + } + struct S2 { + f: S3, + } + struct S3 { + } + struct S4 { + f: vector, + } + struct S5 { + f: vector, + g: vector, + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/naming/warning_dependency.exp b/third_party/move/move-compiler-v2/tests/checking/naming/warning_dependency.exp index d18cbc88a3820..2c4e4059c504f 100644 --- a/third_party/move/move-compiler-v2/tests/checking/naming/warning_dependency.exp +++ b/third_party/move/move-compiler-v2/tests/checking/naming/warning_dependency.exp @@ -11,23 +11,27 @@ module 0x42::dependency { use 0x42::test::{S0}; } // end 0x42::dependency module 0x42::test { - struct S0 { + struct S0 { dummy_field: bool, } - struct S1 { + struct S1 { dummy_field: bool, } - struct S2 { - f: test::S3<#1>, + struct S2 { + f: S3, } - struct S3 { + struct S3 { dummy_field: bool, } - struct S4 { - f: vector<#0>, + struct S4 { + f: vector, } - struct S5 { - f: vector<#0>, - g: vector<#1>, + struct S5 { + f: vector, + g: vector, } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::dependency { +} diff --git a/third_party/move/move-compiler-v2/tests/checking/positional_fields/assign_field.exp b/third_party/move/move-compiler-v2/tests/checking/positional_fields/assign_field.exp index f84ae045ce12c..c84034b0a95a8 100644 --- a/third_party/move/move-compiler-v2/tests/checking/positional_fields/assign_field.exp +++ b/third_party/move/move-compiler-v2/tests/checking/positional_fields/assign_field.exp @@ -6,7 +6,7 @@ module 0x42::test { 1: bool, } V2 { - 0: test::S3, + 0: S3, } } struct S0 { @@ -17,20 +17,20 @@ module 0x42::test { 1: bool, } struct S2 { - 0: test::S0, + 0: S0, 1: u8, } struct S3 { - 0: test::S2, - 1: test::S0, - 2: test::S2, + 0: S2, + 1: S0, + 2: S2, } private fun assign0(a: u64,b: 
bool) { { - let x: test::S1 = pack test::S1(a, b); + let x: S1 = pack test::S1(a, b); loop { - if select test::S1.1(x) { - x: test::S1 = pack test::S1(Sub(select test::S1.0(x), 1), Ge(select test::S1.0(x), 1)); + if select test::S1.1(x) { + x: S1 = pack test::S1(Sub(select test::S1.0(x), 1), Ge(select test::S1.0(x), 1)); Tuple() } else { break @@ -38,18 +38,18 @@ module 0x42::test { } } } - private fun assign1(x: test::S1): u64 { + private fun assign1(x: S1): u64 { { let count: u64 = 0; loop { - if select test::S1.1(x) { + if select test::S1.1(x) { { - let y: u64 = if Gt(select test::S1.0(x), 0) { - Sub(select test::S1.0(x), 1) + let y: u64 = if Gt(select test::S1.0(x), 0) { + Sub(select test::S1.0(x), 1) } else { 0 }; - x: test::S1 = pack test::S1(y, Ge(y, 1)); + x: S1 = pack test::S1(y, Ge(y, 1)); count: u64 = Add(count, 1); Tuple() } @@ -60,14 +60,14 @@ module 0x42::test { count } } - private fun assign_chained(x: test::S3) { - Add(Add(select test::S0.x(select test::S2.0(select test::S3.0(x))), select test::S0.x(select test::S3.1(x))), select test::S0.x(select test::S2.0(select test::S3.2(x)))); - select test::S0.x(select test::S2.0(select test::S3.0(x))) = 0; - select test::S0.x(select test::S3.1(x)) = 1; - select test::S0.x(select test::S2.0(select test::S3.2(x))) = 2; + private fun assign_chained(x: S3) { + Add(Add(select test::S0.x(select test::S2.0(select test::S3.0(x))), select test::S0.x(select test::S3.1(x))), select test::S0.x(select test::S2.0(select test::S3.2(x)))); + select test::S0.x(select test::S2.0(select test::S3.0(x))) = 0; + select test::S0.x(select test::S3.1(x)) = 1; + select test::S0.x(select test::S2.0(select test::S3.2(x))) = 2; Tuple() } - private fun assign_enum(x: &mut test::E) { + private fun assign_enum(x: &mut E) { match (x) { test::E::V1{ 0: x, 1: y } => { x = 42; @@ -75,22 +75,93 @@ module 0x42::test { Tuple() } test::E::V2{ 0: x } => { - select test::S0.x(select test::S2.0(select test::S3.0<&mut test::S3>(x))) = 0; - select 
test::S0.x(select test::S3.1<&mut test::S3>(x)) = 1; - select test::S0.x(select test::S2.0(select test::S3.2<&mut test::S3>(x))) = 2; + select test::S0.x(select test::S2.0(select test::S3.0<&mut S3>(x))) = 0; + select test::S0.x(select test::S3.1<&mut S3>(x)) = 1; + select test::S0.x(select test::S2.0(select test::S3.2<&mut S3>(x))) = 2; Tuple() } } } - private fun simple(x: test::S1) { - select test::S1.0(x) = 42; - select test::S1.1(x) = true; + private fun simple(x: S1) { + select test::S1.0(x) = 42; + select test::S1.1(x) = true; Tuple() } - private fun simple_ref(x: &mut test::S1) { - select test::S1.0<&mut test::S1>(x) = 42; - select test::S1.1<&mut test::S1>(x) = true; + private fun simple_ref(x: &mut S1) { + select test::S1.0<&mut S1>(x) = 42; + select test::S1.1<&mut S1>(x) = true; Tuple() } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + enum E { + V1 { + 0: u8, + 1: bool, + } + V2 { + 0: S3, + } + } + struct S0 { + x: u8, + } + struct S1 { + 0: u64, + 1: bool, + } + struct S2 { + 0: S0, + 1: u8, + } + struct S3 { + 0: S2, + 1: S0, + 2: S2, + } + fun assign0(a: u64, b: bool) { + let x = S1(a,b); + while (x.1) { + x = S1(x.0 - 1,x.0 >= 1); + } + } + fun assign1(x: S1): u64 { + let count = 0; + while (x.1) { + let y = if (x.0 > 0) x.0 - 1 else 0; + x = S1(y,y >= 1); + count = count + 1; + }; + count + } + fun assign_chained(x: S3) { + x.0.0.x + x.1.x + x.2.0.x; + x.0.0.x = 0u8; + x.1.x = 1u8; + x.2.0.x = 2u8; + } + fun assign_enum(x: &mut E) { + match (x) { + E::V1(x,y) => { + *x = 42u8; + *y = true; + }, + E::V2(x) => { + x.0.0.x = 0u8; + x.1.x = 1u8; + x.2.0.x = 2u8; + }, + } + } + fun simple(x: S1) { + x.0 = 42; + x.1 = true; + } + fun simple_ref(x: &mut S1) { + x.0 = 42; + x.1 = true; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/positional_fields/bind_anonymous_field.exp b/third_party/move/move-compiler-v2/tests/checking/positional_fields/bind_anonymous_field.exp index 
09aaa32647c33..f0ef1a992ba40 100644 --- a/third_party/move/move-compiler-v2/tests/checking/positional_fields/bind_anonymous_field.exp +++ b/third_party/move/move-compiler-v2/tests/checking/positional_fields/bind_anonymous_field.exp @@ -2,10 +2,10 @@ module 0x42::test { enum E1 { V1 { - 0: test::S0, + 0: S0, } V2 { - 0: test::S1, + 0: S1, } } struct S0 { @@ -13,9 +13,9 @@ module 0x42::test { } struct S1 { 0: bool, - 1: test::S0, + 1: S0, } - private fun match(x: test::E1) { + private fun match(x: E1) { match (x) { test::E1::V1{ 0: test::S0{ 0: _x } } => { Tuple() @@ -26,16 +26,47 @@ module 0x42::test { } } - private fun nested(x: test::S1) { + private fun nested(x: S1) { { let test::S1{ 0: _x, 1: test::S0{ 0: _y } } = x; Tuple() } } - private fun simple(x: test::S0) { + private fun simple(x: S0) { { let test::S0{ 0: _x } = x; Tuple() } } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + enum E1 { + V1 { + 0: S0, + } + V2 { + 0: S1, + } + } + struct S0 { + 0: u8, + } + struct S1 { + 0: bool, + 1: S0, + } + fun match(x: E1) { + match (x) { + E1::V1(S0(_x)) => (), + E1::V2(S1(_x,S0(_y))) => (), + } + } + fun nested(x: S1) { + let S1(_x,S0(_y)) = x; + } + fun simple(x: S0) { + let S0(_x) = x; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/positional_fields/common_access.exp b/third_party/move/move-compiler-v2/tests/checking/positional_fields/common_access.exp index b0f4b0957c213..38c21b2652857 100644 --- a/third_party/move/move-compiler-v2/tests/checking/positional_fields/common_access.exp +++ b/third_party/move/move-compiler-v2/tests/checking/positional_fields/common_access.exp @@ -8,7 +8,22 @@ module 0x42::test { 0: u8, } } - private fun common_access(x: test::Foo): u8 { - select_variants test::Foo.A.0|test::Foo.B.0(x) + private fun common_access(x: Foo): u8 { + select_variants test::Foo.A.0|test::Foo.B.0(x) } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + 
enum Foo has drop { + A { + 0: u8, + } + B { + 0: u8, + } + } + fun common_access(x: Foo): u8 { + x.A.0 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/positional_fields/decl_ok.exp b/third_party/move/move-compiler-v2/tests/checking/positional_fields/decl_ok.exp index 5280e3ddd0a51..69f0be3a2e1ee 100644 --- a/third_party/move/move-compiler-v2/tests/checking/positional_fields/decl_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/positional_fields/decl_ok.exp @@ -15,13 +15,13 @@ module 0x42::test { 0: u8, 1: bool, } - struct S3 { - 0: #1, + struct S3 { + 0: T2, 1: u8, - 2: #0, + 2: T1, } - private fun bar(x: test::S2) { - select test::S2.0(x); + private fun bar(x: S2) { + select test::S2.0(x); Tuple() } private fun baz() { @@ -30,9 +30,44 @@ module 0x42::test { pack test::E1::V3(42, true); Tuple() } - private fun foo(x: test::S2) { - select test::S2.0(x); - select test::S2.1(x); + private fun foo(x: S2) { + select test::S2.0(x); + select test::S2.1(x); Tuple() } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + enum E1 { + V1, + V2, + V3 { + 0: u8, + 1: bool, + } + } + struct S1 { + } + struct S2 { + 0: u8, + 1: bool, + } + struct S3 { + 0: T2, + 1: u8, + 2: T1, + } + fun bar(x: S2) { + x.0; + } + fun baz() { + E1::V1{}; + E1::V2{}; + E1::V3(42u8,true); + } + fun foo(x: S2) { + x.0; + x.1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/positional_fields/named_tuple_ability_decl_ok.exp b/third_party/move/move-compiler-v2/tests/checking/positional_fields/named_tuple_ability_decl_ok.exp index 3271dea2b7566..5b6703568a6ab 100644 --- a/third_party/move/move-compiler-v2/tests/checking/positional_fields/named_tuple_ability_decl_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/positional_fields/named_tuple_ability_decl_ok.exp @@ -6,23 +6,50 @@ module 0x42::test { struct S1 { 0: u8, } - struct S2 { - 0: #0, + struct S2 { + 0: T, 1: u8, } - struct S3 { - 0: #0, + 
struct S3 { + 0: T, 1: u8, } - struct S4 { + struct S4 { x: u8, - y: #0, + y: T, } - struct S5 { - 0: #0, - 1: test::S3<#0>, + struct S5 { + 0: T, + 1: S3, } - struct S6 { + struct S6 { dummy_field: bool, } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + struct S has copy, key { + } + struct S1 has drop { + 0: u8, + } + struct S2 has key { + 0: T, + 1: u8, + } + struct S3 has key { + 0: T, + 1: u8, + } + struct S4 has drop { + x: u8, + y: T, + } + struct S5 has key { + 0: T, + 1: S3, + } + struct S6 { + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/positional_fields/named_tuple_construct_ok.exp b/third_party/move/move-compiler-v2/tests/checking/positional_fields/named_tuple_construct_ok.exp index c56d7f9c7a5c1..faaf3481933e0 100644 --- a/third_party/move/move-compiler-v2/tests/checking/positional_fields/named_tuple_construct_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/positional_fields/named_tuple_construct_ok.exp @@ -18,34 +18,34 @@ module 0x42::test { 0: u8, 1: bool, } - struct S3 { - 0: #0, + struct S3 { + 0: T, 1: u8, } struct S4 { dummy_field: bool, } - struct S5 { - x: #0, + struct S5 { + x: T, y: u8, } - private fun S0_inhabited(): test::S0 { + private fun S0_inhabited(): S0 { pack test::S0(false) } - private fun S1_inhabited(): test::S1 { + private fun S1_inhabited(): S1 { pack test::S1(0) } - private fun S2_inhabited(): test::S2 { + private fun S2_inhabited(): S2 { pack test::S2(0, false) } - private fun S3_test(x: #0): test::S3<#0> { + private fun S3_test(x: T): S3 { pack test::S3(x, 0) } - private fun nested_0(): test::S3 { - pack test::S3(pack test::S4(false), 0) + private fun nested_0(): S3 { + pack test::S3(pack test::S4(false), 0) } - private fun nested_1(): test::S5 { - pack test::S5(pack test::S0(false), 0) + private fun nested_1(): S5 { + pack test::S5(pack test::S0(false), 0) } private fun test_variant() { pack test::E1::V1(); @@ -54,3 +54,57 @@ module 0x42::test 
{ Tuple() } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + enum E1 { + V1, + V2, + V3 { + 0: u8, + 1: bool, + } + } + struct S0 { + } + struct S1 { + 0: u8, + } + struct S2 { + 0: u8, + 1: bool, + } + struct S3 { + 0: T, + 1: u8, + } + struct S4 { + } + struct S5 { + x: T, + y: u8, + } + fun S0_inhabited(): S0 { + S0{} + } + fun S1_inhabited(): S1 { + S1(0u8) + } + fun S2_inhabited(): S2 { + S2(0u8,false) + } + fun S3_test(x: T): S3 { + S3(x,0u8) + } + fun nested_0(): S3 { + S3(S4{},0u8) + } + fun nested_1(): S5 { + S5{x: S0{},y: 0u8} + } + fun test_variant() { + E1::V1{}; + E1::V2{}; + E1::V3(42u8,true); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/positional_fields/variant_ability_decl_ok.exp b/third_party/move/move-compiler-v2/tests/checking/positional_fields/variant_ability_decl_ok.exp index 2b5cb1622dfa7..ee8fd6fdb431b 100644 --- a/third_party/move/move-compiler-v2/tests/checking/positional_fields/variant_ability_decl_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/positional_fields/variant_ability_decl_ok.exp @@ -1,17 +1,17 @@ // -- Model dump before bytecode pipeline module 0x42::test { - enum Bar { + enum Bar { A { - 0: #0, + 0: T, } B { 0: u8, 1: bool, } } - enum Foo { + enum Foo { A { - 0: #0, + 0: T, } B { 0: u8, @@ -19,3 +19,25 @@ module 0x42::test { } } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + enum Bar has copy, drop { + A { + 0: T, + } + B { + 0: u8, + 1: bool, + } + } + enum Foo has copy, drop { + A { + 0: T, + } + B { + 0: u8, + 1: bool, + } + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/receiver/calls.exp b/third_party/move/move-compiler-v2/tests/checking/receiver/calls.exp index 7710862209187..db1d6cf8b7e2f 100644 --- a/third_party/move/move-compiler-v2/tests/checking/receiver/calls.exp +++ b/third_party/move/move-compiler-v2/tests/checking/receiver/calls.exp @@ -3,25 +3,51 @@ module 
0x42::m { struct S { x: u64, } - private inline fun inline_receiver_ref_mut(self: &mut m::S,y: u64): u64 { - Add(select m::S.x<&mut m::S>(self), y) + private inline fun inline_receiver_ref_mut(self: &mut S,y: u64): u64 { + Add(select m::S.x<&mut S>(self), y) } - private fun receiver(self: m::S,y: u64): u64 { - Add(select m::S.x(self), y) + private fun receiver(self: S,y: u64): u64 { + Add(select m::S.x(self), y) } - private fun receiver_ref(self: &m::S,y: u64): u64 { - Add(select m::S.x<&m::S>(self), y) + private fun receiver_ref(self: &S,y: u64): u64 { + Add(select m::S.x<&S>(self), y) } - private fun receiver_ref_mut(self: &mut m::S,y: u64): u64 { - Add(select m::S.x<&mut m::S>(self), y) + private fun receiver_ref_mut(self: &mut S,y: u64): u64 { + Add(select m::S.x<&mut S>(self), y) } - private fun test_call_styles(s: m::S): u64 { + private fun test_call_styles(s: S): u64 { m::receiver(s, 1); m::receiver_ref(Borrow(Immutable)(s), 1); m::receiver_ref_mut(Borrow(Mutable)(s), 1); { - let (self: &mut m::S, y: u64): (&mut m::S, u64) = Tuple(Borrow(Mutable)(s), 1); - Add(select m::S.x<&mut m::S>(self), 1) + let (self: &mut S, y: u64): (&mut S, u64) = Tuple(Borrow(Mutable)(s), 1); + Add(select m::S.x<&mut S>(self), 1) } } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct S { + x: u64, + } + inline fun inline_receiver_ref_mut(self: &mut S, y: u64): u64 { + self.x + y + } + fun receiver(self: S, y: u64): u64 { + self.x + y + } + fun receiver_ref(self: &S, y: u64): u64 { + self.x + y + } + fun receiver_ref_mut(self: &mut S, y: u64): u64 { + self.x + y + } + fun test_call_styles(s: S): u64 { + receiver(s, 1); + receiver_ref(&s, 1); + receiver_ref_mut(&mut s, 1); + let (self,y) = (&mut s, 1); + self.x + 1 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/receiver/calls_with_freeze.exp b/third_party/move/move-compiler-v2/tests/checking/receiver/calls_with_freeze.exp index b21f306bed416..3568ee31e45af 100644 --- 
a/third_party/move/move-compiler-v2/tests/checking/receiver/calls_with_freeze.exp +++ b/third_party/move/move-compiler-v2/tests/checking/receiver/calls_with_freeze.exp @@ -3,20 +3,20 @@ module 0x42::m { struct S { x: u64, } - private fun sum(self: &m::S,_other: &m::S): u64 { + private fun sum(self: &S,_other: &S): u64 { Abort(1) } - private fun test_arg_freeze(s: m::S): u64 { + private fun test_arg_freeze(s: S): u64 { { - let p1: &m::S = Borrow(Immutable)(s); + let p1: &S = Borrow(Immutable)(s); { - let p1m: &mut m::S = Borrow(Mutable)(s); + let p1m: &mut S = Borrow(Mutable)(s); { - let s2: m::S = pack m::S(4); + let s2: S = pack m::S(4); { - let p2: &m::S = Borrow(Immutable)(s2); + let p2: &S = Borrow(Immutable)(s2); { - let p2m: &mut m::S = Borrow(Mutable)(s); + let p2m: &mut S = Borrow(Mutable)(s); { let x1: u64 = m::sum(Freeze(false)(p1m), p1); { @@ -37,3 +37,25 @@ module 0x42::m { } } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct S { + x: u64, + } + fun sum(self: &S, _other: &S): u64 { + abort 1 + } + fun test_arg_freeze(s: S): u64 { + let p1 = &s; + let p1m = &mut s; + let s2 = S{x: 4}; + let p2 = &s2; + let p2m = &mut s; + let x1 = sum(/*freeze*/p1m, p1); + let x2 = sum(/*freeze*/p1m, /*freeze*/p1m); + let x3 = sum(/*freeze*/p1m, p2); + let x4 = sum(/*freeze*/p2m, /*freeze*/p2m); + x1 + x2 + x3 + x4 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/receiver/decl_errors.exp b/third_party/move/move-compiler-v2/tests/checking/receiver/decl_errors.exp index 8df2af098de89..4cbfc158da8cb 100644 --- a/third_party/move/move-compiler-v2/tests/checking/receiver/decl_errors.exp +++ b/third_party/move/move-compiler-v2/tests/checking/receiver/decl_errors.exp @@ -38,14 +38,14 @@ module 0x42::n { } // end 0x42::n module 0x42::m { use 0x42::n::{T}; // resolved as: 0x42::n - struct G { - x: #0, - y: #1, + struct G { + x: T, + y: R, } struct S { x: u64, } - private fun receiver(self: m::S) { + private fun 
receiver(self: S) { Tuple() } private fun receiver_for_external_type(self: n::T) { @@ -57,10 +57,39 @@ module 0x42::m { private fun receiver_for_primitive(self: &u64) { Tuple() } - private fun receiver_non_linear_instantiated(self: m::G<#0, #0>) { + private fun receiver_non_linear_instantiated(self: G) { Tuple() } - private fun receiver_partial_instantiated(self: m::G) { + private fun receiver_partial_instantiated(self: G) { Tuple() } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::n { + struct T { + x: u64, + } +} +module 0x42::m { + use 0x42::n; + struct G { + x: T, + y: R, + } + struct S { + x: u64, + } + fun receiver(self: S) { + } + fun receiver_for_external_type(self: n::T) { + } + fun receiver_for_external_vector(self: vector) { + } + fun receiver_for_primitive(self: &u64) { + } + fun receiver_non_linear_instantiated(self: G) { + } + fun receiver_partial_instantiated(self: G) { + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/receiver/dont_warn_unused_self.exp b/third_party/move/move-compiler-v2/tests/checking/receiver/dont_warn_unused_self.exp index 135e3d8795172..2f6c8d9fb2115 100644 --- a/third_party/move/move-compiler-v2/tests/checking/receiver/dont_warn_unused_self.exp +++ b/third_party/move/move-compiler-v2/tests/checking/receiver/dont_warn_unused_self.exp @@ -11,7 +11,7 @@ module 0x42::m { struct S { x: u64, } - private fun receiver(self: m::S,y: u64) { + private fun receiver(self: S,y: u64) { Tuple() } spec { @@ -19,3 +19,12 @@ module 0x42::m { } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct S has drop { + x: u64, + } + fun receiver(self: S, y: u64) { + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/receiver/generic_calls.exp b/third_party/move/move-compiler-v2/tests/checking/receiver/generic_calls.exp index b396815647805..765d427973c0e 100644 --- a/third_party/move/move-compiler-v2/tests/checking/receiver/generic_calls.exp +++ 
b/third_party/move/move-compiler-v2/tests/checking/receiver/generic_calls.exp @@ -1,32 +1,32 @@ // -- Model dump before bytecode pipeline module 0x42::m { - struct S { - x: #0, + struct S { + x: T, } - private fun id(self: m::S<#0>): m::S<#0> { + private fun id(self: S): S { self } - private inline fun inlined(f: |m::S<#0>|m::S<#0>,s: m::S<#0>) { + private inline fun inlined(f: |S|S,s: S) { (f)(s); Tuple() } - private fun receiver(self: m::S<#0>,y: #0) { - select m::S.x>(self) = y; + private fun receiver(self: S,y: T) { + select m::S.x>(self) = y; Tuple() } - private fun receiver_more_generics(self: m::S<#0>,_y: #1) { + private fun receiver_more_generics(self: S,_y: R) { Tuple() } - private fun receiver_needs_type_args(self: m::S<#0>,_y: #0) { + private fun receiver_needs_type_args(self: S,_y: T) { Abort(1) } - private fun receiver_ref(self: &m::S<#0>,_y: #0) { + private fun receiver_ref(self: &S,_y: T) { Tuple() } - private fun receiver_ref_mut(self: &mut m::S<#0>,y: #0) { - select m::S.x<&mut m::S>(self) = y + private fun receiver_ref_mut(self: &mut S,y: T) { + select m::S.x<&mut S>(self) = y } - private fun test_call_styles(s: m::S,x: u64) { + private fun test_call_styles(s: S,x: u64) { m::receiver(s, x); m::receiver_ref(Borrow(Immutable)(s), x); m::receiver_ref_mut(Borrow(Mutable)(s), x); @@ -34,14 +34,54 @@ module 0x42::m { m::receiver_needs_type_args(s, x); Tuple() } - private fun test_receiver_inference(s: m::S) { + private fun test_receiver_inference(s: S) { { - let (s: m::S): (m::S) = Tuple(s); + let (s: S): (S) = Tuple(s); { - let (s: m::S): (m::S) = Tuple(s); + let (s: S): (S) = Tuple(s); m::id(s) }; Tuple() } } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct S { + x: T, + } + fun id(self: S): S { + self + } + inline fun inlined(f: |S|S, s: S) { + f(s); + } + fun receiver(self: S, y: T) { + self.x = y; + } + fun receiver_more_generics(self: S, _y: R) { + } + fun receiver_needs_type_args(self: S, _y: T) { + 
abort 1 + } + fun receiver_ref(self: &S, _y: T) { + } + fun receiver_ref_mut(self: &mut S, y: T) { + self.x = y + } + fun test_call_styles(s: S, x: u64) { + receiver(s, x); + receiver_ref(&s, x); + receiver_ref_mut(&mut s, x); + receiver_more_generics(s, 22); + receiver_needs_type_args(s, x); + } + fun test_receiver_inference(s: S) { + let (s) = (s); + { + let (s) = (s); + id(s) + }; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/receiver/generic_calls_typed.exp b/third_party/move/move-compiler-v2/tests/checking/receiver/generic_calls_typed.exp index b396815647805..765d427973c0e 100644 --- a/third_party/move/move-compiler-v2/tests/checking/receiver/generic_calls_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking/receiver/generic_calls_typed.exp @@ -1,32 +1,32 @@ // -- Model dump before bytecode pipeline module 0x42::m { - struct S { - x: #0, + struct S { + x: T, } - private fun id(self: m::S<#0>): m::S<#0> { + private fun id(self: S): S { self } - private inline fun inlined(f: |m::S<#0>|m::S<#0>,s: m::S<#0>) { + private inline fun inlined(f: |S|S,s: S) { (f)(s); Tuple() } - private fun receiver(self: m::S<#0>,y: #0) { - select m::S.x>(self) = y; + private fun receiver(self: S,y: T) { + select m::S.x>(self) = y; Tuple() } - private fun receiver_more_generics(self: m::S<#0>,_y: #1) { + private fun receiver_more_generics(self: S,_y: R) { Tuple() } - private fun receiver_needs_type_args(self: m::S<#0>,_y: #0) { + private fun receiver_needs_type_args(self: S,_y: T) { Abort(1) } - private fun receiver_ref(self: &m::S<#0>,_y: #0) { + private fun receiver_ref(self: &S,_y: T) { Tuple() } - private fun receiver_ref_mut(self: &mut m::S<#0>,y: #0) { - select m::S.x<&mut m::S>(self) = y + private fun receiver_ref_mut(self: &mut S,y: T) { + select m::S.x<&mut S>(self) = y } - private fun test_call_styles(s: m::S,x: u64) { + private fun test_call_styles(s: S,x: u64) { m::receiver(s, x); m::receiver_ref(Borrow(Immutable)(s), x); 
m::receiver_ref_mut(Borrow(Mutable)(s), x); @@ -34,14 +34,54 @@ module 0x42::m { m::receiver_needs_type_args(s, x); Tuple() } - private fun test_receiver_inference(s: m::S) { + private fun test_receiver_inference(s: S) { { - let (s: m::S): (m::S) = Tuple(s); + let (s: S): (S) = Tuple(s); { - let (s: m::S): (m::S) = Tuple(s); + let (s: S): (S) = Tuple(s); m::id(s) }; Tuple() } } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct S { + x: T, + } + fun id(self: S): S { + self + } + inline fun inlined(f: |S|S, s: S) { + f(s); + } + fun receiver(self: S, y: T) { + self.x = y; + } + fun receiver_more_generics(self: S, _y: R) { + } + fun receiver_needs_type_args(self: S, _y: T) { + abort 1 + } + fun receiver_ref(self: &S, _y: T) { + } + fun receiver_ref_mut(self: &mut S, y: T) { + self.x = y + } + fun test_call_styles(s: S, x: u64) { + receiver(s, x); + receiver_ref(&s, x); + receiver_ref_mut(&mut s, x); + receiver_more_generics(s, 22); + receiver_needs_type_args(s, x); + } + fun test_receiver_inference(s: S) { + let (s) = (s); + { + let (s) = (s); + id(s) + }; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/receiver/same_names.exp b/third_party/move/move-compiler-v2/tests/checking/receiver/same_names.exp index b45494f8c2cf6..9f205590de554 100644 --- a/third_party/move/move-compiler-v2/tests/checking/receiver/same_names.exp +++ b/third_party/move/move-compiler-v2/tests/checking/receiver/same_names.exp @@ -3,16 +3,16 @@ module 0x42::b { struct MyOtherList { len: u64, } - public fun len(self: &b::MyOtherList): u64 { - select b::MyOtherList.len<&b::MyOtherList>(self) + public fun len(self: &MyOtherList): u64 { + select b::MyOtherList.len<&MyOtherList>(self) } } // end 0x42::b module 0x42::a { struct MyList { len: u64, } - public fun len(self: &a::MyList): u64 { - select a::MyList.len<&a::MyList>(self) + public fun len(self: &MyList): u64 { + select a::MyList.len<&MyList>(self) } } // end 0x42::a module 0x42::c 
{ @@ -35,3 +35,33 @@ module 0x42::c { } } } // end 0x42::c + +// -- Sourcified model before bytecode pipeline +module 0x42::b { + struct MyOtherList { + len: u64, + } + public fun len(self: &MyOtherList): u64 { + self.len + } +} +module 0x42::a { + struct MyList { + len: u64, + } + public fun len(self: &MyList): u64 { + self.len + } +} +module 0x42::c { + use 0x42::b; + use 0x42::a; + inline fun foo(f: |(a::MyList, b::MyOtherList)|, x: a::MyList, y: b::MyOtherList) { + f(x, y) + } + fun test(x: a::MyList, y: b::MyOtherList) { + let (x,y) = (x, y); + let (x,y) = (x, y); + if (a::len(&x) + b::len(&y) == 1) () else abort 1 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/receiver/same_names_typed.exp b/third_party/move/move-compiler-v2/tests/checking/receiver/same_names_typed.exp index b45494f8c2cf6..9f205590de554 100644 --- a/third_party/move/move-compiler-v2/tests/checking/receiver/same_names_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking/receiver/same_names_typed.exp @@ -3,16 +3,16 @@ module 0x42::b { struct MyOtherList { len: u64, } - public fun len(self: &b::MyOtherList): u64 { - select b::MyOtherList.len<&b::MyOtherList>(self) + public fun len(self: &MyOtherList): u64 { + select b::MyOtherList.len<&MyOtherList>(self) } } // end 0x42::b module 0x42::a { struct MyList { len: u64, } - public fun len(self: &a::MyList): u64 { - select a::MyList.len<&a::MyList>(self) + public fun len(self: &MyList): u64 { + select a::MyList.len<&MyList>(self) } } // end 0x42::a module 0x42::c { @@ -35,3 +35,33 @@ module 0x42::c { } } } // end 0x42::c + +// -- Sourcified model before bytecode pipeline +module 0x42::b { + struct MyOtherList { + len: u64, + } + public fun len(self: &MyOtherList): u64 { + self.len + } +} +module 0x42::a { + struct MyList { + len: u64, + } + public fun len(self: &MyList): u64 { + self.len + } +} +module 0x42::c { + use 0x42::b; + use 0x42::a; + inline fun foo(f: |(a::MyList, b::MyOtherList)|, x: a::MyList, y: 
b::MyOtherList) { + f(x, y) + } + fun test(x: a::MyList, y: b::MyOtherList) { + let (x,y) = (x, y); + let (x,y) = (x, y); + if (a::len(&x) + b::len(&y) == 1) () else abort 1 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/receiver/vectors.exp b/third_party/move/move-compiler-v2/tests/checking/receiver/vectors.exp index 35e73d04076f8..8fce705c3af8d 100644 --- a/third_party/move/move-compiler-v2/tests/checking/receiver/vectors.exp +++ b/third_party/move/move-compiler-v2/tests/checking/receiver/vectors.exp @@ -1,12 +1,12 @@ // -- Model dump before bytecode pipeline module 0x1::vector { - private fun receiver(self: vector<#0>,_y: #0) { + private fun receiver(self: vector,_y: T) { Tuple() } - private fun receiver_ref(self: &vector<#0>,_y: #0) { + private fun receiver_ref(self: &vector,_y: T) { Tuple() } - private fun receiver_ref_mut(self: &mut vector<#0>,_y: #0) { + private fun receiver_ref_mut(self: &mut vector,_y: T) { Tuple() } private fun test_call_styles(s: vector,x: u64) { @@ -16,3 +16,18 @@ module 0x1::vector { Tuple() } } // end 0x1::vector + +// -- Sourcified model before bytecode pipeline +module 0x1::vector { + fun receiver(self: vector, _y: T) { + } + fun receiver_ref(self: &vector, _y: T) { + } + fun receiver_ref_mut(self: &mut vector, _y: T) { + } + fun test_call_styles(s: vector, x: u64) { + receiver(s, x); + receiver_ref(&s, x); + receiver_ref_mut(&mut s, x); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/assert_skipped_for_spec.exp b/third_party/move/move-compiler-v2/tests/checking/specs/assert_skipped_for_spec.exp index 35729286d8c52..6c0bb9bfdcd4d 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/assert_skipped_for_spec.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/assert_skipped_for_spec.exp @@ -15,3 +15,11 @@ module 0x42::M { Sub(x, 1) } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + fun bar(x: u64): u64 { + if (x > 0) () 
else abort 1; + x - 1 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/conditions_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/conditions_ok.exp index 54fe8ac5132a6..5524b749b2ebc 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/conditions_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/conditions_ok.exp @@ -8,7 +8,7 @@ module 0x42::M { Deref(x) } spec { - aborts_if Or(Eq(Freeze(false)($t0), 0), Eq(select M::Ghost$some_global.v(global(0x0)), 0)); + aborts_if Or(Eq(Freeze(false)($t0), 0), Eq(select M::Ghost$some_global.v(global<0x42::M::Ghost$some_global>(0x0)), 0)); ensures Gt(Old($t0), $t0); ensures Eq(result0(), Freeze(false)($t0)); } @@ -20,7 +20,7 @@ module 0x42::M { ensures And(Eq($t0, result0()), Eq(result1(), true)); } - private fun with_emits(_guid: vector,_msg: #0,x: u64): u64 { + private fun with_emits(_guid: vector,_msg: T,x: u64): u64 { x } spec { @@ -30,3 +30,20 @@ module 0x42::M { } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct Ghost$some_global has copy, drop, store, key { + v: u64, + } + fun add_some(x: &mut u64): u64 { + *x = *x + 1; + *x + } + fun multiple_results(x: u64): (u64, bool) { + (x, true) + } + fun with_emits(_guid: vector, _msg: T, x: u64): u64 { + x + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/expressions_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/expressions_ok.exp index c1a6dc4437405..cb024a2ab2a64 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/expressions_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/expressions_ok.exp @@ -62,3 +62,7 @@ module 0x42::M { M::generic_function(3, 3) } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_in_spec.exp 
b/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_in_spec.exp index 07a0195d39472..007310efd886e 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_in_spec.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_in_spec.exp @@ -1,14 +1,14 @@ // -- Model dump before bytecode pipeline module 0x42::m { spec { - invariant forall a: address: TypeDomain
(): Implies(exists(a), { + invariant forall a: address: TypeDomain
(): Implies(exists<0x42::m::S>(a), { let (x: address): (address) = Tuple(a); { let r: bool = { let (a: address): (address) = Tuple(x); - Lt(select m::S.f({ + Lt(select m::S.f<0x42::m::S>({ let (a: address): (address) = Tuple(a); - global(a) + global<0x42::m::S>(a) }), 10) }; r @@ -32,7 +32,7 @@ module 0x42::m { }; } - private inline fun exec(f: |#0|#1,x: #0): #1 { + private inline fun exec(f: |T|R,x: T): R { { let r: R = (f)(x); spec { @@ -74,7 +74,63 @@ module 0x42::m { }); } - private inline fun get(a: address): � { + private inline fun get(a: address): &R { BorrowGlobal(Immutable)(a) } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct S has key { + f: u64, + } + /* + spec { + invariant { + let (x: num): (num) = Tuple(select m::S.f()); + { + let r: bool = { + let (x: num): (num) = Tuple(x); + Gt(x, 0) + }; + r + } + }; + } + + */ + inline fun exec(f: |T|R, x: T): R { + let r = f(x); + + /* spec { + assert Eq<#1>(r, (f)($t1)); + } + */ + ; + r + } + fun function_code_spec_block(x: u64): u64 { + + /* spec { + assert { + let (x: num): (num) = Tuple($t0); + { + let r: bool = { + let (y: num): (num) = Tuple(x); + Gt(y, 0) + }; + r + } + }; + } + */ + ; + x + 1 + } + fun function_spec_block(x: u64): u64 { + x + 1 + } + inline fun get(a: address): &R { + borrow_global(a) + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_in_spec_typed.exp b/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_in_spec_typed.exp index e3c750934cc8c..06ac84e29aaa4 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_in_spec_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_in_spec_typed.exp @@ -1,14 +1,14 @@ // -- Model dump before bytecode pipeline module 0x42::m { spec { - invariant forall a: address: TypeDomain
(): Implies(exists(a), { + invariant forall a: address: TypeDomain
(): Implies(exists<0x42::m::S>(a), { let (x: address): (address) = Tuple(a); { let r: bool = { let (a: address): (address) = Tuple(x); - Lt(select m::S.f({ + Lt(select m::S.f<0x42::m::S>({ let (a: address): (address) = Tuple(a); - global(a) + global<0x42::m::S>(a) }), 10) }; r @@ -32,7 +32,7 @@ module 0x42::m { }; } - private inline fun exec(f: |#0|#1,x: #0): #1 { + private inline fun exec(f: |T|R,x: T): R { { let r: R = (f)(x); spec { @@ -74,7 +74,63 @@ module 0x42::m { }); } - private inline fun get(a: address): � { + private inline fun get(a: address): &R { BorrowGlobal(Immutable)(a) } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct S has key { + f: u64, + } + /* + spec { + invariant { + let (x: u64): (u64) = Tuple(select m::S.f()); + { + let r: bool = { + let (x: u64): (u64) = Tuple(x); + Gt(x, 0) + }; + r + } + }; + } + + */ + inline fun exec(f: |T|R, x: T): R { + let r = f(x); + + /* spec { + assert Eq<#1>(r, (f)($t1)); + } + */ + ; + r + } + fun function_code_spec_block(x: u64): u64 { + + /* spec { + assert { + let (x: u64): (u64) = Tuple($t0); + { + let r: bool = { + let (y: u64): (u64) = Tuple(x); + Gt(y, 0) + }; + r + } + }; + } + */ + ; + x + 1 + } + fun function_spec_block(x: u64): u64 { + x + 1 + } + inline fun get(a: address): &R { + borrow_global(a) + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_spec_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_spec_ok.exp index fc26e019be673..42ce1545caefb 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_spec_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/inline_fun_spec_ok.exp @@ -8,3 +8,16 @@ module 0x42::M { 42 } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + public inline fun f(): u64 { + + /* spec { + assert true; + } + */ + ; + 42 + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference.exp b/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference.exp index 6073b62bbaa43..382979923cce9 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference.exp @@ -11,3 +11,17 @@ module 0x42::m { } } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + fun foo() { + let i = 10; + + /* spec { + assert forall j: num: Range(0, i): Lt(j, i); + } + */ + ; + i = i + 1 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference_bitvector.exp b/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference_bitvector.exp index f60c5fa68e8f8..6f8111e4122ff 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference_bitvector.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference_bitvector.exp @@ -22,3 +22,21 @@ module 0x42::bit_vector_infer { } } } // end 0x42::bit_vector_infer + +// -- Sourcified model before bytecode pipeline +module 0x42::bit_vector_infer { + public fun new(_length: u64) { + let counter = 1; + if (counter > 0) { + counter = counter - 1; + }; + let bit_field = 0x1::vector::empty(); + 0x1::vector::push_back(&mut bit_field, false); + + /* spec { + assert Eq(Len(bit_field), 0); + } + */ + ; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference_vector.exp b/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference_vector.exp index d0b499f33c6f9..3034b88d36648 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference_vector.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_inference_vector.exp @@ -13,3 +13,17 @@ module 0x42::bit_vector { } } } // end 0x42::bit_vector + +// -- 
Sourcified model before bytecode pipeline +module 0x42::bit_vector { + public fun new(_length: u64) { + let bit_field = 0x1::vector::empty(); + + /* spec { + assert Eq(Len(bit_field), 0); + } + */ + ; + 0x1::vector::push_back(&mut bit_field, false); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_old.exp b/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_old.exp index f0d2ba60255f7..fc1cae43ff96c 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_old.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/inline_spec_old.exp @@ -8,3 +8,15 @@ module 0x42::m { Tuple() } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + public fun foo(vec: &mut vector) { + + /* spec { + assert forall k: num: Range(0, Len($t0)): Eq(Index($t0, k), Index(Old>($t0), k)); + } + */ + ; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/intrinsic_decl_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/intrinsic_decl_ok.exp index 6b81d226f4150..c6ba5637c6bcf 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/intrinsic_decl_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/intrinsic_decl_ok.exp @@ -1,29 +1,55 @@ // -- Model dump before bytecode pipeline module 0x42::M { - struct MyTable1 { + struct MyTable1 { dummy_field: bool, } spec { } - struct MyTable2 { + struct MyTable2 { dummy_field: bool, } spec { } - private native fun contains(t: &M::MyTable2<#0, #1>,k: #0): bool; - private native fun borrow(t: &M::MyTable2<#0, #1>,k: #0):  - private native fun borrow_mut(t: &mut M::MyTable1<#0, #1>,k: #0): &mut #1; - private native fun destroy_empty(t: M::MyTable1<#0, #1>); - private native fun length(t: &M::MyTable1<#0, #1>): u64; - private native fun remove(t: &mut M::MyTable2<#0, #1>,k: #0): #1; - private native fun new(): M::MyTable1<#0, #1>; - private native fun new2(): M::MyTable2<#0, #1>; - 
spec fun spec_len(t: M::MyTable1<#0, #1>): num; - spec fun spec_set(t: M::MyTable1<#0, #1>,k: #0,v: #1): M::MyTable1<#0, #1>; - spec fun spec_get(t: M::MyTable1<#0, #1>,k: #0): #1; - spec fun spec_len2(t: M::MyTable2<#0, #1>): num; - spec fun spec_del(t: M::MyTable2<#0, #1>): num; - spec fun spec_has_key(t: M::MyTable2<#0, #1>,k: #0): bool; + private native fun contains(t: &MyTable2,k: K): bool; + private native fun borrow(t: &MyTable2,k: K): &V; + private native fun borrow_mut(t: &mut MyTable1,k: K): &mut V; + private native fun destroy_empty(t: MyTable1); + private native fun length(t: &MyTable1): u64; + private native fun remove(t: &mut MyTable2,k: K): V; + private native fun new(): MyTable1; + private native fun new2(): MyTable2; + spec fun spec_len(t: MyTable1<#0, #1>): num; + spec fun spec_set(t: MyTable1<#0, #1>,k: #0,v: #1): MyTable1<#0, #1>; + spec fun spec_get(t: MyTable1<#0, #1>,k: #0): #1; + spec fun spec_len2(t: MyTable2<#0, #1>): num; + spec fun spec_del(t: MyTable2<#0, #1>): num; + spec fun spec_has_key(t: MyTable2<#0, #1>,k: #0): bool; } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct MyTable1 { + } + /* + spec { + } + + */ + struct MyTable2 { + } + /* + spec { + } + + */ + native fun contains(t: &MyTable2, k: K): bool ; + native fun borrow(t: &MyTable2, k: K): &V ; + native fun borrow_mut(t: &mut MyTable1, k: K): &mut V ; + native fun destroy_empty(t: MyTable1) ; + native fun length(t: &MyTable1): u64 ; + native fun remove(t: &mut MyTable2, k: K): V ; + native fun new(): MyTable1 ; + native fun new2(): MyTable2 ; +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/invariants_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/invariants_ok.exp index adfbe05e60cbc..00ce3b6b54444 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/invariants_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/invariants_ok.exp @@ -1,10 +1,10 @@ // -- Model dump 
before bytecode pipeline module 0x42::M { struct R { - s: M::S, + s: S, } spec { - invariant M::less10(true, select M::S.x(select M::R.s())); + invariant M::less10(true, select M::S.x<0x42::M::S>(select M::R.s())); } struct S { @@ -26,3 +26,26 @@ module 0x42::M { M::less10(Not(c), x) } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct R { + s: S, + } + /* + spec { + invariant M::less10(true, select M::S.x<0x42::M::S>(select M::R.s())); + } + + */ + struct S { + x: u64, + y: bool, + } + /* + spec { + invariant Eq(Gt(select M::S.x(), 0), select M::S.y()); + } + + */ +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/lets_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/lets_ok.exp index 4cba29d734223..a89a56075d873 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/lets_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/lets_ok.exp @@ -11,3 +11,11 @@ module 0x42::M { } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + fun foo(x: &mut u64): u64 { + *x = *x + 1; + *x + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/move_function_in_spec_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/move_function_in_spec_ok.exp index f6a3025c7b239..4ba45bf526215 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/move_function_in_spec_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/move_function_in_spec_ok.exp @@ -5,12 +5,12 @@ module 0x42::move_function_in_spec { } private fun foo() { { - let type_info: move_function_in_spec::TypeInfo = move_function_in_spec::type_of(); + let type_info: TypeInfo = move_function_in_spec::type_of(); { - let account_address: address = select move_function_in_spec::TypeInfo.account_address(type_info); + let account_address: address = select move_function_in_spec::TypeInfo.account_address(type_info); spec { assert 
move_function_in_spec::$no_change(account_address, account_address); - assert Eq
(account_address, select move_function_in_spec::TypeInfo.account_address(move_function_in_spec::$type_of<#0>())); + assert Eq
(account_address, select move_function_in_spec::TypeInfo.account_address<0x42::move_function_in_spec::TypeInfo>(move_function_in_spec::$type_of<#0>())); } ; Tuple() @@ -18,23 +18,50 @@ module 0x42::move_function_in_spec { } } public fun no_change(target: address,new_addr: address): bool - acquires move_function_in_spec::TypeInfo(*) + acquires TypeInfo(*) { { - let ty: &move_function_in_spec::TypeInfo = BorrowGlobal(Immutable)(target); - Eq
(select move_function_in_spec::TypeInfo.account_address<&move_function_in_spec::TypeInfo>(ty), new_addr) + let ty: &TypeInfo = BorrowGlobal(Immutable)(target); + Eq
(select move_function_in_spec::TypeInfo.account_address<&TypeInfo>(ty), new_addr) } } - public fun type_of(): move_function_in_spec::TypeInfo { + public fun type_of(): TypeInfo { Abort(1) } spec fun $no_change(target: address,new_addr: address): bool { { - let ty: &move_function_in_spec::TypeInfo = global(target); - Eq
(select move_function_in_spec::TypeInfo.account_address(ty), new_addr) + let ty: &0x42::move_function_in_spec::TypeInfo = global<0x42::move_function_in_spec::TypeInfo>(target); + Eq
(select move_function_in_spec::TypeInfo.account_address<0x42::move_function_in_spec::TypeInfo>(ty), new_addr) } } - spec fun $type_of(): move_function_in_spec::TypeInfo { + spec fun $type_of(): TypeInfo { Tuple() } } // end 0x42::move_function_in_spec + +// -- Sourcified model before bytecode pipeline +module 0x42::move_function_in_spec { + struct TypeInfo has copy, drop, store, key { + account_address: address, + } + fun foo() { + let type_info = type_of(); + let account_address = type_info.account_address; + + /* spec { + assert move_function_in_spec::$no_change(account_address, account_address); + assert Eq
(account_address, select move_function_in_spec::TypeInfo.account_address<0x42::move_function_in_spec::TypeInfo>(move_function_in_spec::$type_of<#0>())); + } + */ + ; + } + public fun no_change(target: address, new_addr: address): bool + acquires TypeInfo + { + let ty = borrow_global(target); + ty.account_address == new_addr + } + public fun type_of(): TypeInfo { + abort 1 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/quantifiers_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/quantifiers_ok.exp index bef42715930d6..113804cb1f31b 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/quantifiers_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/quantifiers_ok.exp @@ -3,10 +3,17 @@ module 0x42::M { struct S { x: u64, } - spec fun exists_in_vector(v: vector): bool { - exists s: M::S: v: Gt(select M::S.x(s), 0) + spec fun exists_in_vector(v: vector): bool { + exists s: 0x42::M::S: v: Gt(select M::S.x<0x42::M::S>(s), 0) } - spec fun some_in_vector(v: vector): M::S { - choose s: M::S: v: Eq(select M::S.x(s), 0) + spec fun some_in_vector(v: vector): S { + choose s: 0x42::M::S: v: Eq(select M::S.x<0x42::M::S>(s), 0) } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct S { + x: u64, + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/schemas_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/schemas_ok.exp index 3ef30afac08f9..efd869ab6d011 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/schemas_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/schemas_ok.exp @@ -52,8 +52,8 @@ note: unused schema M::SchemaExp // -- Model dump before bytecode pipeline module 0x42::M { - struct S { - x: #0, + struct S { + x: X, } private fun add(x: u64): u64 { Add(x, 1) @@ -75,3 +75,18 @@ module 0x42::M { } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct S 
{ + x: X, + } + fun add(x: u64): u64 { + x + 1 + } + fun id(x: u64): u64 { + x + } + fun multiple(_x: u64, _y: u64) { + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/structs_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/structs_ok.exp index 591fc4fd388df..69ded91a3a375 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/structs_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/structs_ok.exp @@ -3,31 +3,31 @@ module 0x42::M { struct T { x: u64, } - struct G { - x: #0, + struct G { + x: T, y: bool, } struct R { - s: M::S, + s: S, } struct S { x: u64, y: bool, z: vector, } - public fun f(r: M::R): M::T { - pack M::T(select M::S.x(select M::R.s(r))) + public fun f(r: R): T { + pack M::T(select M::S.x(select M::R.s(r))) } - spec fun struct_access(s: M::S): u64 { - select M::S.x(s) + spec fun struct_access(s: S): u64 { + select M::S.x<0x42::M::S>(s) } - spec fun nested_struct_access(r: M::R): bool { - select M::S.y(select M::R.s(r)) + spec fun nested_struct_access(r: R): bool { + select M::S.y<0x42::M::S>(select M::R.s<0x42::M::R>(r)) } - spec fun struct_pack(x: u64,y: bool,z: vector): M::S { + spec fun struct_pack(x: u64,y: bool,z: vector): S { pack M::S(x, y, z) } - spec fun struct_pack_other_order(x: u64,y: bool,z: vector): M::S { + spec fun struct_pack_other_order(x: u64,y: bool,z: vector): S { { let $z: vector = z; { @@ -36,16 +36,38 @@ module 0x42::M { } } } - spec fun generic_struct_pack(x: u64,y: bool): M::G { + spec fun generic_struct_pack(x: u64,y: bool): G { pack M::G(x, y) } - spec fun generic_struct_pack_instantiated(x: u64,y: bool): M::G { + spec fun generic_struct_pack_instantiated(x: u64,y: bool): G { pack M::G(x, y) } - spec fun resource_global(addr: address): M::T { - global(addr) + spec fun resource_global(addr: address): T { + global<0x42::M::T>(addr) } spec fun resource_global_exists(addr: address): bool { - exists(addr) + exists<0x42::M::T>(addr) } } // end 0x42::M + +// 
-- Sourcified model before bytecode pipeline +module 0x42::M { + struct T has key { + x: u64, + } + struct G { + x: T, + y: bool, + } + struct R has drop { + s: S, + } + struct S has drop { + x: u64, + y: bool, + z: vector, + } + public fun f(r: R): T { + T{x: r.s.x} + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/type_variance_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/type_variance_ok.exp index 2c0d1376d130b..a4d2c3eb0a363 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/type_variance_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/type_variance_ok.exp @@ -8,3 +8,10 @@ module 0x42::M { } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + fun foo(v: vector): vector { + v + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/specs/update_field_ok.exp b/third_party/move/move-compiler-v2/tests/checking/specs/update_field_ok.exp index a808e4517a749..7e15d2d3c36be 100644 --- a/third_party/move/move-compiler-v2/tests/checking/specs/update_field_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/specs/update_field_ok.exp @@ -4,16 +4,27 @@ module 0x42::update_field_ok { x: u64, y: u64, } - private fun f(r: &mut update_field_ok::R) { - select update_field_ok::R.x<&mut update_field_ok::R>(r) = 1; + private fun f(r: &mut R) { + select update_field_ok::R.x<&mut R>(r) = 1; Tuple() } spec { aborts_if false; - ensures Eq(Freeze(false)($t0), update_field_ok::assign_x_1(Old($t0))); + ensures Eq<0x42::update_field_ok::R>(Freeze(false)($t0), update_field_ok::assign_x_1(Old<0x42::update_field_ok::R>($t0))); } - spec fun assign_x_1(r: update_field_ok::R): update_field_ok::R { - update update_field_ok::R.x(r, 1) + spec fun assign_x_1(r: R): R { + update update_field_ok::R.x<0x42::update_field_ok::R>(r, 1) } } // end 0x42::update_field_ok + +// -- Sourcified model before bytecode pipeline +module 0x42::update_field_ok { + struct R { + x: 
u64, + y: u64, + } + fun f(r: &mut R) { + r.x = 1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/annotated_types.exp b/third_party/move/move-compiler-v2/tests/checking/typing/annotated_types.exp index 7547136a5af2a..f9428b593f237 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/annotated_types.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/annotated_types.exp @@ -11,7 +11,22 @@ module 0x8675309::M { 0; pack M::S(false); M::R{ dummy_field: _ } = pack M::R(false); - (_: u64, _: M::S, M::R{ dummy_field: _ }): (u64, M::S, M::R) = Tuple(0, pack M::S(false), pack M::R(false)); + (_: u64, _: S, M::R{ dummy_field: _ }): (u64, S, R) = Tuple(0, pack M::S(false), pack M::R(false)); Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + struct S has drop { + } + fun t() { + (); + 0; + S{}; + R{} = R{}; + (_,_,R{}) = (0, S{}, R{}); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/assign_nested2.exp b/third_party/move/move-compiler-v2/tests/checking/typing/assign_nested2.exp index 909472ecef030..b7fe0f0db7198 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/assign_nested2.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/assign_nested2.exp @@ -28,7 +28,7 @@ module 0x8675309::A { { let r_ref: &mut u64 = Borrow(Mutable)(r); { - let s: A::S = pack A::S(0); + let s: S = pack A::S(0); (_: u64, x: u64, _: u64, _: u64): (u64, u64, u64, u64) = A::four(); Tuple() } @@ -37,3 +37,20 @@ module 0x8675309::A { } } } // end 0x8675309::A + +// -- Sourcified model before bytecode pipeline +module 0x8675309::A { + struct S has drop { + f: u64, + } + fun four(): (u64, u64, u64, u64) { + (0, 1, 2, 3) + } + public fun mixed() { + let x; + let r = 0; + let r_ref = &mut r; + let s = S{f: 0}; + (_,x,_,_) = four(); + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/typing/assign_tuple_wg.exp b/third_party/move/move-compiler-v2/tests/checking/typing/assign_tuple_wg.exp index a250c827c1d81..bdd2fc85f3254 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/assign_tuple_wg.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/assign_tuple_wg.exp @@ -18,10 +18,27 @@ module 0xc0ffee::dummy2 { struct State { value: u64, } - private fun tuple_assignments(s: &signer,state: dummy2::State) { + private fun tuple_assignments(s: &signer,state: State) { { - let (): () = MoveTo(s, state); + let (): () = MoveTo(s, state); Tuple() } } } // end 0xc0ffee::dummy2 + +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::dummy1 { + fun bar(b: bool) { + let () = if (b) baz(); + } + fun baz() { + } +} +module 0xc0ffee::dummy2 { + struct State has key { + value: u64, + } + fun tuple_assignments(s: &signer, state: State) { + let () = move_to(s, state); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_add.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_add.exp index 499d07865f69a..fd8094767a75d 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_add.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_add.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { 0; 1; 1; @@ -13,11 +13,32 @@ module 0x8675309::M { 1; 1; Add(Copy(x), Move(x)); - Add(select M::R.f(r), select M::R.f(r)); - Add(Add(Add(1, select M::R.f(r)), select M::R.f(r)), 0); + Add(select M::R.f(r), select M::R.f(r)); + Add(Add(Add(1, select M::R.f(r)), select M::R.f(r)), 0); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + 0; + 1; + 1; + 1u8; + 1u8; + 1u128; + 1u128; + 1; + (copy x) + (move x); 
+ r.f + r.f; + 1 + r.f + r.f + 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_and.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_and.exp index dcc109892aa00..73a8766017937 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_and.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_and.exp @@ -3,13 +3,13 @@ module 0x8675309::M { struct R { f: bool, } - private fun t0(x: bool,r: M::R) { + private fun t0(x: bool,r: R) { false; false; false; true; And(Copy(x), Move(x)); - And(select M::R.f(r), select M::R.f(r)); + And(select M::R.f(r), select M::R.f(r)); false; { let M::R{ f: _ } = r; @@ -17,3 +17,20 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: bool, + } + fun t0(x: bool, r: R) { + false; + false; + false; + true; + (copy x) && (move x); + r.f && r.f; + false; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_bit_and.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_bit_and.exp index 176d035a54ab7..39e24786a4398 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_bit_and.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_bit_and.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { 0; 0; 0; @@ -13,11 +13,32 @@ module 0x8675309::M { 0; 0; BitAnd(Copy(x), Move(x)); - BitAnd(select M::R.f(r), select M::R.f(r)); - BitAnd(BitAnd(BitAnd(1, select M::R.f(r)), select M::R.f(r)), 0); + BitAnd(select M::R.f(r), select M::R.f(r)); + BitAnd(BitAnd(BitAnd(1, select M::R.f(r)), select M::R.f(r)), 0); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: 
R) { + 0; + 0; + 0; + 0u8; + 0u8; + 0u128; + 0u128; + 0; + (copy x) & (move x); + r.f & r.f; + 1 & r.f & r.f & 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_bit_or.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_bit_or.exp index a0dd5a3086d89..ce3e511d8babf 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_bit_or.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_bit_or.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { 0; 1; 1; @@ -13,11 +13,32 @@ module 0x8675309::M { 1; 1; BitOr(Copy(x), Move(x)); - BitOr(select M::R.f(r), select M::R.f(r)); - BitOr(BitOr(BitOr(1, select M::R.f(r)), select M::R.f(r)), 0); + BitOr(select M::R.f(r), select M::R.f(r)); + BitOr(BitOr(BitOr(1, select M::R.f(r)), select M::R.f(r)), 0); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + 0; + 1; + 1; + 1u8; + 1u8; + 1u128; + 1u128; + 1; + (copy x) | (move x); + r.f | r.f; + 1 | r.f | r.f | 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_div.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_div.exp index 619ea3afb311a..0495092f02468 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_div.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_div.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { Div(0, 0); Div(1, 0); 0; @@ -13,11 +13,32 @@ module 0x8675309::M { 0; 0; Div(Copy(x), Move(x)); - Div(select M::R.f(r), select M::R.f(r)); - Div(Div(Div(1, select M::R.f(r)), select M::R.f(r)), 0); + Div(select M::R.f(r), select M::R.f(r)); + Div(Div(Div(1, select 
M::R.f(r)), select M::R.f(r)), 0); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + 0 / 0; + 1 / 0; + 0; + 0u8; + 0u8; + 0u128; + 0u128; + 0; + (copy x) / (move x); + r.f / r.f; + 1 / r.f / r.f / 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_geq.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_geq.exp index 1cb8410cde8ec..2a29ec5402b29 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_geq.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_geq.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { true; true; false; @@ -13,11 +13,32 @@ module 0x8675309::M { false; false; Ge(Copy(x), Move(x)); - Ge(select M::R.f(r), select M::R.f(r)); - And(Ge(1, select M::R.f(r)), Ge(select M::R.f(r), 0)); + Ge(select M::R.f(r), select M::R.f(r)); + And(Ge(1, select M::R.f(r)), Ge(select M::R.f(r), 0)); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + true; + true; + false; + false; + false; + false; + false; + false; + (copy x) >= (move x); + r.f >= r.f; + 1 >= r.f && r.f >= 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_gt.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_gt.exp index 550c2c2a85560..f8b6c74e19878 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_gt.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_gt.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { false; true; false; @@ -13,11 +13,32 @@ module 
0x8675309::M { false; false; Gt(Copy(x), Move(x)); - Gt(select M::R.f(r), select M::R.f(r)); - And(Gt(1, select M::R.f(r)), Gt(select M::R.f(r), 0)); + Gt(select M::R.f(r), select M::R.f(r)); + And(Gt(1, select M::R.f(r)), Gt(select M::R.f(r), 0)); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + false; + true; + false; + false; + false; + false; + false; + false; + (copy x) > (move x); + r.f > r.f; + 1 > r.f && r.f > 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_leq.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_leq.exp index 72c2b34c7462a..7d4f67a317c34 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_leq.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_leq.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { true; false; true; @@ -13,11 +13,32 @@ module 0x8675309::M { true; true; Le(Copy(x), Move(x)); - Le(select M::R.f(r), select M::R.f(r)); - And(Le(1, select M::R.f(r)), Le(select M::R.f(r), 0)); + Le(select M::R.f(r), select M::R.f(r)); + And(Le(1, select M::R.f(r)), Le(select M::R.f(r), 0)); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + true; + false; + true; + true; + true; + true; + true; + true; + (copy x) <= (move x); + r.f <= r.f; + 1 <= r.f && r.f <= 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_lt.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_lt.exp index 39c7c05043c27..a39b2db954320 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_lt.exp +++ 
b/third_party/move/move-compiler-v2/tests/checking/typing/binary_lt.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { false; false; true; @@ -13,11 +13,32 @@ module 0x8675309::M { true; true; Lt(Copy(x), Move(x)); - Lt(select M::R.f(r), select M::R.f(r)); - And(Lt(1, select M::R.f(r)), Lt(select M::R.f(r), 0)); + Lt(select M::R.f(r), select M::R.f(r)); + And(Lt(1, select M::R.f(r)), Lt(select M::R.f(r), 0)); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + false; + false; + true; + true; + true; + true; + true; + true; + (copy x) < (move x); + r.f < r.f; + 1 < r.f && r.f < 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_mod.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_mod.exp index 354e15f5bb020..5821dccc1c2b3 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_mod.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_mod.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { Mod(0, 0); Mod(1, 0); 0; @@ -13,11 +13,32 @@ module 0x8675309::M { 0; 0; Mod(Copy(x), Move(x)); - Mod(select M::R.f(r), select M::R.f(r)); - Mod(Mod(Mod(1, select M::R.f(r)), select M::R.f(r)), 0); + Mod(select M::R.f(r), select M::R.f(r)); + Mod(Mod(Mod(1, select M::R.f(r)), select M::R.f(r)), 0); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + 0 % 0; + 1 % 0; + 0; + 0u8; + 0u8; + 0u128; + 0u128; + 0; + (copy x) % (move x); + r.f % r.f; + 1 % r.f % r.f % 0; + let R{f: _} = r; + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/typing/binary_mul.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_mul.exp index 061c338dc982b..f2796fad68ebc 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_mul.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_mul.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { 0; 0; 0; @@ -13,11 +13,32 @@ module 0x8675309::M { 0; 0; Mul(Copy(x), Move(x)); - Mul(select M::R.f(r), select M::R.f(r)); - Mul(Mul(Mul(1, select M::R.f(r)), select M::R.f(r)), 0); + Mul(select M::R.f(r), select M::R.f(r)); + Mul(Mul(Mul(1, select M::R.f(r)), select M::R.f(r)), 0); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + 0; + 0; + 0; + 0u8; + 0u8; + 0u128; + 0u128; + 0; + (copy x) * (move x); + r.f * r.f; + 1 * r.f * r.f * 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_or.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_or.exp index ba12114e3fabb..3f789c99b0013 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_or.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_or.exp @@ -3,13 +3,13 @@ module 0x8675309::M { struct R { f: bool, } - private fun t0(x: bool,r: M::R) { + private fun t0(x: bool,r: R) { true; true; true; true; Or(Copy(x), Move(x)); - Or(select M::R.f(r), select M::R.f(r)); + Or(select M::R.f(r), select M::R.f(r)); true; { let M::R{ f: _ } = r; @@ -17,3 +17,20 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: bool, + } + fun t0(x: bool, r: R) { + true; + true; + true; + true; + (copy x) || (move x); + r.f || r.f; + true; + let R{f: _} = 
r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_shl.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_shl.exp index d5774d480eedc..3b588ae51c05d 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_shl.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_shl.exp @@ -4,7 +4,7 @@ module 0x8675309::M { f: u64, b: u8, } - private fun t0(x: u64,b: u8,r: M::R) { + private fun t0(x: u64,b: u8,r: R) { 0; 1; 0; @@ -13,8 +13,29 @@ module 0x8675309::M { 0; 0; Shl(Copy(x), Copy(b)); - Shl(select M::R.f(r), select M::R.b(r)); - Shl(Shl(Shl(1, select M::R.b(r)), select M::R.b(r)), 0); + Shl(select M::R.f(r), select M::R.b(r)); + Shl(Shl(Shl(1, select M::R.b(r)), select M::R.b(r)), 0); M::R{ f: _, b: _ } = r } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + b: u8, + } + fun t0(x: u64, b: u8, r: R) { + 0; + 1; + 0; + 0; + 1u8; + 0u128; + 0; + (copy x) << (copy b); + r.f << r.b; + 1 << r.b << r.b << 0u8; + R{f: _,b: _} = r + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_shr.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_shr.exp index 9f84205f4b470..83b8b2181e934 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_shr.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_shr.exp @@ -4,7 +4,7 @@ module 0x8675309::M { f: u64, b: u8, } - private fun t0(x: u64,b: u8,r: M::R) { + private fun t0(x: u64,b: u8,r: R) { 0; 1; 0; @@ -13,8 +13,29 @@ module 0x8675309::M { 0; 0; Shr(Copy(x), Copy(b)); - Shr(select M::R.f(r), select M::R.b(r)); - Shr(Shr(Shr(1, select M::R.b(r)), select M::R.b(r)), 0); + Shr(select M::R.f(r), select M::R.b(r)); + Shr(Shr(Shr(1, select M::R.b(r)), select M::R.b(r)), 0); M::R{ f: _, b: _ } = r } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct 
R { + f: u64, + b: u8, + } + fun t0(x: u64, b: u8, r: R) { + 0; + 1; + 0; + 0; + 1u8; + 0u128; + 0; + (copy x) >> (copy b); + r.f >> r.b; + 1 >> r.b >> r.b >> 0u8; + R{f: _,b: _} = r + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_sub.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_sub.exp index 7165eb499e522..bd16e85576bda 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_sub.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_sub.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { 0; 1; Sub(0, 1); @@ -13,11 +13,32 @@ module 0x8675309::M { Sub(0, 1); Sub(0, 1); Sub(Copy(x), Move(x)); - Sub(select M::R.f(r), select M::R.f(r)); - Sub(Sub(Sub(1, select M::R.f(r)), select M::R.f(r)), 0); + Sub(select M::R.f(r), select M::R.f(r)); + Sub(Sub(Sub(1, select M::R.f(r)), select M::R.f(r)), 0); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + 0; + 1; + 0 - 1; + 0u8 - 1u8; + 0u8 - 1u8; + 0u128 - 1u128; + 0u128 - 1u128; + 0 - 1; + (copy x) - (move x); + r.f - r.f; + 1 - r.f - r.f - 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/binary_xor.exp b/third_party/move/move-compiler-v2/tests/checking/typing/binary_xor.exp index e0ca221882df9..0d24ebfbf52cb 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/binary_xor.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/binary_xor.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { 0; 1; 1; @@ -13,11 +13,32 @@ module 0x8675309::M { 1; 1; Xor(Copy(x), Move(x)); - Xor(select M::R.f(r), select M::R.f(r)); - Xor(Xor(Xor(1, select M::R.f(r)), select M::R.f(r)), 0); + 
Xor(select M::R.f(r), select M::R.f(r)); + Xor(Xor(Xor(1, select M::R.f(r)), select M::R.f(r)), 0); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + 0; + 1; + 1; + 1u8; + 1u8; + 1u128; + 1u128; + 1; + (copy x) ^ (move x); + r.f ^ r.f; + 1 ^ r.f ^ r.f ^ 0; + let R{f: _} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/bind_with_type_annot.exp b/third_party/move/move-compiler-v2/tests/checking/typing/bind_with_type_annot.exp index 76be5734e6def..622c066db0cde 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/bind_with_type_annot.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/bind_with_type_annot.exp @@ -6,7 +6,7 @@ module 0x8675309::M { private fun t0() { 0; { - let (x: u64, b: bool, M::R{ f }): (u64, bool, M::R) = Tuple(0, false, pack M::R(0)); + let (x: u64, b: bool, M::R{ f }): (u64, bool, R) = Tuple(0, false, pack M::R(0)); 0; false; 0; @@ -14,3 +14,17 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0() { + 0; + let (x,b,R{f: f}) = (0, false, R{f: 0}); + 0; + false; + 0; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/block_empty.exp b/third_party/move/move-compiler-v2/tests/checking/typing/block_empty.exp index 010d6c24957fd..6d5fb67404227 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/block_empty.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/block_empty.exp @@ -7,3 +7,12 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun t0() { + (); + (); + (); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/block_single_expr.exp 
b/third_party/move/move-compiler-v2/tests/checking/typing/block_single_expr.exp index d443f1578d571..998992acaf010 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/block_single_expr.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/block_single_expr.exp @@ -12,3 +12,16 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + fun t0() { + 0; + &0; + &mut 0; + R{} = R{}; + (0, false); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/block_with_statements.exp b/third_party/move/move-compiler-v2/tests/checking/typing/block_with_statements.exp index ab55de9be63c6..01a807ee43c9e 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/block_with_statements.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/block_with_statements.exp @@ -8,8 +8,8 @@ module 0x8675309::M { Borrow(Immutable)(0); Borrow(Mutable)(1); M::R{ dummy_field: _ } = { - let r: M::R = { - let r: M::R = pack M::R(false); + let r: R = { + let r: R = pack M::R(false); r }; r @@ -18,3 +18,22 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + fun t0() { + 0; + &0; + &mut 1; + R{} = { + let r = { + let r = R{}; + r + }; + r + }; + (0, false); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field.exp b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field.exp index aa470df8dbe0c..6da28f42b0d35 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field.exp @@ -3,7 +3,17 @@ module 0x8675309::M { struct S { f: u64, } - private fun t0(s: &M::S,s_mut: &mut M::S,s_mut2: &mut M::S): (&u64, &u64, &mut u64) { - Tuple(Borrow(Immutable)(select M::S.f<&M::S>(s)), Borrow(Immutable)(select 
M::S.f<&mut M::S>(s_mut)), Borrow(Mutable)(select M::S.f<&mut M::S>(s_mut2))) + private fun t0(s: &S,s_mut: &mut S,s_mut2: &mut S): (&u64, &u64, &mut u64) { + Tuple(Borrow(Immutable)(select M::S.f<&S>(s)), Borrow(Immutable)(select M::S.f<&mut S>(s_mut)), Borrow(Mutable)(select M::S.f<&mut S>(s_mut2))) } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S { + f: u64, + } + fun t0(s: &S, s_mut: &mut S, s_mut2: &mut S): (&u64, &u64, &mut u64) { + (&s.f, &s_mut.f, &mut s_mut2.f) + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_chain.exp b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_chain.exp index 94a934431a457..460c8c1455365 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_chain.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_chain.exp @@ -1,24 +1,48 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { struct X1 { - x2: M::X2, + x2: X2, } struct X2 { - x3: M::X3, + x3: X3, } struct X3 { f: u64, } - private fun t0(x1: &M::X1,x1_mut: &mut M::X1) { - Borrow(Immutable)(select M::X1.x2<&M::X1>(x1)); - Borrow(Immutable)(select M::X2.x3(select M::X1.x2<&M::X1>(x1))); - Borrow(Immutable)(select M::X3.f(select M::X2.x3(select M::X1.x2<&M::X1>(x1)))); - Borrow(Immutable)(select M::X1.x2<&mut M::X1>(x1_mut)); - Borrow(Immutable)(select M::X2.x3(select M::X1.x2<&mut M::X1>(x1_mut))); - Borrow(Immutable)(select M::X3.f(select M::X2.x3(select M::X1.x2<&mut M::X1>(x1_mut)))); - Borrow(Mutable)(select M::X1.x2<&mut M::X1>(x1_mut)); - Borrow(Mutable)(select M::X2.x3(select M::X1.x2<&mut M::X1>(x1_mut))); - Borrow(Mutable)(select M::X3.f(select M::X2.x3(select M::X1.x2<&mut M::X1>(x1_mut)))); + private fun t0(x1: &X1,x1_mut: &mut X1) { + Borrow(Immutable)(select M::X1.x2<&X1>(x1)); + Borrow(Immutable)(select M::X2.x3(select M::X1.x2<&X1>(x1))); + Borrow(Immutable)(select M::X3.f(select 
M::X2.x3(select M::X1.x2<&X1>(x1)))); + Borrow(Immutable)(select M::X1.x2<&mut X1>(x1_mut)); + Borrow(Immutable)(select M::X2.x3(select M::X1.x2<&mut X1>(x1_mut))); + Borrow(Immutable)(select M::X3.f(select M::X2.x3(select M::X1.x2<&mut X1>(x1_mut)))); + Borrow(Mutable)(select M::X1.x2<&mut X1>(x1_mut)); + Borrow(Mutable)(select M::X2.x3(select M::X1.x2<&mut X1>(x1_mut))); + Borrow(Mutable)(select M::X3.f(select M::X2.x3(select M::X1.x2<&mut X1>(x1_mut)))); Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct X1 { + x2: X2, + } + struct X2 { + x3: X3, + } + struct X3 { + f: u64, + } + fun t0(x1: &X1, x1_mut: &mut X1) { + &x1.x2; + &x1.x2.x3; + &x1.x2.x3.f; + &x1_mut.x2; + &x1_mut.x2.x3; + &x1_mut.x2.x3.f; + &mut x1_mut.x2; + &mut x1_mut.x2.x3; + &mut x1_mut.x2.x3.f; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_complex_root_expr.exp b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_complex_root_expr.exp index 2fc210c2e303e..8effc39a97ef2 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_complex_root_expr.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_complex_root_expr.exp @@ -3,36 +3,54 @@ module 0x8675309::M { struct S { f: u64, } - private fun t0(cond: bool,s: &M::S,s_mut: &mut M::S) { - Borrow(Immutable)(select M::S.f<&M::S>(if cond { + private fun t0(cond: bool,s: &S,s_mut: &mut S) { + Borrow(Immutable)(select M::S.f<&S>(if cond { s } else { s })); - Borrow(Immutable)(select M::S.f<&M::S>(if cond { + Borrow(Immutable)(select M::S.f<&S>(if cond { Freeze(false)(s_mut) } else { s })); - Borrow(Immutable)(select M::S.f<&M::S>(if cond { + Borrow(Immutable)(select M::S.f<&S>(if cond { s } else { Freeze(false)(s_mut) })); - Borrow(Immutable)(select M::S.f<&mut M::S>(if cond { + Borrow(Immutable)(select M::S.f<&mut S>(if cond { s_mut } else { s_mut })); - Borrow(Mutable)(select 
M::S.f<&mut M::S>(if cond { + Borrow(Mutable)(select M::S.f<&mut S>(if cond { s_mut } else { s_mut })); - Borrow(Immutable)(select M::S.f<&M::S>({ - let s: M::S = pack M::S(0); + Borrow(Immutable)(select M::S.f<&S>({ + let s: S = pack M::S(0); Borrow(Immutable)(s) })); Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has drop { + f: u64, + } + fun t0(cond: bool, s: &S, s_mut: &mut S) { + &(if (cond) s else s).f; + &(if (cond) /*freeze*/s_mut else s).f; + &(if (cond) s else /*freeze*/s_mut).f; + &(if (cond) s_mut else s_mut).f; + &mut (if (cond) s_mut else s_mut).f; + &{ + let s = S{f: 0}; + &s + }.f; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_non_ref_root.exp b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_non_ref_root.exp index 0a38afe6bcae3..d3eaf882a2445 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_non_ref_root.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_field_non_ref_root.exp @@ -3,15 +3,15 @@ module 0x8675309::M { struct S { f: u64, } - private fun t0(cond: bool,s: M::S) { - Borrow(Immutable)(select M::S.f(s)); - Borrow(Mutable)(select M::S.f(s)); - Borrow(Immutable)(select M::S.f(if cond { + private fun t0(cond: bool,s: S) { + Borrow(Immutable)(select M::S.f(s)); + Borrow(Mutable)(select M::S.f(s)); + Borrow(Immutable)(select M::S.f(if cond { pack M::S(0) } else { pack M::S(1) })); - Borrow(Mutable)(select M::S.f(if cond { + Borrow(Mutable)(select M::S.f(if cond { pack M::S(0) } else { pack M::S(1) @@ -19,3 +19,16 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has drop { + f: u64, + } + fun t0(cond: bool, s: S) { + &s.f; + &mut s.f; + &(if (cond) S{f: 0} else S{f: 1}).f; + &mut (if (cond) S{f: 0} else S{f: 1}).f; + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_local.exp b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_local.exp index 8ad47b62f6fa0..c048387e43b32 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_local.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_local.exp @@ -6,7 +6,7 @@ module 0x8675309::M { struct S { dummy_field: bool, } - private fun t0(b: bool,u: u64,s: M::S,r: M::R): M::R { + private fun t0(b: bool,u: u64,s: S,r: R): R { Borrow(Immutable)(b); Borrow(Mutable)(b); Borrow(Immutable)(u); @@ -17,15 +17,15 @@ module 0x8675309::M { Borrow(Mutable)(r); r } - private fun t1(): M::R { + private fun t1(): R { { let b: bool = true; { let u: u64 = 0; { - let s: M::S = pack M::S(false); + let s: S = pack M::S(false); { - let r: M::R = pack M::R(false); + let r: R = pack M::R(false); Borrow(Immutable)(b); Borrow(Mutable)(b); Borrow(Immutable)(u); @@ -41,3 +41,37 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + struct S has drop { + } + fun t0(b: bool, u: u64, s: S, r: R): R { + &b; + &mut b; + &u; + &mut u; + &s; + &mut s; + &r; + &mut r; + r + } + fun t1(): R { + let b = true; + let u = 0; + let s = S{}; + let r = R{}; + &b; + &mut b; + &u; + &mut u; + &s; + &mut s; + &r; + &mut r; + r + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_local_temp.exp b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_local_temp.exp index 7a4a44001c2a9..695b878ef0ff7 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/borrow_local_temp.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/borrow_local_temp.exp @@ -16,3 +16,19 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + struct S has drop { + } + fun t0() { + &true; + &mut false; 
+ &0; + &mut 1; + &S{}; + &mut S{}; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/break_any_type.exp b/third_party/move/move-compiler-v2/tests/checking/typing/break_any_type.exp index cd7b6720f3e26..88c5ed384a9da 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/break_any_type.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/break_any_type.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct Coin { dummy_field: bool, } - private fun foo(c: M::Coin) { + private fun foo(c: Coin) { M::Coin{ dummy_field: _ } = c; Tuple() } @@ -27,3 +27,20 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct Coin { + } + fun foo(c: Coin) { + Coin{} = c; + } + fun t0() { + while (true) { + 0 + (break); + } + } + fun t1() { + while (true) foo(break) + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/break_outside_loop.exp b/third_party/move/move-compiler-v2/tests/checking/typing/break_outside_loop.exp index a670f268e2b0c..f403ecd7de819 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/break_outside_loop.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/break_outside_loop.exp @@ -24,6 +24,21 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun bar() { + break; + } + fun baz(x: u64): u64 { + if (x >= 5) break; + 0 + } + fun foo() { + while (true) break; + break + } +} + Diagnostics: error: missing enclosing loop statement diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/cast.exp b/third_party/move/move-compiler-v2/tests/checking/typing/cast.exp index bf716645ad244..59ec0b8dcbb7a 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/cast.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/cast.exp @@ -37,3 +37,20 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- 
Sourcified model before bytecode pipeline +module 0x8675309::M { + fun t0(x8: u8, x64: u64, x128: u128) { + let _ = x8; + let _ = x64; + let _ = x128; + let _ = x64 as u8; + let _ = x128 as u64; + let _ = x8 as u128; + let _ = x128 as u8; + let _ = x8 as u64; + let _ = x64 as u128; + let _ = 340282366920938463463374607431768211455u128 as u8; + let _ = 340282366920938463463374607431768211455u128 as u64; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/constant_all_valid_types.exp b/third_party/move/move-compiler-v2/tests/checking/typing/constant_all_valid_types.exp index f66ba14550703..39492187167e2 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/constant_all_valid_types.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/constant_all_valid_types.exp @@ -41,3 +41,46 @@ module _0 { Tuple() } } // end _0 + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + fun t1(): u8 { + 0u8 + } + fun t2(): u64 { + 0 + } + fun t3(): u128 { + 0u128 + } + fun t4(): bool { + false + } + fun t5(): address { + 0x0 + } + fun t6(): vector { + vector[1u8, 35u8] + } + fun t7(): vector { + vector[97u8, 98u8, 99u8, 100u8] + } + fun t8(): vector
{ + vector[0x0, 0x1] + } + fun t9(): u8 { + 0u8 + } +} +script { + fun t() { + 0u8; + 0; + 0u128; + false; + 0x0; + vector[1u8, 35u8]; + vector[97u8, 98u8, 99u8, 100u8]; + vector[0x0, 0x1]; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/constant_folding.exp b/third_party/move/move-compiler-v2/tests/checking/typing/constant_folding.exp index 097ca8ea8e451..0df0d44c38389 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/constant_folding.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/constant_folding.exp @@ -1,3 +1,7 @@ // -- Model dump before bytecode pipeline module 0x42::constant_folding { } // end 0x42::constant_folding + +// -- Sourcified model before bytecode pipeline +module 0x42::constant_folding { +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/constant_supported_exps.exp b/third_party/move/move-compiler-v2/tests/checking/typing/constant_supported_exps.exp index c5c471f77575f..5fa35de19b8d6 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/constant_supported_exps.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/constant_supported_exps.exp @@ -1,3 +1,7 @@ // -- Model dump before bytecode pipeline module 0x42::M { } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/continue_any_type.exp b/third_party/move/move-compiler-v2/tests/checking/typing/continue_any_type.exp index d24bcf657bd5c..2799fd5291964 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/continue_any_type.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/continue_any_type.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct Coin { dummy_field: bool, } - private fun foo(c: M::Coin) { + private fun foo(c: Coin) { M::Coin{ dummy_field: _ } = c; Tuple() } @@ -27,3 +27,20 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified 
model before bytecode pipeline +module 0x8675309::M { + struct Coin { + } + fun foo(c: Coin) { + Coin{} = c; + } + fun t0() { + while (true) { + 0 + (continue); + } + } + fun t1() { + while (true) foo(continue) + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/continue_outside_loop.exp b/third_party/move/move-compiler-v2/tests/checking/typing/continue_outside_loop.exp index e77ad864b8a12..674634a3cf6e1 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/continue_outside_loop.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/continue_outside_loop.exp @@ -12,6 +12,14 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun foo() { + while (true) continue; + continue + } +} + Diagnostics: error: missing enclosing loop statement diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/decl_unpack_references.exp b/third_party/move/move-compiler-v2/tests/checking/typing/decl_unpack_references.exp index 9a0164bc1800c..dac518a528414 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/decl_unpack_references.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/decl_unpack_references.exp @@ -1,8 +1,8 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { struct R { - s1: M::S, - s2: M::S, + s1: S, + s2: S, } struct S { f: u64, @@ -12,7 +12,7 @@ module 0x8675309::M { let M::R{ s1: M::S{ f }, s2 }; f: u64 = 0; f; - s2: M::S = pack M::S(0); + s2: S = pack M::S(0); s2; Tuple() } @@ -22,7 +22,7 @@ module 0x8675309::M { let M::R{ s1: M::S{ f }, s2 }; f: &u64 = Borrow(Immutable)(0); f; - s2: &M::S = Borrow(Immutable)(pack M::S(0)); + s2: &S = Borrow(Immutable)(pack M::S(0)); s2; Tuple() } @@ -32,9 +32,41 @@ module 0x8675309::M { let M::R{ s1: M::S{ f }, s2 }; f: &mut u64 = Borrow(Mutable)(0); f; - s2: &mut M::S = Borrow(Mutable)(pack M::S(0)); + s2: &mut S = Borrow(Mutable)(pack M::S(0)); s2; Tuple() } 
} } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + s1: S, + s2: S, + } + struct S has drop { + f: u64, + } + fun t0() { + let R{s1: S{f: f},s2: s2}; + f = 0; + f; + s2 = S{f: 0}; + s2; + } + fun t1() { + let R{s1: S{f: f},s2: s2}; + f = &0; + f; + s2 = &S{f: 0}; + s2; + } + fun t2() { + let R{s1: S{f: f},s2: s2}; + f = &mut 0; + f; + s2 = &mut S{f: 0}; + s2; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/declare_with_type_annot.exp b/third_party/move/move-compiler-v2/tests/checking/typing/declare_with_type_annot.exp index 1963ff3280348..c92de114750b8 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/declare_with_type_annot.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/declare_with_type_annot.exp @@ -31,8 +31,18 @@ module 0x8675309::M { } private fun t0() { { - let (x: u64, b: bool, M::R{ f }): (u64, bool, M::R); + let (x: u64, b: bool, M::R{ f }): (u64, bool, R); Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0() { + let (x,b,R{f: f}); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/derefrence.exp b/third_party/move/move-compiler-v2/tests/checking/typing/derefrence.exp index bf84fdebe47b4..b31e5493c2008 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/derefrence.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/derefrence.exp @@ -2,24 +2,48 @@ module 0x8675309::M { struct S { f: u64, - x: M::X, + x: X, } struct X { dummy_field: bool, } - private fun t0(x: &u64,x_mut: &mut u64,s: &M::S,s_mut: &mut M::S) { + private fun t0(x: &u64,x_mut: &mut u64,s: &S,s_mut: &mut S) { Deref(x); Deref(x_mut); Deref(s); - Deref(Borrow(Immutable)(select M::S.f<&M::S>(s))); - select M::S.f<&M::S>(s); - Deref(Borrow(Immutable)(select M::S.x<&M::S>(s))); + Deref(Borrow(Immutable)(select M::S.f<&S>(s))); + 
select M::S.f<&S>(s); + Deref(Borrow(Immutable)(select M::S.x<&S>(s))); Deref(s_mut); - Deref(Borrow(Immutable)(select M::S.f<&mut M::S>(s_mut))); - Deref(Borrow(Mutable)(select M::S.f<&mut M::S>(s_mut))); - select M::S.f<&mut M::S>(s_mut); - Deref(Borrow(Immutable)(select M::S.x<&mut M::S>(s_mut))); - Deref(Borrow(Mutable)(select M::S.x<&mut M::S>(s_mut))); + Deref(Borrow(Immutable)(select M::S.f<&mut S>(s_mut))); + Deref(Borrow(Mutable)(select M::S.f<&mut S>(s_mut))); + select M::S.f<&mut S>(s_mut); + Deref(Borrow(Immutable)(select M::S.x<&mut S>(s_mut))); + Deref(Borrow(Mutable)(select M::S.x<&mut S>(s_mut))); Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has copy, drop { + f: u64, + x: X, + } + struct X has copy, drop { + } + fun t0(x: &u64, x_mut: &mut u64, s: &S, s_mut: &mut S) { + *x; + *x_mut; + *s; + *&s.f; + s.f; + *&s.x; + *s_mut; + *&s_mut.f; + *&mut s_mut.f; + s_mut.f; + *&s_mut.x; + *&mut s_mut.x; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/dummy_field.exp b/third_party/move/move-compiler-v2/tests/checking/typing/dummy_field.exp index 31d482ecaceb0..83cbf7d25cbf8 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/dummy_field.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/dummy_field.exp @@ -7,32 +7,58 @@ module 0x42::test { dummy_field: bool, } public entry fun test(addr: address) - acquires test::R(*) + acquires R(*) { { - let test::R{ dummy_field: _dummy_field } = MoveFrom(addr); + let test::R{ dummy_field: _dummy_field } = MoveFrom(addr); Tuple() } } private fun test2(): bool { { - let r: test::R = pack test::R(true); - select test::R.dummy_field(r) + let r: R = pack test::R(true); + select test::R.dummy_field(r) } } public entry fun test3(addr: address) - acquires test::T(*) + acquires T(*) { { - let test::T{ dummy_field: _ } = MoveFrom(addr); + let test::T{ dummy_field: _ } = MoveFrom(addr); Tuple() } } 
public entry fun test4(s: &signer) { { - let r: test::T = pack test::T(false); - MoveTo(s, r); + let r: T = pack test::T(false); + MoveTo(s, r); Tuple() } } } // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + struct T has drop, store, key { + } + struct R has drop, store, key { + } + public entry fun test(addr: address) + acquires R + { + let R{} = move_from(addr); + } + fun test2(): bool { + let r = R{}; + r.dummy_field + } + public entry fun test3(addr: address) + acquires T + { + let T{} = move_from(addr); + } + public entry fun test4(s: &signer) { + let r = T{}; + move_to(s, r); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/entry_on_any_vis.exp b/third_party/move/move-compiler-v2/tests/checking/typing/entry_on_any_vis.exp index cd2aba36667c8..57b08d2aad09a 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/entry_on_any_vis.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/entry_on_any_vis.exp @@ -10,3 +10,13 @@ module 0x2::M { Tuple() } } // end 0x2::M + +// -- Sourcified model before bytecode pipeline +module 0x2::M { + entry fun f1() { + } + public entry fun f2() { + } + friend entry fun f3() { + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/entry_signature_no_warning.exp b/third_party/move/move-compiler-v2/tests/checking/typing/entry_signature_no_warning.exp index 241ce607db4dd..314382987eb04 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/entry_signature_no_warning.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/entry_signature_no_warning.exp @@ -9,13 +9,31 @@ module 0x42::M { public entry fun signer_ref(_: &signer) { Tuple() } - public entry fun struct_arg(_: M::CoolStruct) { + public entry fun struct_arg(_: CoolStruct) { Tuple() } - public entry fun struct_ret(): M::CoolStruct { + public entry fun struct_ret(): CoolStruct { pack M::CoolStruct(false) } public entry fun u64_ret(): u64 { 
0 } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct CoolStruct has drop { + } + public entry fun late_signer(_u: u64, _s: signer) { + } + public entry fun signer_ref(_: &signer) { + } + public entry fun struct_arg(_: CoolStruct) { + } + public entry fun struct_ret(): CoolStruct { + CoolStruct{} + } + public entry fun u64_ret(): u64 { + 0 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/eq.exp b/third_party/move/move-compiler-v2/tests/checking/typing/eq.exp index e36049a6df034..66f11852a5790 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/eq.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/eq.exp @@ -1,7 +1,7 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { - struct G { - f: #0, + struct G { + f: T, } struct R { f: u64, @@ -9,7 +9,7 @@ module 0x8675309::M { struct S { u: u64, } - private fun t0(r: &M::R,r_mut: &mut M::R,s: M::S,s_ref: &M::S,s_mut: &mut M::S) { + private fun t0(r: &R,r_mut: &mut R,s: S,s_ref: &S,s_mut: &mut S) { false; false; false; @@ -18,19 +18,55 @@ module 0x8675309::M { Eq(Borrow(Immutable)(0), Borrow(Immutable)(1)); false; false; - Eq(Borrow(Immutable)(s), s_ref); - Eq(Freeze(false)(Borrow(Mutable)(s)), s_ref); - Eq(Freeze(false)(Borrow(Mutable)(s)), Freeze(false)(s_mut)); - Eq(Borrow(Immutable)(s), Freeze(false)(s_mut)); - Eq(s_ref, Freeze(false)(s_mut)); - Eq(Freeze(false)(s_mut), Freeze(false)(s_mut)); - Eq(pack M::S(0), s); - Eq(r, r); - Eq(Freeze(false)(r_mut), Freeze(false)(r_mut)); - Eq(r, Freeze(false)(r_mut)); - Eq(Freeze(false)(r_mut), r); - Eq>(pack M::G(1), pack M::G(1)); - Eq>(pack M::G(1), pack M::G(1)); + Eq(Borrow(Immutable)(s), s_ref); + Eq(Freeze(false)(Borrow(Mutable)(s)), s_ref); + Eq(Freeze(false)(Borrow(Mutable)(s)), Freeze(false)(s_mut)); + Eq(Borrow(Immutable)(s), Freeze(false)(s_mut)); + Eq(s_ref, Freeze(false)(s_mut)); + Eq(Freeze(false)(s_mut), Freeze(false)(s_mut)); + Eq(pack M::S(0), s); + 
Eq(r, r); + Eq(Freeze(false)(r_mut), Freeze(false)(r_mut)); + Eq(r, Freeze(false)(r_mut)); + Eq(Freeze(false)(r_mut), r); + Eq>(pack M::G(1), pack M::G(1)); + Eq>(pack M::G(1), pack M::G(1)); Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct G has drop { + f: T, + } + struct R { + f: u64, + } + struct S has drop { + u: u64, + } + fun t0(r: &R, r_mut: &mut R, s: S, s_ref: &S, s_mut: &mut S) { + false; + false; + false; + false; + false; + &0 == &1; + false; + false; + &s == s_ref; + /*freeze*/&mut s == s_ref; + /*freeze*/&mut s == /*freeze*/s_mut; + &s == /*freeze*/s_mut; + s_ref == /*freeze*/s_mut; + /*freeze*/s_mut == /*freeze*/s_mut; + S{u: 0} == s; + r == r; + /*freeze*/r_mut == /*freeze*/r_mut; + r == /*freeze*/r_mut; + /*freeze*/r_mut == r; + G{f: 1} == G{f: 1}; + G{f: 1} == G{f: 1}; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/eq_inline.exp b/third_party/move/move-compiler-v2/tests/checking/typing/eq_inline.exp index 66175e1b1b2f8..4135375126682 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/eq_inline.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/eq_inline.exp @@ -16,3 +16,12 @@ module 0x42::m { Tuple() } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + inline fun foo(f: |&u64|) { + } + fun g() { + (); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/eq_inline_typed.exp b/third_party/move/move-compiler-v2/tests/checking/typing/eq_inline_typed.exp index c7449303ddf2c..d4ac8aa2042a4 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/eq_inline_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/eq_inline_typed.exp @@ -16,3 +16,12 @@ module 0x42::m { Tuple() } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + inline fun foo(f: |&u64|) { + } + fun g() { + (); + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/typing/eq_ref.exp b/third_party/move/move-compiler-v2/tests/checking/typing/eq_ref.exp index cd2f23dae1abf..c7ed238eae972 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/eq_ref.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/eq_ref.exp @@ -13,3 +13,16 @@ module 0x42::m { Tuple() } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + fun mut_ref_to_mut_ref(x: u64, y: u64) { + /*freeze*/&mut x == /*freeze*/&mut y; + } + fun mut_ref_to_ref(x: u64, y: u64) { + /*freeze*/&mut x == &y; + } + fun ref_to_ref(x: u64, y: u64) { + &x == &y; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/exp_list.exp b/third_party/move/move-compiler-v2/tests/checking/typing/exp_list.exp index 2545066f07559..4cc3f57c1d1fe 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/exp_list.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/exp_list.exp @@ -1,15 +1,30 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { - struct R { - f: #0, + struct R { + f: T, } struct S { dummy_field: bool, } - private fun t0(): (u64, M::S, M::R>) { - Tuple(0, pack M::S(false), pack M::R>(pack M::R(1))) + private fun t0(): (u64, S, R>) { + Tuple(0, pack M::S(false), pack M::R>(pack M::R(1))) } - private fun t1(s: &M::S,r: &mut M::R): (u64, &M::S, &mut M::R) { + private fun t1(s: &S,r: &mut R): (u64, &S, &mut R) { Tuple(0, s, r) } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: T, + } + struct S { + } + fun t0(): (u64, S, R>) { + (0, S{}, R>{f: R{f: 1}}) + } + fun t1(s: &S, r: &mut R): (u64, &S, &mut R) { + (0, s, r) + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/explicit_copy.exp b/third_party/move/move-compiler-v2/tests/checking/typing/explicit_copy.exp index 7d8da10d750b3..302b50e6dc637 100644 --- 
a/third_party/move/move-compiler-v2/tests/checking/typing/explicit_copy.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/explicit_copy.exp @@ -8,7 +8,7 @@ module 0x8675309::M { } private fun t() { { - let s: M::S = pack M::S(false); + let s: S = pack M::S(false); Copy(0); Copy(s); s; @@ -17,3 +17,18 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + struct S has copy, drop { + } + fun t() { + let s = S{}; + copy 0; + copy s; + s; + 0; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/explicit_move.exp b/third_party/move/move-compiler-v2/tests/checking/typing/explicit_move.exp index 27cbe3d3c17ea..ac1cb7ef24a48 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/explicit_move.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/explicit_move.exp @@ -10,9 +10,9 @@ module 0x8675309::M { { let u: u64 = 0; { - let s: M::S = pack M::S(false); + let s: S = pack M::S(false); { - let r: M::R = pack M::R(false); + let r: R = pack M::R(false); Move(u); Move(s); M::R{ dummy_field: _ } = Move(r); @@ -22,3 +22,19 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + struct S has drop { + } + fun t() { + let u = 0; + let s = S{}; + let r = R{}; + move u; + move s; + R{} = move r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins.exp b/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins.exp index 90546524be56a..4a0650b38a25c 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins.exp @@ -4,18 +4,18 @@ module 0x8675309::M { dummy_field: bool, } private fun t0(a: &signer) - acquires M::R(*) + acquires R(*) { { - let _: bool = exists(0x0); + let _: bool = 
exists(0x0); { - let (): () = MoveTo(a, pack M::R(false)); + let (): () = MoveTo(a, pack M::R(false)); { - let _: &M::R = BorrowGlobal(Immutable)(0x0); + let _: &R = BorrowGlobal(Immutable)(0x0); { - let _: &mut M::R = BorrowGlobal(Mutable)(0x0); + let _: &mut R = BorrowGlobal(Mutable)(0x0); { - let M::R{ dummy_field: _ } = MoveFrom(0x0); + let M::R{ dummy_field: _ } = MoveFrom(0x0); Tuple() } } @@ -24,18 +24,18 @@ module 0x8675309::M { } } private fun t1(a: &signer) - acquires M::R(*) + acquires R(*) { { - let _: bool = exists(0x0); + let _: bool = exists(0x0); { - let (): () = MoveTo(a, pack M::R(false)); + let (): () = MoveTo(a, pack M::R(false)); { - let _: &M::R = BorrowGlobal(Immutable)(0x0); + let _: &R = BorrowGlobal(Immutable)(0x0); { - let _: &mut M::R = BorrowGlobal(Mutable)(0x0); + let _: &mut R = BorrowGlobal(Mutable)(0x0); { - let M::R{ dummy_field: _ } = MoveFrom(0x0); + let M::R{ dummy_field: _ } = MoveFrom(0x0); Tuple() } } @@ -44,3 +44,27 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R has key { + } + fun t0(a: &signer) + acquires R + { + let _ = exists(0x0); + let () = move_to(a, R{}); + let _ = borrow_global(0x0); + let _ = borrow_global_mut(0x0); + let R{} = move_from(0x0); + } + fun t1(a: &signer) + acquires R + { + let _ = exists(0x0); + let () = move_to(a, R{}); + let _ = borrow_global(0x0); + let _ = borrow_global_mut(0x0); + let R{} = move_from(0x0); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins_inferred.exp b/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins_inferred.exp index 8e154566ebee7..8491eba0f62c7 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins_inferred.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins_inferred.exp @@ -4,10 +4,10 @@ module 0x42::m { addr: address, } public fun foo(input: address): address - 
acquires m::A(*) + acquires A(*) { { - let a: m::A = MoveFrom(input); + let a: A = MoveFrom(input); { let m::A{ addr } = a; addr @@ -15,3 +15,17 @@ module 0x42::m { } } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct A has key { + addr: address, + } + public fun foo(input: address): address + acquires A + { + let a = move_from(input); + let A{addr: addr} = a; + addr + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/hex_and_decimal_address.exp b/third_party/move/move-compiler-v2/tests/checking/typing/hex_and_decimal_address.exp index 579a7a863a3ef..7bbed33ec8cf2 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/hex_and_decimal_address.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/hex_and_decimal_address.exp @@ -3,10 +3,10 @@ module 0x7b::M { struct S { dummy_field: bool, } - public fun s(): M::S { + public fun s(): S { pack M::S(false) } - public fun take(_s: M::S) { + public fun take(_s: S) { Tuple() } } // end 0x7b::M @@ -19,3 +19,22 @@ module _0 { Tuple() } } // end _0 + +// -- Sourcified model before bytecode pipeline +module 0x7b::M { + struct S has copy, drop { + } + public fun s(): S { + S{} + } + public fun take(_s: S) { + } +} +script { + fun main() { + 0x7b::M::take(0x7b::M::s()); + 0x7b::M::take(0x7b::M::s()); + 0x7b::M::take(0x7b::M::s()); + 0x7b::M::take(0x7b::M::s()); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/if_branches_subtype.exp b/third_party/move/move-compiler-v2/tests/checking/typing/if_branches_subtype.exp index 9d0e1d8462b55..4efc4b81f3766 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/if_branches_subtype.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/if_branches_subtype.exp @@ -49,3 +49,17 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun t0(cond: bool, u: &u64, u_mut: &mut u64) { 
+ let _ = if (cond) u else /*freeze*/u_mut; + let _ = if (cond) /*freeze*/u_mut else u; + let _ = if (cond) /*freeze*/u_mut else /*freeze*/u_mut; + } + fun t1(cond: bool, u: &u64, u_mut: &mut u64) { + let (_,_) = if (cond) (u, u) else (/*freeze*/u_mut, /*freeze*/u_mut); + let (_,_) = if (cond) (/*freeze*/u_mut, u) else (u, /*freeze*/u_mut); + let (_,_) = if (cond) (u, /*freeze*/u_mut) else (/*freeze*/u_mut, u); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/if_condition.exp b/third_party/move/move-compiler-v2/tests/checking/typing/if_condition.exp index 554498b46c986..f0b20328e1276 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/if_condition.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/if_condition.exp @@ -25,3 +25,15 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun t0() { + if (true) (); + if (false) () + } + fun t1() { + if (true) (); + if (false) () + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/if_default_else.exp b/third_party/move/move-compiler-v2/tests/checking/typing/if_default_else.exp index 4794c6c8e1d7c..e3fd81b609527 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/if_default_else.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/if_default_else.exp @@ -24,3 +24,14 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun t0(cond: bool) { + if (cond) (); + let () = if (cond) (); + let () = if (cond) { + 0; + }; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/if_matched_branches.exp b/third_party/move/move-compiler-v2/tests/checking/typing/if_matched_branches.exp index 780bf81624ba8..1ddad9454c1b9 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/if_matched_branches.exp +++ 
b/third_party/move/move-compiler-v2/tests/checking/typing/if_matched_branches.exp @@ -45,7 +45,7 @@ module 0x8675309::M { } else { Tuple(1, true) }; - (_: u64, _: u64, _: &u64, M::R{ dummy_field: _ }): (u64, u64, &u64, M::R) = if cond { + (_: u64, _: u64, _: &u64, M::R{ dummy_field: _ }): (u64, u64, &u64, R) = if cond { Tuple(0, 0, Borrow(Immutable)(0), pack M::R(false)) } else { Tuple(1, 1, Borrow(Immutable)(1), pack M::R(false)) @@ -53,3 +53,23 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + fun t0(cond: bool) { + if (cond) (); + } + fun t1(cond: bool) { + if (cond) 0 else 0; + if (cond) false else false; + R{} = if (cond) R{} else R{}; + if (cond) &0 else &1; + if (cond) &mut 0 else &mut 1; + } + fun t2(cond: bool) { + if (cond) (0, false) else (1, true); + (_,_,_,R{}) = if (cond) (0, 0, &0, R{}) else (1, 1, &1, R{}); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field.exp b/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field.exp index 091162a89bff2..cc51779b1ed1b 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field.exp @@ -3,7 +3,17 @@ module 0x8675309::M { struct S { f: u64, } - private fun t0(s: &M::S,s_mut: &mut M::S): (u64, u64) { - Tuple(select M::S.f<&M::S>(s), select M::S.f<&mut M::S>(s_mut)) + private fun t0(s: &S,s_mut: &mut S): (u64, u64) { + Tuple(select M::S.f<&S>(s), select M::S.f<&mut S>(s_mut)) } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S { + f: u64, + } + fun t0(s: &S, s_mut: &mut S): (u64, u64) { + (s.f, s_mut.f) + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_chain.exp 
b/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_chain.exp index a295429bd20ef..5b32bfee85bac 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_chain.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_chain.exp @@ -1,19 +1,38 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { struct X1 { - x2: M::X2, + x2: X2, } struct X2 { - x3: M::X3, + x3: X3, } struct X3 { f: u64, } - private fun t0(x1: &M::X1,x1_mut: &mut M::X1,x2: &M::X2,x2_mut: &mut M::X2) { - select M::X3.f(select M::X2.x3(select M::X1.x2<&M::X1>(x1))); - select M::X3.f(select M::X2.x3(select M::X1.x2<&mut M::X1>(x1_mut))); - select M::X3.f(select M::X2.x3<&M::X2>(x2)); - select M::X3.f(select M::X2.x3<&mut M::X2>(x2_mut)); + private fun t0(x1: &X1,x1_mut: &mut X1,x2: &X2,x2_mut: &mut X2) { + select M::X3.f(select M::X2.x3(select M::X1.x2<&X1>(x1))); + select M::X3.f(select M::X2.x3(select M::X1.x2<&mut X1>(x1_mut))); + select M::X3.f(select M::X2.x3<&X2>(x2)); + select M::X3.f(select M::X2.x3<&mut X2>(x2_mut)); Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct X1 { + x2: X2, + } + struct X2 { + x3: X3, + } + struct X3 { + f: u64, + } + fun t0(x1: &X1, x1_mut: &mut X1, x2: &X2, x2_mut: &mut X2) { + x1.x2.x3.f; + x1_mut.x2.x3.f; + x2.x3.f; + x2_mut.x3.f; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_complex_root_expr.exp b/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_complex_root_expr.exp index 6a875a79a8dd8..f3d40cef51c37 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_complex_root_expr.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_complex_root_expr.exp @@ -3,31 +3,48 @@ module 0x8675309::M { struct S { f: u64, } 
- private fun t0(cond: bool,s: &M::S,s_mut: &mut M::S) { - select M::S.f<&M::S>(if cond { + private fun t0(cond: bool,s: &S,s_mut: &mut S) { + select M::S.f<&S>(if cond { s } else { s }); - select M::S.f<&M::S>(if cond { + select M::S.f<&S>(if cond { Freeze(false)(s_mut) } else { s }); - select M::S.f<&M::S>(if cond { + select M::S.f<&S>(if cond { s } else { Freeze(false)(s_mut) }); - select M::S.f<&mut M::S>(if cond { + select M::S.f<&mut S>(if cond { s_mut } else { s_mut }); - select M::S.f<&M::S>({ - let s: M::S = pack M::S(0); + select M::S.f<&S>({ + let s: S = pack M::S(0); Borrow(Immutable)(s) }); Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has drop { + f: u64, + } + fun t0(cond: bool, s: &S, s_mut: &mut S) { + (if (cond) s else s).f; + (if (cond) /*freeze*/s_mut else s).f; + (if (cond) s else /*freeze*/s_mut).f; + (if (cond) s_mut else s_mut).f; + { + let s = S{f: 0}; + &s + }.f; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_non_ref_non_local_root.exp b/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_non_ref_non_local_root.exp index 71ba5235e8226..f94c9cb4a3e05 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_non_ref_non_local_root.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_non_ref_non_local_root.exp @@ -3,21 +3,21 @@ module 0x8675309::M { struct S { f: u64, } - private fun bar(): M::S { + private fun bar(): S { pack M::S(0) } - private fun foo(): &M::S { + private fun foo(): &S { Abort(0) } - private fun t0(cond: bool,_s: M::S) { - select M::S.f<&M::S>(M::foo()); - select M::S.f(M::bar()); - select M::S.f<&M::S>(if cond { + private fun t0(cond: bool,_s: S) { + select M::S.f<&S>(M::foo()); + select M::S.f(M::bar()); + select M::S.f<&S>(if cond { M::foo() } else { Borrow(Immutable)(M::bar()) }); - 
select M::S.f(if cond { + select M::S.f(if cond { Deref(M::foo()) } else { M::bar() @@ -25,3 +25,22 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has copy, drop { + f: u64, + } + fun bar(): S { + S{f: 0} + } + fun foo(): &S { + abort 0 + } + fun t0(cond: bool, _s: S) { + foo().f; + bar().f; + (if (cond) foo() else &bar()).f; + (if (cond) *foo() else bar()).f; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_non_ref_root.exp b/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_non_ref_root.exp index 32221dc6ce4a4..730ca3e3600da 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_non_ref_root.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/implicit_deref_borrow_field_non_ref_root.exp @@ -3,9 +3,9 @@ module 0x8675309::M { struct S { f: u64, } - private fun t0(cond: bool,s: M::S) { - select M::S.f(s); - select M::S.f(if cond { + private fun t0(cond: bool,s: S) { + select M::S.f(s); + select M::S.f(if cond { pack M::S(0) } else { pack M::S(1) @@ -13,3 +13,14 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has drop { + f: u64, + } + fun t0(cond: bool, s: S) { + s.f; + (if (cond) S{f: 0} else S{f: 1}).f; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda.exp b/third_party/move/move-compiler-v2/tests/checking/typing/lambda.exp index c3d568872e95c..ebcdc1fcfcfdb 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/lambda.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda.exp @@ -48,3 +48,11 @@ error: tuple type `()` is not allowed as a type argument (type was inferred) │ ^ │ = required by instantiating type parameter `T` of function `foreach` + +error: function type 
`|u64|u64` is not allowed as a field type + ┌─ tests/checking/typing/lambda.move:81:12 + │ +81 │ f: |u64|u64, // expected lambda not allowed + │ ^^^^^^^^ + │ + = required by declaration of field `f` diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda.move b/third_party/move/move-compiler-v2/tests/checking/typing/lambda.move index c5e29275c3d28..92e24c0c36a90 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/lambda.move +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda.move @@ -83,18 +83,17 @@ module 0x8675309::M { public fun fun_arg_lambda_not_allowed(x: |u64|) {} // expected lambda not allowed - public inline fun macro_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + public inline fun inline_result_lambda_not_allowed(): |u64| { // expected lambda not allowed abort (1) } public fun fun_result_lambda_not_allowed(): |u64| { // expected lambda not allowed abort (1) } - } module 0x1::XVector { - public fun length(v: &vector): u64 { abort(1) } - public fun is_empty(v: &vector): bool { abort(1) } - public fun borrow(v: &vector, i: u64): &T { abort(1) } - public fun pop_back(v: &mut vector): T { abort(1) } + public fun length(_v: &vector): u64 { abort(1) } + public fun is_empty(_v: &vector): bool { abort(1) } + public fun borrow(_v: &vector, _i: u64): &T { abort(1) } + public fun pop_back(_v: &mut vector): T { abort(1) } } diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda2.exp b/third_party/move/move-compiler-v2/tests/checking/typing/lambda2.exp new file mode 100644 index 0000000000000..4f27955dd4e6f --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda2.exp @@ -0,0 +1,25 @@ + +Diagnostics: +error: Only inline functions may have function-typed parameters, but non-inline function `M::fun_arg_lambda_not_allowed` has a function parameter: + ┌─ tests/checking/typing/lambda2.move:84:16 + │ +84 │ public fun fun_arg_lambda_not_allowed(x: 
|u64|) {} // expected lambda not allowed + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^ - Parameter `x` has function-valued type `|u64|`. + +warning: Unused parameter `x`. Consider removing or prefixing with an underscore: `_x` + ┌─ tests/checking/typing/lambda2.move:84:43 + │ +84 │ public fun fun_arg_lambda_not_allowed(x: |u64|) {} // expected lambda not allowed + │ ^ + +error: Functions may not return function-typed values, but function `M::inline_result_lambda_not_allowed` return type is the function type `|u64|`: + ┌─ tests/checking/typing/lambda2.move:86:59 + │ +86 │ public inline fun inline_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + │ ^^^^^ + +error: Functions may not return function-typed values, but function `M::fun_result_lambda_not_allowed` return type is the function type `|u64|`: + ┌─ tests/checking/typing/lambda2.move:89:49 + │ +89 │ public fun fun_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + │ ^^^^^ diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda2.move b/third_party/move/move-compiler-v2/tests/checking/typing/lambda2.move new file mode 100644 index 0000000000000..ccaa472c2e2c2 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda2.move @@ -0,0 +1,99 @@ +module 0x8675309::M { + + // *** NOTE: THIS TEST FILE IS DERIVED FROM lambda.move by commenting out code which has errors + // successfully flagged by move-compiler-v2, so that we can check for errors on other lines + // which may be shadowed by those errors. + // + // We keep the commented code so that the error line numbers line up. + // + // + // ... code removed here to allow above message ... 
+ + // public inline fun foreach(v: &vector, action: |&T|) { // expected to be not implemented + // let i = 0; + // while (i < XVector::length(v)) { + // action(XVector::borrow(v, i)); + // i = i + 1; + // } + // } + + // public fun correct_foreach() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| sum = sum + *e) // expected to be not implemented + // } + + // public fun correct_reduce(): u64 { + // let v = vector[1, 2, 3]; + // reduce(v, 0, |t, r| t + r) + // } + + // public fun corrected_nested() { + // let v = vector[vector[1,2], vector[3]]; + // let sum = 0; + // foreach(&v, |e| sum = sum + reduce!(*e, 0, |t, r| t + r)); + // } + + // public inline fun wrong_local_call_arg_count(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // action(XVector::borrow(v, i), i); // expected to have wrong argument count + // i = i + 1; + // } + // } + + // public inline fun wrong_local_call_arg_type(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // action(i); // expected to have wrong argument type + // i = i + 1; + // } + // } + + // public inline fun wrong_local_call_result_type(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // i = i + action(XVector::borrow(v, i)); // expected to have wrong result type + // } + // } + + // public fun wrong_local_call_no_fun(x: u64) { + // x(1) // expected to be not a function + // } + + // public fun wrong_lambda_inferred_type() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| sum = sum + e) // expected to cannot infer type + // } + + // public fun wrong_lambda_result_type() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| { sum = sum + *e; *e }) // expected to have wrong result type of lambda + // } + + public fun lambda_not_allowed() { + let _x = |i| i + 1; // expected lambda not allowed + } + + // struct FieldFunNotAllowed { + // f: |u64|u64, // expected lambda not 
allowed + // } + + public fun fun_arg_lambda_not_allowed(x: |u64|) {} // expected lambda not allowed + + public inline fun inline_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + abort (1) + } + public fun fun_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + abort (1) + } +} + +// module 0x1::XVector { +// public fun length(_v: &vector): u64 { abort(1) } +// public fun is_empty(_v: &vector): bool { abort(1) } +// public fun borrow(_v: &vector, _i: u64): &T { abort(1) } +// public fun pop_back(_v: &mut vector): T { abort(1) } +// } diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda3.exp b/third_party/move/move-compiler-v2/tests/checking/typing/lambda3.exp new file mode 100644 index 0000000000000..f89935a0c3358 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda3.exp @@ -0,0 +1,24 @@ +// -- Model dump before bytecode pipeline +module 0x8675309::M { + public fun lambda_not_allowed() { + { + let _x: |u64|u64 = |i: u64| Add(i, 1); + Tuple() + } + } +} // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + public fun lambda_not_allowed() { + let _x = |i| i + 1; + } +} + + +Diagnostics: +error: Function-typed values not yet supported except as parameters to calls to inline functions + ┌─ tests/checking/typing/lambda3.move:77:18 + │ +77 │ let _x = |i| i + 1; // expected lambda not allowed + │ ^^^^^^^^^ diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda3.move b/third_party/move/move-compiler-v2/tests/checking/typing/lambda3.move new file mode 100644 index 0000000000000..bc302cc3ee3be --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda3.move @@ -0,0 +1,99 @@ +module 0x8675309::M { + + // *** NOTE: THIS TEST FILE IS DERIVED FROM lambda2.move by commenting out code which has errors + // successfully flagged by move-compiler-v2, so that we can check for errors on other lines + // which 
may be shadowed by those errors. + // + // We keep the commented code so that the error line numbers line up. + // + // + // ... code removed here to allow above message ... + + // public inline fun foreach(v: &vector, action: |&T|) { // expected to be not implemented + // let i = 0; + // while (i < XVector::length(v)) { + // action(XVector::borrow(v, i)); + // i = i + 1; + // } + // } + + // public fun correct_foreach() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| sum = sum + *e) // expected to be not implemented + // } + + // public fun correct_reduce(): u64 { + // let v = vector[1, 2, 3]; + // reduce(v, 0, |t, r| t + r) + // } + + // public fun corrected_nested() { + // let v = vector[vector[1,2], vector[3]]; + // let sum = 0; + // foreach(&v, |e| sum = sum + reduce!(*e, 0, |t, r| t + r)); + // } + + // public inline fun wrong_local_call_arg_count(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // action(XVector::borrow(v, i), i); // expected to have wrong argument count + // i = i + 1; + // } + // } + + // public inline fun wrong_local_call_arg_type(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // action(i); // expected to have wrong argument type + // i = i + 1; + // } + // } + + // public inline fun wrong_local_call_result_type(v: &vector, action: |&T|) { + // let i = 0; + // while (i < XVector::length(v)) { + // i = i + action(XVector::borrow(v, i)); // expected to have wrong result type + // } + // } + + // public fun wrong_local_call_no_fun(x: u64) { + // x(1) // expected to be not a function + // } + + // public fun wrong_lambda_inferred_type() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| sum = sum + e) // expected to cannot infer type + // } + + // public fun wrong_lambda_result_type() { + // let v = vector[1, 2, 3]; + // let sum = 0; + // foreach(&v, |e| { sum = sum + *e; *e }) // expected to have wrong result type of lambda + // 
} + + public fun lambda_not_allowed() { + let _x = |i| i + 1; // expected lambda not allowed + } + + // struct FieldFunNotAllowed { + // f: |u64|u64, // expected lambda not allowed + // } + + // public fun fun_arg_lambda_not_allowed(x: |u64|) {} // expected lambda not allowed + + // public inline fun macro_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + // abort (1) + // } + // public fun fun_result_lambda_not_allowed(): |u64| { // expected lambda not allowed + // abort (1) + // } +} + +// module 0x1::XVector { +// public fun length(_v: &vector): u64 { abort(1) } +// public fun is_empty(_v: &vector): bool { abort(1) } +// public fun borrow(_v: &vector, _i: u64): &T { abort(1) } +// public fun pop_back(_v: &mut vector): T { abort(1) } +// } diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda.exp b/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda.exp new file mode 100644 index 0000000000000..d16f979db0562 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda.exp @@ -0,0 +1,7 @@ + +Diagnostics: +error: Functions may not return function-typed values, but function `M::foreach_caller` has a parameter of function type with function-typed result: + ┌─ tests/checking/typing/lambda_returning_lambda.move:12:23 + │ +12 │ public inline fun foreach_caller(v: &vector, action: ||(|&T|)) { + │ ^^^^^^^^^^^^^^ ------ Parameter `action` has type `|()||&T|`, which has function type `|&T|` as a function result type diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda.move b/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda.move new file mode 100644 index 0000000000000..bd93f64575ac6 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda.move @@ -0,0 +1,28 @@ +module 0x8675309::M { + use 0x1::XVector; + + public inline fun foreach(v: 
&vector, action: |&T|) { // expected to be not implemented + let i = 0; + while (i < XVector::length(v)) { + action(XVector::borrow(v, i)); + i = i + 1; + } + } + + public inline fun foreach_caller(v: &vector, action: ||(|&T|)) { + foreach(v, action()) + } + + public fun whacky_foreach() { + let v = vector[1, 2, 3]; + let sum = 0; + foreach_caller(&v, ||(|e| sum = sum + *e)) // expected to be not implemented + } +} + +module 0x1::XVector { + public fun length(_v: &vector): u64 { abort(1) } + public fun is_empty(_v: &vector): bool { abort(1) } + public fun borrow(_v: &vector, _i: u64): &T { abort(1) } + public fun pop_back(_v: &mut vector): T { abort(1) } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda2.exp b/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda2.exp new file mode 100644 index 0000000000000..7fccdd1f60f5f --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda2.exp @@ -0,0 +1,7 @@ + +Diagnostics: +error: Functions may not return function-typed values, but function `M::foreach_caller2` has a parameter of function type with function-typed result: + ┌─ tests/checking/typing/lambda_returning_lambda2.move:3:23 + │ +3 │ public inline fun foreach_caller2(_v: &vector, _action: ||(|&T|)) { + │ ^^^^^^^^^^^^^^^ ------- Parameter `_action` has type `|()||&T|`, which has function type `|&T|` as a function result type diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda2.move b/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda2.move new file mode 100644 index 0000000000000..a77ab5421fcd8 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda_returning_lambda2.move @@ -0,0 +1,12 @@ +module 0x8675309::M { + + public inline fun foreach_caller2(_v: &vector, _action: ||(|&T|)) { + abort(1) + } + + public fun whacky_foreach2() { + let v = vector[1, 2, 3]; + 
let sum = 0; + foreach_caller2(&v, ||(|e| sum = sum + *e)) // expected to be not implemented + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/lambda_typed.exp b/third_party/move/move-compiler-v2/tests/checking/typing/lambda_typed.exp index 351d607a7b2b7..7817686444d8f 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/lambda_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/lambda_typed.exp @@ -35,3 +35,11 @@ error: cannot pass `|&u64|u64` to a function which expects argument of type `|&u │ 73 │ foreach(&v, |e: &u64| { sum = sum + *e; *e }) // expected to have wrong result type of lambda │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: function type `|u64|u64` is not allowed as a field type + ┌─ tests/checking/typing/lambda_typed.move:81:12 + │ +81 │ f: |u64|u64, // expected lambda not allowed + │ ^^^^^^^^ + │ + = required by declaration of field `f` diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/large_binop.exp b/third_party/move/move-compiler-v2/tests/checking/typing/large_binop.exp index f73d7143c36ca..3f70f2058995c 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/large_binop.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/large_binop.exp @@ -5,3 +5,10 @@ module _0 { Tuple() } } // end _0 + +// -- Sourcified model before bytecode pipeline +script { + fun main() { + true; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/loop_body.exp b/third_party/move/move-compiler-v2/tests/checking/typing/loop_body.exp index c38a819cc31da..1c34ecf7e75a0 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/loop_body.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/loop_body.exp @@ -59,3 +59,38 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun t0() { + loop () + } + fun t1() { + loop () + } + fun t2() { + loop () + } + 
fun t3() { + loop { + 0; + } + } + fun t4() { + loop if (true) () + } + fun t5() { + loop break; + loop break; + loop return () + } + fun t6() { + loop continue + } + fun t7() { + loop continue + } + fun t8() { + loop loop break + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/loop_result_type.exp b/third_party/move/move-compiler-v2/tests/checking/typing/loop_result_type.exp index 6e4b1c4bce1ba..10ffc738309aa 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/loop_result_type.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/loop_result_type.exp @@ -52,3 +52,35 @@ module 0x2::M { } } } // end 0x2::M + +// -- Sourcified model before bytecode pipeline +module 0x2::X { + struct R { + } +} +module 0x2::M { + use 0x2::X; + fun foo(_x: u64) { + } + fun t0(): X::R { + loop () + } + fun t1(): u64 { + loop { + 0; + } + } + fun t2() { + foo(loop ()) + } + fun t3(): X::R { + let x = loop { + 0; + }; + x + } + fun t4() { + let () = loop break; + let () = loop if (false) break; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/main_arguments.exp b/third_party/move/move-compiler-v2/tests/checking/typing/main_arguments.exp index f48682cc04cc9..b6b7ad1aeadb6 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/main_arguments.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/main_arguments.exp @@ -4,3 +4,9 @@ module _0 { Tuple() } } // end _0 + +// -- Sourcified model before bytecode pipeline +script { + fun main(_sender: signer, _a: address, _x8: u8, _x64: u64, _x128: u128, _b: bool, _v8: vector, _va: vector
, _v64: vector, _v128: vector, _vb: vector, _vv8: vector>, _vva: vector>, _vv64: vector>, _vv128: vector>, _vvb: vector>, _vvv8: vector>>, _vvva: vector>>, _vvv64: vector>>, _vvv128: vector>>, _vvvb: vector>>, _vvvv8: vector>>>, _vvvva: vector>>>, _vvvv64: vector>>>, _vvvv128: vector>>>, _vvvvb: vector>>>) { + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/main_arguments_various_caes.exp b/third_party/move/move-compiler-v2/tests/checking/typing/main_arguments_various_caes.exp index a729ac9e9d988..96d0c3ab89dd8 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/main_arguments_various_caes.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/main_arguments_various_caes.exp @@ -1,7 +1,7 @@ // -- Model dump before bytecode pipeline module 0x42::M { - struct Cup { - f1: #0, + struct Cup { + f1: T, } struct R { dummy_field: bool, @@ -9,13 +9,33 @@ module 0x42::M { struct S { dummy_field: bool, } - public fun eat(r: M::R) { + public fun eat(r: R) { M::R{ dummy_field: _ } = r } } // end 0x42::M module _0 { use 0x42::M::{S, R, Cup}; // resolved as: 0x42::M - private fun main(_s: &signer,_a0: #0,_a1: vector<#0>,_a2: vector>,_a3: M::S,_a4: M::R,_a5: M::Cup,_a6: M::Cup<#0>,_a7: vector) { + private fun main(_s: &signer,_a0: T,_a1: vector,_a2: vector>,_a3: M::S,_a4: M::R,_a5: M::Cup,_a6: M::Cup,_a7: vector) { Abort(0) } } // end _0 + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct Cup { + f1: T, + } + struct R { + } + struct S { + } + public fun eat(r: R) { + R{} = r + } +} +script { + use 0x42::M; + fun main(_s: &signer, _a0: T, _a1: vector, _a2: vector>, _a3: M::S, _a4: M::R, _a5: M::Cup, _a6: M::Cup, _a7: vector) { + abort 0 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/main_call_entry.exp b/third_party/move/move-compiler-v2/tests/checking/typing/main_call_entry.exp index f8cdb6a2f8ceb..ca739da5a5a17 100644 --- 
a/third_party/move/move-compiler-v2/tests/checking/typing/main_call_entry.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/main_call_entry.exp @@ -9,3 +9,14 @@ module _0 { X::foo() } } // end _0 + +// -- Sourcified model before bytecode pipeline +module 0x2::X { + public entry fun foo() { + } +} +script { + fun main() { + 0x2::X::foo() + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/main_with_type_parameters.exp b/third_party/move/move-compiler-v2/tests/checking/typing/main_with_type_parameters.exp index cb4ca6a2d2269..4ce4ef16c689a 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/main_with_type_parameters.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/main_with_type_parameters.exp @@ -4,3 +4,9 @@ module _0 { Tuple() } } // end _0 + +// -- Sourcified model before bytecode pipeline +script { + fun main() { + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/module_call_entry_function.exp b/third_party/move/move-compiler-v2/tests/checking/typing/module_call_entry_function.exp index cf5eb639e99ea..e0bbe04193eb2 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/module_call_entry_function.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/module_call_entry_function.exp @@ -49,3 +49,49 @@ module 0x2::M { M::f_script() } } // end 0x2::M + +// -- Sourcified model before bytecode pipeline +module 0x2::Y { + friend 0x2::M; + friend fun f_friend() { + } +} +module 0x2::X { + public fun f_public() { + } + public entry fun f_script() { + } +} +module 0x2::M { + use 0x2::Y; + use 0x2::X; + friend fun f_friend() { + } + public fun f_public() { + } + public entry fun f_script() { + } + fun f_private() { + } + public entry fun f_script_call_friend() { + Y::f_friend() + } + public entry fun f_script_call_public() { + X::f_public() + } + public entry fun f_script_call_script() { + X::f_script() + } + public entry fun f_script_call_self_friend() 
{ + f_friend() + } + public entry fun f_script_call_self_private() { + f_private() + } + public entry fun f_script_call_self_public() { + f_public() + } + public entry fun f_script_call_self_script() { + f_script() + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/move_from_type_argument.exp b/third_party/move/move-compiler-v2/tests/checking/typing/move_from_type_argument.exp index 8e154566ebee7..8491eba0f62c7 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/move_from_type_argument.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/move_from_type_argument.exp @@ -4,10 +4,10 @@ module 0x42::m { addr: address, } public fun foo(input: address): address - acquires m::A(*) + acquires A(*) { { - let a: m::A = MoveFrom(input); + let a: A = MoveFrom(input); { let m::A{ addr } = a; addr @@ -15,3 +15,17 @@ module 0x42::m { } } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct A has key { + addr: address, + } + public fun foo(input: address): address + acquires A + { + let a = move_from(input); + let A{addr: addr} = a; + addr + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/mutable_eq_and_neq.exp b/third_party/move/move-compiler-v2/tests/checking/typing/mutable_eq_and_neq.exp index 21143c8c8fed3..1de8c0247dc61 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/mutable_eq_and_neq.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/mutable_eq_and_neq.exp @@ -4,14 +4,14 @@ module 0x8675309::M { f: bool, } struct P { - b1: M::B, - b2: M::B, + b1: B, + b2: B, } struct S { f: u64, g: u64, } - private fun t(r1: &mut u64,r2: &mut u64,s: &mut M::S) { + private fun t(r1: &mut u64,r2: &mut u64,s: &mut S) { Eq(Freeze(false)(r1), Freeze(false)(r1)); Eq(Freeze(false)(r1), Freeze(false)(r2)); Eq(Freeze(false)(r2), Freeze(false)(r2)); @@ -20,26 +20,67 @@ module 0x8675309::M { Neq(Freeze(false)(r1), Freeze(false)(r2)); 
Neq(Freeze(false)(r2), Freeze(false)(r2)); Neq(Freeze(false)(r2), Freeze(false)(r2)); - Eq(Freeze(false)(Borrow(Mutable)(select M::S.f<&mut M::S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.f<&mut M::S>(s)))); - Eq(Freeze(false)(Borrow(Mutable)(select M::S.f<&mut M::S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.g<&mut M::S>(s)))); - Eq(Freeze(false)(Borrow(Mutable)(select M::S.g<&mut M::S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.f<&mut M::S>(s)))); - Eq(Freeze(false)(Borrow(Mutable)(select M::S.g<&mut M::S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.g<&mut M::S>(s)))); - Neq(Freeze(false)(Borrow(Mutable)(select M::S.f<&mut M::S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.f<&mut M::S>(s)))); - Neq(Freeze(false)(Borrow(Mutable)(select M::S.f<&mut M::S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.g<&mut M::S>(s)))); - Neq(Freeze(false)(Borrow(Mutable)(select M::S.g<&mut M::S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.f<&mut M::S>(s)))); - Neq(Freeze(false)(Borrow(Mutable)(select M::S.g<&mut M::S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.g<&mut M::S>(s)))); + Eq(Freeze(false)(Borrow(Mutable)(select M::S.f<&mut S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.f<&mut S>(s)))); + Eq(Freeze(false)(Borrow(Mutable)(select M::S.f<&mut S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.g<&mut S>(s)))); + Eq(Freeze(false)(Borrow(Mutable)(select M::S.g<&mut S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.f<&mut S>(s)))); + Eq(Freeze(false)(Borrow(Mutable)(select M::S.g<&mut S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.g<&mut S>(s)))); + Neq(Freeze(false)(Borrow(Mutable)(select M::S.f<&mut S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.f<&mut S>(s)))); + Neq(Freeze(false)(Borrow(Mutable)(select M::S.f<&mut S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.g<&mut S>(s)))); + Neq(Freeze(false)(Borrow(Mutable)(select M::S.g<&mut S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.f<&mut S>(s)))); + 
Neq(Freeze(false)(Borrow(Mutable)(select M::S.g<&mut S>(s))), Freeze(false)(Borrow(Mutable)(select M::S.g<&mut S>(s)))); Tuple() } - private fun t1(p: &mut M::P) { + private fun t1(p: &mut P) { { - let comp: bool = Eq(Freeze(false)(Borrow(Mutable)(select M::P.b1<&mut M::P>(p))), Freeze(false)(Borrow(Mutable)(select M::P.b2<&mut M::P>(p)))); - select M::B.f(select M::P.b1<&mut M::P>(p)) = comp + let comp: bool = Eq(Freeze(false)(Borrow(Mutable)(select M::P.b1<&mut P>(p))), Freeze(false)(Borrow(Mutable)(select M::P.b2<&mut P>(p)))); + select M::B.f(select M::P.b1<&mut P>(p)) = comp } } - private fun t2(p: &mut M::P) { + private fun t2(p: &mut P) { { - let comp: bool = Neq(Freeze(false)(Borrow(Mutable)(select M::P.b1<&mut M::P>(p))), Freeze(false)(Borrow(Mutable)(select M::P.b2<&mut M::P>(p)))); - select M::B.f(select M::P.b1<&mut M::P>(p)) = comp + let comp: bool = Neq(Freeze(false)(Borrow(Mutable)(select M::P.b1<&mut P>(p))), Freeze(false)(Borrow(Mutable)(select M::P.b2<&mut P>(p)))); + select M::B.f(select M::P.b1<&mut P>(p)) = comp } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct B { + f: bool, + } + struct P { + b1: B, + b2: B, + } + struct S { + f: u64, + g: u64, + } + fun t(r1: &mut u64, r2: &mut u64, s: &mut S) { + /*freeze*/r1 == /*freeze*/r1; + /*freeze*/r1 == /*freeze*/r2; + /*freeze*/r2 == /*freeze*/r2; + /*freeze*/r2 == /*freeze*/r2; + /*freeze*/r1 != /*freeze*/r1; + /*freeze*/r1 != /*freeze*/r2; + /*freeze*/r2 != /*freeze*/r2; + /*freeze*/r2 != /*freeze*/r2; + /*freeze*/&mut s.f == /*freeze*/&mut s.f; + /*freeze*/&mut s.f == /*freeze*/&mut s.g; + /*freeze*/&mut s.g == /*freeze*/&mut s.f; + /*freeze*/&mut s.g == /*freeze*/&mut s.g; + /*freeze*/&mut s.f != /*freeze*/&mut s.f; + /*freeze*/&mut s.f != /*freeze*/&mut s.g; + /*freeze*/&mut s.g != /*freeze*/&mut s.f; + /*freeze*/&mut s.g != /*freeze*/&mut s.g; + } + fun t1(p: &mut P) { + let comp = /*freeze*/&mut p.b1 == /*freeze*/&mut p.b2; + p.b1.f = 
comp + } + fun t2(p: &mut P) { + let comp = /*freeze*/&mut p.b1 != /*freeze*/&mut p.b2; + p.b1.f = comp + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/mutate.exp b/third_party/move/move-compiler-v2/tests/checking/typing/mutate.exp index f293fbeeac9c4..8ddf9c8239941 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/mutate.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/mutate.exp @@ -3,10 +3,10 @@ module 0x8675309::M { struct S { f: u64, } - private fun bar(s: &mut M::S): &mut M::S { + private fun bar(s: &mut S): &mut S { s } - private fun baz(): M::S { + private fun baz(): S { pack M::S(0) } private fun foo(x: &mut u64): &mut u64 { @@ -14,21 +14,52 @@ module 0x8675309::M { } private fun t0() { Borrow(Mutable)(0) = 1; - Borrow(Mutable)(select M::S.f(pack M::S(0))) = 1; + Borrow(Mutable)(select M::S.f(pack M::S(0))) = 1; M::foo(Borrow(Mutable)(0)) = 1; - select M::S.f<&mut M::S>(M::bar(Borrow(Mutable)(pack M::S(0)))) = 1; - Borrow(Mutable)(select M::S.f<&mut M::S>(M::bar(Borrow(Mutable)(pack M::S(0))))) = 1; - select M::S.f(M::baz()) = 1; - Borrow(Mutable)(select M::S.f(M::baz())) = 1; + select M::S.f<&mut S>(M::bar(Borrow(Mutable)(pack M::S(0)))) = 1; + Borrow(Mutable)(select M::S.f<&mut S>(M::bar(Borrow(Mutable)(pack M::S(0))))) = 1; + select M::S.f(M::baz()) = 1; + Borrow(Mutable)(select M::S.f(M::baz())) = 1; Tuple() } private fun t1() { { - let r: &mut M::S = Borrow(Mutable)(pack M::S(0)); + let r: &mut S = Borrow(Mutable)(pack M::S(0)); r = pack M::S(1); - select M::S.f<&mut M::S>(r) = 1; - Borrow(Mutable)(select M::S.f<&mut M::S>(r)) = 1; + select M::S.f<&mut S>(r) = 1; + Borrow(Mutable)(select M::S.f<&mut S>(r)) = 1; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has copy, drop { + f: u64, + } + fun bar(s: &mut S): &mut S { + s + } + fun baz(): S { + S{f: 0} + } + fun foo(x: &mut u64): &mut u64 { + x + } + fun t0() { + 
*&mut 0 = 1; + *&mut S{f: 0}.f = 1; + *foo(&mut 0) = 1; + bar(&mut S{f: 0}).f = 1; + *&mut bar(&mut S{f: 0}).f = 1; + baz().f = 1; + *&mut baz().f = 1; + } + fun t1() { + let r = &mut S{f: 0}; + *r = S{f: 1}; + r.f = 1; + *&mut r.f = 1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/neq.exp b/third_party/move/move-compiler-v2/tests/checking/typing/neq.exp index c4c982bba8e20..00367877ed612 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/neq.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/neq.exp @@ -1,7 +1,7 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { - struct G { - f: #0, + struct G { + f: T, } struct R { f: u64, @@ -9,7 +9,7 @@ module 0x8675309::M { struct S { u: u64, } - private fun t0(r: &M::R,r_mut: &mut M::R,s: M::S,s_ref: &M::S,s_mut: &mut M::S) { + private fun t0(r: &R,r_mut: &mut R,s: S,s_ref: &S,s_mut: &mut S) { true; true; true; @@ -18,19 +18,55 @@ module 0x8675309::M { Neq(Borrow(Immutable)(0), Borrow(Immutable)(1)); true; true; - Neq(Borrow(Immutable)(s), s_ref); - Neq(Freeze(false)(Borrow(Mutable)(s)), s_ref); - Neq(Freeze(false)(Borrow(Mutable)(s)), Freeze(false)(s_mut)); - Neq(Borrow(Immutable)(s), Freeze(false)(s_mut)); - Neq(s_ref, Freeze(false)(s_mut)); - Neq(Freeze(false)(s_mut), Freeze(false)(s_mut)); - Neq(pack M::S(0), s); - Neq(r, r); - Neq(Freeze(false)(r_mut), Freeze(false)(r_mut)); - Neq(r, Freeze(false)(r_mut)); - Neq(Freeze(false)(r_mut), r); - Neq>(pack M::G(1), pack M::G(2)); - Neq>(pack M::G(1), pack M::G(2)); + Neq(Borrow(Immutable)(s), s_ref); + Neq(Freeze(false)(Borrow(Mutable)(s)), s_ref); + Neq(Freeze(false)(Borrow(Mutable)(s)), Freeze(false)(s_mut)); + Neq(Borrow(Immutable)(s), Freeze(false)(s_mut)); + Neq(s_ref, Freeze(false)(s_mut)); + Neq(Freeze(false)(s_mut), Freeze(false)(s_mut)); + Neq(pack M::S(0), s); + Neq(r, r); + Neq(Freeze(false)(r_mut), Freeze(false)(r_mut)); + Neq(r, Freeze(false)(r_mut)); + Neq(Freeze(false)(r_mut), r); + 
Neq>(pack M::G(1), pack M::G(2)); + Neq>(pack M::G(1), pack M::G(2)); Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct G has drop { + f: T, + } + struct R { + f: u64, + } + struct S has drop { + u: u64, + } + fun t0(r: &R, r_mut: &mut R, s: S, s_ref: &S, s_mut: &mut S) { + true; + true; + true; + true; + true; + &0 != &1; + true; + true; + &s != s_ref; + /*freeze*/&mut s != s_ref; + /*freeze*/&mut s != /*freeze*/s_mut; + &s != /*freeze*/s_mut; + s_ref != /*freeze*/s_mut; + /*freeze*/s_mut != /*freeze*/s_mut; + S{u: 0} != s; + r != r; + /*freeze*/r_mut != /*freeze*/r_mut; + r != /*freeze*/r_mut; + /*freeze*/r_mut != r; + G{f: 1} != G{f: 2}; + G{f: 1} != G{f: 2}; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/nested_post_process.exp b/third_party/move/move-compiler-v2/tests/checking/typing/nested_post_process.exp index d82dc536aca59..302baa8097a2f 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/nested_post_process.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/nested_post_process.exp @@ -3,16 +3,16 @@ module 0x42::simple_map { use std::error; use std::option; use std::vector; - struct Element { - key: #0, - value: #1, + struct Element { + key: Key, + value: Value, } - struct SimpleMap { - data: vector>, + struct SimpleMap { + data: vector>, } - public fun borrow(map: &simple_map::SimpleMap<#0, #1>,key: �):  { + public fun borrow(map: &SimpleMap,key: &Key): &Value { { - let maybe_idx: option::Option = simple_map::find(map, key); + let maybe_idx: 0x1::option::Option = simple_map::find(map, key); if option::is_some(Borrow(Immutable)(maybe_idx)) { Tuple() } else { @@ -20,20 +20,20 @@ module 0x42::simple_map { }; { let idx: u64 = option::extract(Borrow(Mutable)(maybe_idx)); - Borrow(Immutable)(select simple_map::Element.value<&simple_map::Element>(vector::borrow>(Borrow(Immutable)(select 
simple_map::SimpleMap.data<&simple_map::SimpleMap>(map)), idx))) + Borrow(Immutable)(select simple_map::Element.value<&Element>(vector::borrow>(Borrow(Immutable)(select simple_map::SimpleMap.data<&SimpleMap>(map)), idx))) } } } - private fun find(map: &simple_map::SimpleMap<#0, #1>,key: �): option::Option { + private fun find(map: &SimpleMap,key: &Key): 0x1::option::Option { { - let leng: u64 = vector::length>(Borrow(Immutable)(select simple_map::SimpleMap.data<&simple_map::SimpleMap>(map))); + let leng: u64 = vector::length>(Borrow(Immutable)(select simple_map::SimpleMap.data<&SimpleMap>(map))); { let i: u64 = 0; loop { if Lt(i, leng) { { - let element: &simple_map::Element = vector::borrow>(Borrow(Immutable)(select simple_map::SimpleMap.data<&simple_map::SimpleMap>(map)), i); - if Eq(Borrow(Immutable)(select simple_map::Element.key<&simple_map::Element>(element)), key) { + let element: &Element = vector::borrow>(Borrow(Immutable)(select simple_map::SimpleMap.data<&SimpleMap>(map)), i); + if Eq(Borrow(Immutable)(select simple_map::Element.key<&Element>(element)), key) { return option::some(i) } else { Tuple() @@ -50,3 +50,30 @@ module 0x42::simple_map { } } } // end 0x42::simple_map + +// -- Sourcified model before bytecode pipeline +module 0x42::simple_map { + struct Element has copy, drop, store { + key: Key, + value: Value, + } + struct SimpleMap has copy, drop, store { + data: vector>, + } + public fun borrow(map: &SimpleMap, key: &Key): &Value { + let maybe_idx = find(map, key); + if (0x1::option::is_some(&maybe_idx)) () else abort 0x1::error::invalid_argument(2); + let idx = 0x1::option::extract(&mut maybe_idx); + &0x1::vector::borrow>(&map.data, idx).value + } + fun find(map: &SimpleMap, key: &Key): 0x1::option::Option { + let leng = 0x1::vector::length>(&map.data); + let i = 0; + while (i < leng) { + let element = 0x1::vector::borrow>(&map.data, i); + if (&element.key == key) return 0x1::option::some(i); + i = i + 1; + }; + 0x1::option::none() + } +} diff 
--git a/third_party/move/move-compiler-v2/tests/checking/typing/other_builtins.exp b/third_party/move/move-compiler-v2/tests/checking/typing/other_builtins.exp index f55b1ebdbb292..c9bac663000b3 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/other_builtins.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/other_builtins.exp @@ -1,6 +1,6 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { - private fun any(): #0 { + private fun any(): T { Abort(0) } private fun foo(x: &mut u64) { @@ -19,3 +19,16 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun any(): T { + abort 0 + } + fun foo(x: &mut u64) { + /*freeze*/x; + /*freeze*/&mut any>(); + if (false) () else abort *x; + if (true) () else abort 0; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/phantom_param_struct_decl.exp b/third_party/move/move-compiler-v2/tests/checking/typing/phantom_param_struct_decl.exp index 4e1a345132678..478578a6eacaf 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/phantom_param_struct_decl.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/phantom_param_struct_decl.exp @@ -1,20 +1,41 @@ // -- Model dump before bytecode pipeline module 0x42::M1 { - struct S1 { + struct S1 { a: u64, } - struct S2 { - a: M1::S1<#0>, - b: vector>, + struct S2 { + a: S1, + b: vector>, } - struct S3 { - a: #1, - b: #3, + struct S3 { + a: T2, + b: T4, } - struct S4 { + struct S4 { a: u64, } - struct S5 { - a: M1::S4<#0>, + struct S5 { + a: S4, } } // end 0x42::M1 + +// -- Sourcified model before bytecode pipeline +module 0x42::M1 { + struct S1 { + a: u64, + } + struct S2 { + a: S1, + b: vector>, + } + struct S3 { + a: T2, + b: T4, + } + struct S4 { + a: u64, + } + struct S5 { + a: S4, + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/return_any_type.exp 
b/third_party/move/move-compiler-v2/tests/checking/typing/return_any_type.exp index bdaaf2919cd70..9cd94b1ea8825 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/return_any_type.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/return_any_type.exp @@ -3,7 +3,7 @@ module 0x8675309::M { struct Coin { dummy_field: bool, } - private fun foo(c: M::Coin) { + private fun foo(c: Coin) { M::Coin{ dummy_field: _ } = c; Tuple() } @@ -16,3 +16,18 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct Coin { + } + fun foo(c: Coin) { + Coin{} = c; + } + fun t0() { + 0 + (return ()); + } + fun t1() { + foo(return ()); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/return_type_explicit_exp.exp b/third_party/move/move-compiler-v2/tests/checking/typing/return_type_explicit_exp.exp index 087f85511cbb4..71676a3601e39 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/return_type_explicit_exp.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/return_type_explicit_exp.exp @@ -13,7 +13,7 @@ module 0x8675309::M { private fun t1(): u64 { return 0 } - private fun t2(): (u64, bool, M::R) { + private fun t2(): (u64, bool, R) { loop { if true { return Tuple(0, false, pack M::R(false)) @@ -24,3 +24,19 @@ module 0x8675309::M { Abort(0) } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + fun t0() { + if (true) () else () + } + fun t1(): u64 { + 0 + } + fun t2(): (u64, bool, R) { + while (true) return (0, false, R{}); + abort 0 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/return_type_last_exp.exp b/third_party/move/move-compiler-v2/tests/checking/typing/return_type_last_exp.exp index d469d10153e28..1efd715b2ff84 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/return_type_last_exp.exp +++ 
b/third_party/move/move-compiler-v2/tests/checking/typing/return_type_last_exp.exp @@ -9,7 +9,21 @@ module 0x8675309::M { private fun t1(): u64 { 0 } - private fun t2(): (u64, bool, M::R) { + private fun t2(): (u64, bool, R) { Tuple(0, false, pack M::R(false)) } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + fun t0() { + } + fun t1(): u64 { + 0 + } + fun t2(): (u64, bool, R) { + (0, false, R{}) + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/seq_ignores_value.exp b/third_party/move/move-compiler-v2/tests/checking/typing/seq_ignores_value.exp index 36751b65e11ae..ccb4861ab2f24 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/seq_ignores_value.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/seq_ignores_value.exp @@ -24,3 +24,21 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has drop { + } + fun t0() { + (); + } + fun t1() { + 0; + } + fun t2() { + (0, false, S{}); + } + fun t3() { + if (true) (0, false, S{}) else (0, false, S{}); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/shadowing.exp b/third_party/move/move-compiler-v2/tests/checking/typing/shadowing.exp index 7c470644e168e..5330d3e7bd8c3 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/shadowing.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/shadowing.exp @@ -41,3 +41,44 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S { + f: u64, + b: bool, + } + fun t0() { + { + false; + }; + 0; + { + false; + }; + 0; + { + { + 0x0; + }; + false; + }; + 0; + } + fun t1(cond: bool) { + if (cond) { + false; + } else { + 0x0; + }; + 0; + } + fun t2() { + loop { + let S{f: _,b: x} = S{f: 0,b: false}; + false; + break + }; + 0; + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/typing/spec_block_ok.exp b/third_party/move/move-compiler-v2/tests/checking/typing/spec_block_ok.exp index 34634cdef94de..e0af192eb67db 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/spec_block_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/spec_block_ok.exp @@ -6,3 +6,14 @@ module 0x8675309::M { } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun specs_in_fun(_x: u64) { + + /* spec { + } + */ + + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/struct_no_field_list.exp b/third_party/move/move-compiler-v2/tests/checking/typing/struct_no_field_list.exp index 9b406f3159e49..070f921e332b9 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/struct_no_field_list.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/struct_no_field_list.exp @@ -3,7 +3,7 @@ module 0x42::m { struct S { dummy_field: bool, } - private fun f(_s: m::S): m::S { + private fun f(_s: S): S { pack m::S(false) } private fun d() { @@ -19,3 +19,17 @@ module 0x42::m { } } } // end 0x42::m + +// -- Sourcified model before bytecode pipeline +module 0x42::m { + struct S has copy, drop { + } + fun f(_s: S): S { + S{} + } + fun d() { + let S{} = S{}; + let S{} = S{}; + let S{} = S{}; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/subtype_annotation.exp b/third_party/move/move-compiler-v2/tests/checking/typing/subtype_annotation.exp index 5b93526f7596c..237df485ea5b1 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/subtype_annotation.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/subtype_annotation.exp @@ -20,3 +20,23 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has drop { + } + fun t0() { + &mut 0; + /*freeze*/&mut 0; + &0; + &mut S{}; + 
/*freeze*/&mut S{}; + &S{}; + } + fun t1() { + (&mut 0, &mut 0); + (&mut 0, /*freeze*/&mut 0); + (/*freeze*/&mut 0, &mut 0); + (/*freeze*/&mut 0, /*freeze*/&mut 0); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/subtype_args.exp b/third_party/move/move-compiler-v2/tests/checking/typing/subtype_args.exp index 47591d5ea5698..4f9d394213ba6 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/subtype_args.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/subtype_args.exp @@ -3,23 +3,23 @@ module 0x8675309::M { struct S { dummy_field: bool, } - private fun imm(_x: �) { + private fun imm(_x: &T) { Tuple() } - private fun imm_imm(_x: �,_y: �) { + private fun imm_imm(_x: &T,_y: &T) { Tuple() } - private fun imm_mut(_x: �,_y: &mut #0) { + private fun imm_mut(_x: &T,_y: &mut T) { Tuple() } - private fun mut_imm(_x: &mut #0,_y: �) { + private fun mut_imm(_x: &mut T,_y: &T) { Tuple() } private fun t0() { M::imm(Freeze(false)(Borrow(Mutable)(0))); M::imm(Borrow(Immutable)(0)); - M::imm(Freeze(false)(Borrow(Mutable)(pack M::S(false)))); - M::imm(Borrow(Immutable)(pack M::S(false))); + M::imm(Freeze(false)(Borrow(Mutable)(pack M::S(false)))); + M::imm(Borrow(Immutable)(pack M::S(false))); Tuple() } private fun t1() { @@ -34,3 +34,32 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S has drop { + } + fun imm(_x: &T) { + } + fun imm_imm(_x: &T, _y: &T) { + } + fun imm_mut(_x: &T, _y: &mut T) { + } + fun mut_imm(_x: &mut T, _y: &T) { + } + fun t0() { + imm(/*freeze*/&mut 0); + imm(&0); + imm(/*freeze*/&mut S{}); + imm(&S{}); + } + fun t1() { + imm_mut(/*freeze*/&mut 0, &mut 0); + mut_imm(&mut 0, /*freeze*/&mut 0); + imm_imm(/*freeze*/&mut 0, /*freeze*/&mut 0); + } + inline fun t2(f: |(&u64, &mut u64)|) { + f(&mut 0, &mut 0); + f(&0, &mut 0); + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/typing/subtype_assign.exp b/third_party/move/move-compiler-v2/tests/checking/typing/subtype_assign.exp index 4a999869cd4a7..d4493825059b4 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/subtype_assign.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/subtype_assign.exp @@ -33,3 +33,28 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S { + } + fun t0() { + let x; + x = /*freeze*/&mut 0; + x; + } + fun t1() { + let (x,y); + (x,y) = (&mut 0, /*freeze*/&mut 0); + x; + y; + let (x,y); + (x,y) = (/*freeze*/&mut 0, &mut 0); + x; + y; + let (x,y); + (x,y) = (/*freeze*/&mut 0, /*freeze*/&mut 0); + x; + y; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/subtype_bind.exp b/third_party/move/move-compiler-v2/tests/checking/typing/subtype_bind.exp index 70b49a9c85fe5..f0f96e40b7d90 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/subtype_bind.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/subtype_bind.exp @@ -29,3 +29,24 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S { + } + fun t0() { + let x = /*freeze*/&mut 0; + x; + } + fun t1() { + let (x,y) = (&mut 0, /*freeze*/&mut 0); + x; + y; + let (x,y) = (/*freeze*/&mut 0, &mut 0); + x; + y; + let (x,y) = (/*freeze*/&mut 0, /*freeze*/&mut 0); + x; + y; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/subtype_return.exp b/third_party/move/move-compiler-v2/tests/checking/typing/subtype_return.exp index c7ac66f03b81d..ae5aaddde3f22 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/subtype_return.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/subtype_return.exp @@ -6,7 +6,7 @@ module 0x8675309::M { private fun t0(u: &mut u64): &u64 { Freeze(false)(u) 
} - private fun t1(s: &mut M::S): &M::S { + private fun t1(s: &mut S): &S { Freeze(false)(s) } private fun t2(u1: &mut u64,u2: &mut u64): (&u64, &mut u64) { @@ -19,3 +19,24 @@ module 0x8675309::M { Tuple(Freeze(false)(u1), Freeze(false)(u2)) } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S { + } + fun t0(u: &mut u64): &u64 { + /*freeze*/u + } + fun t1(s: &mut S): &S { + /*freeze*/s + } + fun t2(u1: &mut u64, u2: &mut u64): (&u64, &mut u64) { + (/*freeze*/u1, u2) + } + fun t3(u1: &mut u64, u2: &mut u64): (&mut u64, &u64) { + (u1, /*freeze*/u2) + } + fun t4(u1: &mut u64, u2: &mut u64): (&u64, &u64) { + (/*freeze*/u1, /*freeze*/u2) + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/tuple.exp b/third_party/move/move-compiler-v2/tests/checking/typing/tuple.exp index 132846113b36a..2b347473d4478 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/tuple.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/tuple.exp @@ -3,13 +3,27 @@ module 0x42::tuple { struct S { f: u64, } - private fun tuple(x: u64): (u64, tuple::S) { + private fun tuple(x: u64): (u64, S) { Tuple(x, pack tuple::S(Add(x, 1))) } private fun use_tuple(x: u64): u64 { { - let (x: u64, tuple::S{ f: y }): (u64, tuple::S) = tuple::tuple(x); + let (x: u64, tuple::S{ f: y }): (u64, S) = tuple::tuple(x); Add(x, y) } } } // end 0x42::tuple + +// -- Sourcified model before bytecode pipeline +module 0x42::tuple { + struct S { + f: u64, + } + fun tuple(x: u64): (u64, S) { + (x, S{f: x + 1}) + } + fun use_tuple(x: u64): u64 { + let (x,S{f: y}) = tuple(x); + x + y + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_pack.exp b/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_pack.exp index 0d0ae4c52ac2e..fa31994614be4 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_pack.exp +++ 
b/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_pack.exp @@ -1,18 +1,32 @@ // -- Model dump before bytecode pipeline module 0x42::M { - struct Box { - f1: #0, - f2: #0, + struct Box { + f1: T, + f2: T, } private fun t0() { { - let b: M::Box = pack M::Box(0, 1); + let b: Box = pack M::Box(0, 1); Deref(Borrow(Immutable)(b)); { - let b2: M::Box> = pack M::Box>(Deref(Borrow(Immutable)(b)), b); + let b2: Box> = pack M::Box>(Deref(Borrow(Immutable)(b)), b); b2; Tuple() } } } } // end 0x42::M + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct Box has copy, drop { + f1: T, + f2: T, + } + fun t0() { + let b = Box{f1: 0,f2: 1}; + *&b; + let b2 = Box>{f1: *&b,f2: b}; + b2; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_unpack.exp b/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_unpack.exp index 63252d1509136..fd5056c8e617a 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_unpack.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_unpack.exp @@ -1,10 +1,10 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { - struct Box { - f1: #0, - f2: #0, + struct Box { + f1: T, + f2: T, } - private fun new(): M::Box<#0> { + private fun new(): Box { Abort(0) } private fun t0() { @@ -13,7 +13,7 @@ module 0x8675309::M { f1; f2; { - let M::Box>{ f1, f2 } = M::new>(); + let M::Box>{ f1, f2 } = M::new>(); f1; f2; Tuple() @@ -21,3 +21,22 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct Box has drop { + f1: T, + f2: T, + } + fun new(): Box { + abort 0 + } + fun t0() { + let Box{f1: f1,f2: f2} = new(); + f1; + f2; + let Box>{f1: f1,f2: f2} = new>(); + f1; + f2; + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_unpack_assign.exp b/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_unpack_assign.exp index 141500cd03452..e1f2fc3e1a998 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_unpack_assign.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_single_unpack_assign.exp @@ -1,10 +1,10 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { - struct Box { - f1: #0, - f2: #0, + struct Box { + f1: T, + f2: T, } - private fun new(): M::Box<#0> { + private fun new(): Box { Abort(0) } private fun t0() { @@ -16,10 +16,10 @@ module 0x8675309::M { f1; f2; { - let f1: M::Box; + let f1: Box; { - let f2: M::Box; - M::Box>{ f1, f2 } = M::new>(); + let f2: Box; + M::Box>{ f1, f2 } = M::new>(); f1; f2; Tuple() @@ -29,3 +29,26 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct Box has drop { + f1: T, + f2: T, + } + fun new(): Box { + abort 0 + } + fun t0() { + let f1; + let f2; + Box{f1: f1,f2: f2} = new(); + f1; + f2; + let f1; + let f2; + Box>{f1: f1,f2: f2} = new>(); + f1; + f2; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_threaded_pack.exp b/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_threaded_pack.exp index 00bbfae97d9e5..66848a8d97a42 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_threaded_pack.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/type_variable_join_threaded_pack.exp @@ -1,31 +1,31 @@ // -- Model dump before bytecode pipeline module 0x2::Container { - struct T { - f: #0, + struct T { + f: V, } - public fun get(_self: &Container::T<#0>): #0 { + public fun get(_self: &T): V { Abort(0) } - public fun new(): Container::T<#0> { + public fun new(): T { 
Abort(0) } - public fun put(_self: &mut Container::T<#0>,_item: #0) { + public fun put(_self: &mut T,_item: V) { Abort(0) } } // end 0x2::Container module 0x2::M { use 0x2::Container; // resolved as: 0x2::Container - struct Box { - f1: #0, - f2: #0, + struct Box { + f1: T, + f2: T, } - private fun t0(): M::Box { + private fun t0(): Box { { let v: Container::T = Container::new(); { let x: u64 = Container::get(Borrow(Immutable)(v)); { - let b: M::Box = pack M::Box(x, x); + let b: Box = pack M::Box(x, x); Container::put(Borrow(Mutable)(v), 0); b } @@ -33,3 +33,33 @@ module 0x2::M { } } } // end 0x2::M + +// -- Sourcified model before bytecode pipeline +module 0x2::Container { + struct T has drop { + f: V, + } + public fun get(_self: &T): V { + abort 0 + } + public fun new(): T { + abort 0 + } + public fun put(_self: &mut T, _item: V) { + abort 0 + } +} +module 0x2::M { + use 0x2::Container; + struct Box has drop { + f1: T, + f2: T, + } + fun t0(): Box { + let v = Container::new(); + let x = Container::get(&v); + let b = Box{f1: x,f2: x}; + Container::put(&mut v, 0); + b + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/unary_not.exp b/third_party/move/move-compiler-v2/tests/checking/typing/unary_not.exp index da126c647eb2c..6028cd1a02094 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/unary_not.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/unary_not.exp @@ -3,16 +3,32 @@ module 0x8675309::M { struct R { f: bool, } - private fun t0(x: bool,r: M::R) { + private fun t0(x: bool,r: R) { false; true; Not(x); Not(Copy(x)); Not(Move(x)); - Not(select M::R.f(r)); + Not(select M::R.f(r)); { let M::R{ f: _ } = r; Tuple() } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: bool, + } + fun t0(x: bool, r: R) { + false; + true; + !x; + !(copy x); + !(move x); + !r.f; + let R{f: _} = r; + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/typing/unit.exp b/third_party/move/move-compiler-v2/tests/checking/typing/unit.exp index 0e7cba1feba85..d3915ca4454d9 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/unit.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/unit.exp @@ -5,3 +5,10 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun foo() { + (); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/unused_lambda_param.exp b/third_party/move/move-compiler-v2/tests/checking/typing/unused_lambda_param.exp index 27ff8573fad05..26dddef4b0ac9 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/unused_lambda_param.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/unused_lambda_param.exp @@ -24,3 +24,19 @@ module 0xc0ffee::m { Tuple() } } // end 0xc0ffee::m + +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + inline fun test(p: u64, f: |u64|u64): u64 { + f(p) + } + fun unused_lambda() { + 1; + } + fun unused_lambda_suppressed1() { + 1; + } + fun unused_lambda_suppressed2() { + 1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/unused_lambda_param_typed.exp b/third_party/move/move-compiler-v2/tests/checking/typing/unused_lambda_param_typed.exp index ca98b47e485f2..a8cb39a936fa9 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/unused_lambda_param_typed.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/unused_lambda_param_typed.exp @@ -24,3 +24,19 @@ module 0xc0ffee::m { Tuple() } } // end 0xc0ffee::m + +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + inline fun test(p: u64, f: |u64|u64): u64 { + f(p) + } + fun unused_lambda() { + 1; + } + fun unused_lambda_suppressed1() { + 1; + } + fun unused_lambda_suppressed2() { + 1; + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/typing/unused_local.exp b/third_party/move/move-compiler-v2/tests/checking/typing/unused_local.exp index 0e221c884baf1..f1d8f4b665dd0 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/unused_local.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/unused_local.exp @@ -104,3 +104,37 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S { + f: u64, + g: bool, + } + fun t0() { + } + fun t1() { + } + fun t2() { + let S{f: f,g: g}; + } + fun two_unused(x: u64, y: bool) { + } + fun unused_local_suppressed1() { + } + fun unused_local_suppressed2() { + } + native fun unused_native_ok(x: u64, y: bool) ; + fun unused_param(x: u64) { + } + fun unused_param1_used_param2(x: u64, y: bool): bool { + y + } + fun unused_param2_used_param1(x: u64, y: bool): u64 { + x + } + fun unused_param_suppressed1(_: u64) { + } + fun unused_param_suppressed2(_x: u64) { + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/use_local.exp b/third_party/move/move-compiler-v2/tests/checking/typing/use_local.exp index d211b57c21a2e..6f1a2a0fe293d 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/use_local.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/use_local.exp @@ -8,9 +8,9 @@ module 0x8675309::M { } private fun t() { { - let s: M::S = pack M::S(false); + let s: S = pack M::S(false); { - let r: M::R = pack M::R(false); + let r: R = pack M::R(false); 0; s; M::R{ dummy_field: _ } = r; @@ -19,3 +19,18 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + } + struct S has drop { + } + fun t() { + let s = S{}; + let r = R{}; + 0; + s; + R{} = r; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/v1-examples/multi_pool_money_market_token.exp 
b/third_party/move/move-compiler-v2/tests/checking/typing/v1-examples/multi_pool_money_market_token.exp index afd5f2f73d767..438c492d624f6 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/v1-examples/multi_pool_money_market_token.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/v1-examples/multi_pool_money_market_token.exp @@ -18,29 +18,29 @@ warning: unused type parameter // -- Model dump before bytecode pipeline module 0x2::Token { - struct Coin { - type: #0, + struct Coin { + type: AssetType, value: u64, } - public fun create(type: #0,value: u64): Token::Coin<#0> { + public fun create(type: ATy,value: u64): Coin { pack Token::Coin(type, value) } - public fun value(coin: &Token::Coin<#0>): u64 { - select Token::Coin.value<&Token::Coin>(coin) + public fun value(coin: &Coin): u64 { + select Token::Coin.value<&Coin>(coin) } - public fun deposit(coin: &mut Token::Coin<#0>,check: Token::Coin<#0>) { + public fun deposit(coin: &mut Coin,check: Coin) { { let Token::Coin{ type, value } = check; - if Eq(Borrow(Immutable)(select Token::Coin.type<&mut Token::Coin>(coin)), Borrow(Immutable)(type)) { + if Eq(Borrow(Immutable)(select Token::Coin.type<&mut Coin>(coin)), Borrow(Immutable)(type)) { Tuple() } else { Abort(42) }; - select Token::Coin.value<&mut Token::Coin>(coin) = Add(select Token::Coin.value<&mut Token::Coin>(coin), value); + select Token::Coin.value<&mut Coin>(coin) = Add(select Token::Coin.value<&mut Coin>(coin), value); Tuple() } } - public fun destroy_zero(coin: Token::Coin<#0>) { + public fun destroy_zero(coin: Coin) { { let Token::Coin{ type: _, value } = coin; if Eq(value, 0) { @@ -50,57 +50,57 @@ module 0x2::Token { } } } - public fun join(xus: Token::Coin<#0>,coin2: Token::Coin<#0>): Token::Coin<#0> { + public fun join(xus: Coin,coin2: Coin): Coin { Token::deposit(Borrow(Mutable)(xus), coin2); xus } - public fun split(coin: Token::Coin<#0>,amount: u64): (Token::Coin<#0>, Token::Coin<#0>) { + public fun split(coin: 
Coin,amount: u64): (Coin, Coin) { { - let other: Token::Coin = Token::withdraw(Borrow(Mutable)(coin), amount); + let other: Coin = Token::withdraw(Borrow(Mutable)(coin), amount); Tuple(coin, other) } } - public fun withdraw(coin: &mut Token::Coin<#0>,amount: u64): Token::Coin<#0> { - if Ge(select Token::Coin.value<&mut Token::Coin>(coin), amount) { + public fun withdraw(coin: &mut Coin,amount: u64): Coin { + if Ge(select Token::Coin.value<&mut Coin>(coin), amount) { Tuple() } else { Abort(10) }; - select Token::Coin.value<&mut Token::Coin>(coin) = Sub(select Token::Coin.value<&mut Token::Coin>(coin), amount); - pack Token::Coin(Deref(Borrow(Immutable)(select Token::Coin.type<&mut Token::Coin>(coin))), amount) + select Token::Coin.value<&mut Coin>(coin) = Sub(select Token::Coin.value<&mut Coin>(coin), amount); + pack Token::Coin(Deref(Borrow(Immutable)(select Token::Coin.type<&mut Coin>(coin))), amount) } } // end 0x2::Token module 0x2::Map { - struct T { + struct T { } - public native fun empty(): Map::T<#0, #1>; - public native fun remove(m: &Map::T<#0, #1>,k: �): #1; - public native fun contains_key(m: &Map::T<#0, #1>,k: �): bool; - public native fun get(m: &Map::T<#0, #1>,k: �):  - public native fun get_mut(m: &mut Map::T<#0, #1>,k: �): &mut #1; - public native fun insert(m: &Map::T<#0, #1>,k: #0,v: #1); + public native fun empty(): T; + public native fun remove(m: &T,k: &K): V; + public native fun contains_key(m: &T,k: &K): bool; + public native fun get(m: &T,k: &K): &V; + public native fun get_mut(m: &mut T,k: &K): &mut V; + public native fun insert(m: &T,k: K,v: V); } // end 0x2::Map module 0x3::OneToOneMarket { use std::signer; use 0x2::Map; // resolved as: 0x2::Map use 0x2::Token; // resolved as: 0x2::Token - struct BorrowRecord { + struct BorrowRecord { record: Map::T, } - struct DepositRecord { + struct DepositRecord { record: Map::T, } - struct Pool { - coin: Token::Coin<#0>, + struct Pool { + coin: Token::Coin, } - struct Price { + struct Price { price: 
u64, } - public fun borrow(account: &signer,pool_owner: address,amount: u64): Token::Coin<#1> - acquires OneToOneMarket::Price(*) - acquires OneToOneMarket::Pool(*) - acquires OneToOneMarket::DepositRecord(*) - acquires OneToOneMarket::BorrowRecord(*) + public fun borrow(account: &signer,pool_owner: address,amount: u64): Token::Coin + acquires Price(*) + acquires Pool(*) + acquires DepositRecord(*) + acquires BorrowRecord(*) { if Le(amount, OneToOneMarket::max_borrow_amount(account, pool_owner)) { Tuple() @@ -109,46 +109,46 @@ module 0x3::OneToOneMarket { }; OneToOneMarket::update_borrow_record(account, pool_owner, amount); { - let pool: &mut OneToOneMarket::Pool = BorrowGlobal(Mutable)>(pool_owner); - Token::withdraw(Borrow(Mutable)(select OneToOneMarket::Pool.coin<&mut OneToOneMarket::Pool>(pool)), amount) + let pool: &mut Pool = BorrowGlobal(Mutable)>(pool_owner); + Token::withdraw(Borrow(Mutable)(select OneToOneMarket::Pool.coin<&mut Pool>(pool)), amount) } } - public fun deposit(account: &signer,pool_owner: address,coin: Token::Coin<#0>) - acquires OneToOneMarket::Pool(*) - acquires OneToOneMarket::DepositRecord(*) + public fun deposit(account: &signer,pool_owner: address,coin: Token::Coin) + acquires Pool(*) + acquires DepositRecord(*) { { let amount: u64 = Token::value(Borrow(Immutable)(coin)); OneToOneMarket::update_deposit_record(account, pool_owner, amount); { - let pool: &mut OneToOneMarket::Pool = BorrowGlobal(Mutable)>(pool_owner); - Token::deposit(Borrow(Mutable)(select OneToOneMarket::Pool.coin<&mut OneToOneMarket::Pool>(pool)), coin) + let pool: &mut Pool = BorrowGlobal(Mutable)>(pool_owner); + Token::deposit(Borrow(Mutable)(select OneToOneMarket::Pool.coin<&mut Pool>(pool)), coin) } } } - private fun accept(account: &signer,init: Token::Coin<#0>) { + private fun accept(account: &signer,init: Token::Coin) { { let sender: address = signer::address_of(account); - if Not(exists>(sender)) { + if Not(exists>(sender)) { Tuple() } else { Abort(42) }; - 
MoveTo>(account, pack OneToOneMarket::Pool(init)) + MoveTo>(account, pack OneToOneMarket::Pool(init)) } } private fun borrowed_amount(account: &signer,pool_owner: address): u64 - acquires OneToOneMarket::BorrowRecord(*) + acquires BorrowRecord(*) { { let sender: address = signer::address_of(account); - if Not(exists>(sender)) { + if Not(exists>(sender)) { return 0 } else { Tuple() }; { - let record: &Map::T = Borrow(Immutable)(select OneToOneMarket::BorrowRecord.record<&OneToOneMarket::BorrowRecord>(BorrowGlobal(Immutable)>(sender))); + let record: &Map::T = Borrow(Immutable)(select OneToOneMarket::BorrowRecord.record<&BorrowRecord>(BorrowGlobal(Immutable)>(sender))); if Map::contains_key(record, Borrow(Immutable)(pool_owner)) { Deref(Map::get(record, Borrow(Immutable)(pool_owner))) } else { @@ -158,17 +158,17 @@ module 0x3::OneToOneMarket { } } private fun deposited_amount(account: &signer,pool_owner: address): u64 - acquires OneToOneMarket::DepositRecord(*) + acquires DepositRecord(*) { { let sender: address = signer::address_of(account); - if Not(exists>(sender)) { + if Not(exists>(sender)) { return 0 } else { Tuple() }; { - let record: &Map::T = Borrow(Immutable)(select OneToOneMarket::DepositRecord.record<&OneToOneMarket::DepositRecord>(BorrowGlobal(Immutable)>(sender))); + let record: &Map::T = Borrow(Immutable)(select OneToOneMarket::DepositRecord.record<&DepositRecord>(BorrowGlobal(Immutable)>(sender))); if Map::contains_key(record, Borrow(Immutable)(pool_owner)) { Deref(Map::get(record, Borrow(Immutable)(pool_owner))) } else { @@ -178,17 +178,17 @@ module 0x3::OneToOneMarket { } } private fun max_borrow_amount(account: &signer,pool_owner: address): u64 - acquires OneToOneMarket::Price(*) - acquires OneToOneMarket::Pool(*) - acquires OneToOneMarket::DepositRecord(*) - acquires OneToOneMarket::BorrowRecord(*) + acquires Price(*) + acquires Pool(*) + acquires DepositRecord(*) + acquires BorrowRecord(*) { { let input_deposited: u64 = 
OneToOneMarket::deposited_amount(account, pool_owner); { let output_deposited: u64 = OneToOneMarket::borrowed_amount(account, pool_owner); { - let input_into_output: u64 = Mul(input_deposited, select OneToOneMarket::Price.price<&OneToOneMarket::Price>(BorrowGlobal(Immutable)>(pool_owner))); + let input_into_output: u64 = Mul(input_deposited, select OneToOneMarket::Price.price<&Price>(BorrowGlobal(Immutable)>(pool_owner))); { let max_output: u64 = if Lt(input_into_output, output_deposited) { 0 @@ -197,8 +197,8 @@ module 0x3::OneToOneMarket { }; { let available_output: u64 = { - let pool: &OneToOneMarket::Pool = BorrowGlobal(Immutable)>(pool_owner); - Token::value(Borrow(Immutable)(select OneToOneMarket::Pool.coin<&OneToOneMarket::Pool>(pool))) + let pool: &Pool = BorrowGlobal(Immutable)>(pool_owner); + Token::value(Borrow(Immutable)(select OneToOneMarket::Pool.coin<&Pool>(pool))) }; if Lt(max_output, available_output) { max_output @@ -211,23 +211,23 @@ module 0x3::OneToOneMarket { } } } - public fun register_price(account: &signer,initial_in: Token::Coin<#0>,initial_out: Token::Coin<#1>,price: u64) { + public fun register_price(account: &signer,initial_in: Token::Coin,initial_out: Token::Coin,price: u64) { OneToOneMarket::accept(account, initial_in); OneToOneMarket::accept(account, initial_out); - MoveTo>(account, pack OneToOneMarket::Price(price)) + MoveTo>(account, pack OneToOneMarket::Price(price)) } private fun update_borrow_record(account: &signer,pool_owner: address,amount: u64) - acquires OneToOneMarket::BorrowRecord(*) + acquires BorrowRecord(*) { { let sender: address = signer::address_of(account); - if Not(exists>(sender)) { - MoveTo>(account, pack OneToOneMarket::BorrowRecord(Map::empty())) + if Not(exists>(sender)) { + MoveTo>(account, pack OneToOneMarket::BorrowRecord(Map::empty())) } else { Tuple() }; { - let record: &mut Map::T = Borrow(Mutable)(select OneToOneMarket::BorrowRecord.record<&mut 
OneToOneMarket::BorrowRecord>(BorrowGlobal(Mutable)>(sender))); + let record: &mut Map::T = Borrow(Mutable)(select OneToOneMarket::BorrowRecord.record<&mut BorrowRecord>(BorrowGlobal(Mutable)>(sender))); if Map::contains_key(Freeze(false)(record), Borrow(Immutable)(pool_owner)) { { let old_amount: u64 = Map::remove(Freeze(false)(record), Borrow(Immutable)(pool_owner)); @@ -242,17 +242,17 @@ module 0x3::OneToOneMarket { } } private fun update_deposit_record(account: &signer,pool_owner: address,amount: u64) - acquires OneToOneMarket::DepositRecord(*) + acquires DepositRecord(*) { { let sender: address = signer::address_of(account); - if Not(exists>(sender)) { - MoveTo>(account, pack OneToOneMarket::DepositRecord(Map::empty())) + if Not(exists>(sender)) { + MoveTo>(account, pack OneToOneMarket::DepositRecord(Map::empty())) } else { Tuple() }; { - let record: &mut Map::T = Borrow(Mutable)(select OneToOneMarket::DepositRecord.record<&mut OneToOneMarket::DepositRecord>(BorrowGlobal(Mutable)>(sender))); + let record: &mut Map::T = Borrow(Mutable)(select OneToOneMarket::DepositRecord.record<&mut DepositRecord>(BorrowGlobal(Mutable)>(sender))); if Map::contains_key(Freeze(false)(record), Borrow(Immutable)(pool_owner)) { { let old_amount: u64 = Map::remove(Freeze(false)(record), Borrow(Immutable)(pool_owner)); @@ -274,7 +274,7 @@ module 0x70dd::ToddNickels { dummy_field: bool, } struct Wallet { - nickels: Token::Coin, + nickels: Token::Coin, } public fun init(account: &signer) { if Eq
(signer::address_of(account), 0x70dd) { @@ -282,19 +282,181 @@ module 0x70dd::ToddNickels { } else { Abort(42) }; - MoveTo(account, pack ToddNickels::Wallet(Token::create(pack ToddNickels::T(false), 0))) + MoveTo(account, pack ToddNickels::Wallet(Token::create(pack ToddNickels::T(false), 0))) } - public fun destroy(c: Token::Coin) - acquires ToddNickels::Wallet(*) + public fun destroy(c: Token::Coin) + acquires Wallet(*) { - Token::deposit(Borrow(Mutable)(select ToddNickels::Wallet.nickels<&mut ToddNickels::Wallet>(BorrowGlobal(Mutable)(0x70dd))), c) + Token::deposit(Borrow(Mutable)(select ToddNickels::Wallet.nickels<&mut Wallet>(BorrowGlobal(Mutable)(0x70dd))), c) } - public fun mint(account: &signer): Token::Coin { + public fun mint(account: &signer): Token::Coin { if Eq
(signer::address_of(account), 0x70dd) { Tuple() } else { Abort(42) }; - Token::create(pack ToddNickels::T(false), 5) + Token::create(pack ToddNickels::T(false), 5) } } // end 0x70dd::ToddNickels + +// -- Sourcified model before bytecode pipeline +module 0x2::Token { + struct Coin has store { + type: AssetType, + value: u64, + } + public fun create(type: ATy, value: u64): Coin { + Coin{type: type,value: value} + } + public fun value(coin: &Coin): u64 { + coin.value + } + public fun deposit(coin: &mut Coin, check: Coin) { + let Coin{type: type,value: value} = check; + if (&coin.type == &type) () else abort 42; + coin.value = coin.value + value; + } + public fun destroy_zero(coin: Coin) { + let Coin{type: _,value: value} = coin; + if (value == 0) () else abort 11 + } + public fun join(xus: Coin, coin2: Coin): Coin { + deposit(&mut xus, coin2); + xus + } + public fun split(coin: Coin, amount: u64): (Coin, Coin) { + let other = withdraw(&mut coin, amount); + (coin, other) + } + public fun withdraw(coin: &mut Coin, amount: u64): Coin { + if (coin.value >= amount) () else abort 10; + coin.value = coin.value - amount; + Coin{type: *&coin.type,value: amount} + } +} +module 0x2::Map { + struct T has copy, drop, store { + } + public native fun empty(): T ; + public native fun remove(m: &T, k: &K): V ; + public native fun contains_key(m: &T, k: &K): bool ; + public native fun get(m: &T, k: &K): &V ; + public native fun get_mut(m: &mut T, k: &K): &mut V ; + public native fun insert(m: &T, k: K, v: V) ; +} +module 0x3::OneToOneMarket { + use 0x2::Token; + use 0x2::Map; + struct BorrowRecord has key { + record: Map::T, + } + struct DepositRecord has key { + record: Map::T, + } + struct Pool has key { + coin: Token::Coin, + } + struct Price has key { + price: u64, + } + public fun borrow(account: &signer, pool_owner: address, amount: u64): Token::Coin + acquires Priceacquires Poolacquires DepositRecordacquires BorrowRecord + { + if (amount <= max_borrow_amount(account, 
pool_owner)) () else abort 1025; + update_borrow_record(account, pool_owner, amount); + let pool = borrow_global_mut>(pool_owner); + Token::withdraw(&mut pool.coin, amount) + } + public fun deposit(account: &signer, pool_owner: address, coin: Token::Coin) + acquires Poolacquires DepositRecord + { + let amount = Token::value(&coin); + update_deposit_record(account, pool_owner, amount); + let pool = borrow_global_mut>(pool_owner); + Token::deposit(&mut pool.coin, coin) + } + fun accept(account: &signer, init: Token::Coin) { + let sender = 0x1::signer::address_of(account); + if (!exists>(sender)) () else abort 42; + move_to>(account, Pool{coin: init}) + } + fun borrowed_amount(account: &signer, pool_owner: address): u64 + acquires BorrowRecord + { + let sender = 0x1::signer::address_of(account); + if (!exists>(sender)) return 0; + let record = &borrow_global>(sender).record; + if (Map::contains_key(record, &pool_owner)) *Map::get(record, &pool_owner) else 0 + } + fun deposited_amount(account: &signer, pool_owner: address): u64 + acquires DepositRecord + { + let sender = 0x1::signer::address_of(account); + if (!exists>(sender)) return 0; + let record = &borrow_global>(sender).record; + if (Map::contains_key(record, &pool_owner)) *Map::get(record, &pool_owner) else 0 + } + fun max_borrow_amount(account: &signer, pool_owner: address): u64 + acquires Priceacquires Poolacquires DepositRecordacquires BorrowRecord + { + let input_deposited = deposited_amount(account, pool_owner); + let output_deposited = borrowed_amount(account, pool_owner); + let input_into_output = input_deposited * borrow_global>(pool_owner).price; + let max_output = if (input_into_output < output_deposited) 0 else input_into_output - output_deposited; + let available_output = { + let pool = borrow_global>(pool_owner); + Token::value(&pool.coin) + }; + if (max_output < available_output) max_output else available_output + } + public fun register_price(account: &signer, initial_in: Token::Coin, initial_out: 
Token::Coin, price: u64) { + accept(account, initial_in); + accept(account, initial_out); + move_to>(account, Price{price: price}) + } + fun update_borrow_record(account: &signer, pool_owner: address, amount: u64) + acquires BorrowRecord + { + let sender = 0x1::signer::address_of(account); + if (!exists>(sender)) move_to>(account, BorrowRecord{record: Map::empty()}); + let record = &mut borrow_global_mut>(sender).record; + if (Map::contains_key(/*freeze*/record, &pool_owner)) { + let old_amount = Map::remove(/*freeze*/record, &pool_owner); + amount = amount + old_amount; + }; + Map::insert(/*freeze*/record, pool_owner, amount) + } + fun update_deposit_record(account: &signer, pool_owner: address, amount: u64) + acquires DepositRecord + { + let sender = 0x1::signer::address_of(account); + if (!exists>(sender)) move_to>(account, DepositRecord{record: Map::empty()}); + let record = &mut borrow_global_mut>(sender).record; + if (Map::contains_key(/*freeze*/record, &pool_owner)) { + let old_amount = Map::remove(/*freeze*/record, &pool_owner); + amount = amount + old_amount; + }; + Map::insert(/*freeze*/record, pool_owner, amount) + } +} +module 0x70dd::ToddNickels { + use 0x2::Token; + struct T has copy, drop, store { + } + struct Wallet has key { + nickels: Token::Coin, + } + public fun init(account: &signer) { + if (0x1::signer::address_of(account) == 0x70dd) () else abort 42; + move_to(account, Wallet{nickels: Token::create(T{}, 0)}) + } + public fun destroy(c: Token::Coin) + acquires Wallet + { + Token::deposit(&mut borrow_global_mut(0x70dd).nickels, c) + } + public fun mint(account: &signer): Token::Coin { + if (0x1::signer::address_of(account) == 0x70dd) () else abort 42; + Token::create(T{}, 5) + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/v1-examples/simple_money_market_token.exp b/third_party/move/move-compiler-v2/tests/checking/typing/v1-examples/simple_money_market_token.exp index b2f8b5a64657b..3749e13a8d819 100644 --- 
a/third_party/move/move-compiler-v2/tests/checking/typing/v1-examples/simple_money_market_token.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/v1-examples/simple_money_market_token.exp @@ -1,28 +1,28 @@ // -- Model dump before bytecode pipeline module 0x2::Token { - struct Coin { - type: #0, + struct Coin { + type: AssetType, value: u64, } - public fun create(type: #0,value: u64): Token::Coin<#0> { + public fun create(type: ATy,value: u64): Coin { pack Token::Coin(type, value) } - public fun value(coin: &Token::Coin<#0>): u64 { - select Token::Coin.value<&Token::Coin>(coin) + public fun value(coin: &Coin): u64 { + select Token::Coin.value<&Coin>(coin) } - public fun deposit(coin: &mut Token::Coin<#0>,check: Token::Coin<#0>) { + public fun deposit(coin: &mut Coin,check: Coin) { { let Token::Coin{ type, value } = check; - if Eq(Borrow(Immutable)(select Token::Coin.type<&mut Token::Coin>(coin)), Borrow(Immutable)(type)) { + if Eq(Borrow(Immutable)(select Token::Coin.type<&mut Coin>(coin)), Borrow(Immutable)(type)) { Tuple() } else { Abort(42) }; - select Token::Coin.value<&mut Token::Coin>(coin) = Add(select Token::Coin.value<&mut Token::Coin>(coin), value); + select Token::Coin.value<&mut Coin>(coin) = Add(select Token::Coin.value<&mut Coin>(coin), value); Tuple() } } - public fun destroy_zero(coin: Token::Coin<#0>) { + public fun destroy_zero(coin: Coin) { { let Token::Coin{ type: _, value } = coin; if Eq(value, 0) { @@ -32,24 +32,24 @@ module 0x2::Token { } } } - public fun join(xus: Token::Coin<#0>,coin2: Token::Coin<#0>): Token::Coin<#0> { + public fun join(xus: Coin,coin2: Coin): Coin { Token::deposit(Borrow(Mutable)(xus), coin2); xus } - public fun split(coin: Token::Coin<#0>,amount: u64): (Token::Coin<#0>, Token::Coin<#0>) { + public fun split(coin: Coin,amount: u64): (Coin, Coin) { { - let other: Token::Coin = Token::withdraw(Borrow(Mutable)(coin), amount); + let other: Coin = Token::withdraw(Borrow(Mutable)(coin), amount); Tuple(coin, 
other) } } - public fun withdraw(coin: &mut Token::Coin<#0>,amount: u64): Token::Coin<#0> { - if Ge(select Token::Coin.value<&mut Token::Coin>(coin), amount) { + public fun withdraw(coin: &mut Coin,amount: u64): Coin { + if Ge(select Token::Coin.value<&mut Coin>(coin), amount) { Tuple() } else { Abort(10) }; - select Token::Coin.value<&mut Token::Coin>(coin) = Sub(select Token::Coin.value<&mut Token::Coin>(coin), amount); - pack Token::Coin(Deref(Borrow(Immutable)(select Token::Coin.type<&mut Token::Coin>(coin))), amount) + select Token::Coin.value<&mut Coin>(coin) = Sub(select Token::Coin.value<&mut Coin>(coin), amount); + pack Token::Coin(Deref(Borrow(Immutable)(select Token::Coin.type<&mut Coin>(coin))), amount) } } // end 0x2::Token module 0x70dd::ToddNickels { @@ -59,7 +59,7 @@ module 0x70dd::ToddNickels { dummy_field: bool, } struct Wallet { - nickels: Token::Coin, + nickels: Token::Coin, } public fun init(account: &signer) { if Eq
(signer::address_of(account), 0x70dd) { @@ -67,42 +67,42 @@ module 0x70dd::ToddNickels { } else { Abort(42) }; - MoveTo(account, pack ToddNickels::Wallet(Token::create(pack ToddNickels::T(false), 0))) + MoveTo(account, pack ToddNickels::Wallet(Token::create(pack ToddNickels::T(false), 0))) } - public fun destroy(c: Token::Coin) - acquires ToddNickels::Wallet(*) + public fun destroy(c: Token::Coin) + acquires Wallet(*) { - Token::deposit(Borrow(Mutable)(select ToddNickels::Wallet.nickels<&mut ToddNickels::Wallet>(BorrowGlobal(Mutable)(0x70dd))), c) + Token::deposit(Borrow(Mutable)(select ToddNickels::Wallet.nickels<&mut Wallet>(BorrowGlobal(Mutable)(0x70dd))), c) } - public fun mint(account: &signer): Token::Coin { + public fun mint(account: &signer): Token::Coin { if Eq
(signer::address_of(account), 0x70dd) { Tuple() } else { Abort(42) }; - Token::create(pack ToddNickels::T(false), 5) + Token::create(pack ToddNickels::T(false), 5) } } // end 0x70dd::ToddNickels module 0xb055::OneToOneMarket { use std::signer; use 0x2::Token; // resolved as: 0x2::Token - struct BorrowRecord { + struct BorrowRecord { record: u64, } - struct DepositRecord { + struct DepositRecord { record: u64, } - struct Pool { - coin: Token::Coin<#0>, + struct Pool { + coin: Token::Coin, } - struct Price { + struct Price { price: u64, } - public fun borrow(account: &signer,amount: u64): Token::Coin<#1> - acquires OneToOneMarket::Price(*) - acquires OneToOneMarket::Pool(*) - acquires OneToOneMarket::DepositRecord(*) - acquires OneToOneMarket::BorrowRecord(*) + public fun borrow(account: &signer,amount: u64): Token::Coin + acquires Price(*) + acquires Pool(*) + acquires DepositRecord(*) + acquires BorrowRecord(*) { if Le(amount, OneToOneMarket::max_borrow_amount(account)) { Tuple() @@ -111,72 +111,72 @@ module 0xb055::OneToOneMarket { }; OneToOneMarket::update_borrow_record(account, amount); { - let pool: &mut OneToOneMarket::Pool = BorrowGlobal(Mutable)>(0xb055); - Token::withdraw(Borrow(Mutable)(select OneToOneMarket::Pool.coin<&mut OneToOneMarket::Pool>(pool)), amount) + let pool: &mut Pool = BorrowGlobal(Mutable)>(0xb055); + Token::withdraw(Borrow(Mutable)(select OneToOneMarket::Pool.coin<&mut Pool>(pool)), amount) } } - public fun deposit(account: &signer,coin: Token::Coin<#0>) - acquires OneToOneMarket::Pool(*) - acquires OneToOneMarket::DepositRecord(*) + public fun deposit(account: &signer,coin: Token::Coin) + acquires Pool(*) + acquires DepositRecord(*) { { let amount: u64 = Token::value(Borrow(Immutable)(coin)); OneToOneMarket::update_deposit_record(account, amount); { - let pool: &mut OneToOneMarket::Pool = BorrowGlobal(Mutable)>(0xb055); - Token::deposit(Borrow(Mutable)(select OneToOneMarket::Pool.coin<&mut OneToOneMarket::Pool>(pool)), coin) + let pool: 
&mut Pool = BorrowGlobal(Mutable)>(0xb055); + Token::deposit(Borrow(Mutable)(select OneToOneMarket::Pool.coin<&mut Pool>(pool)), coin) } } } - private fun accept(account: &signer,init: Token::Coin<#0>) { + private fun accept(account: &signer,init: Token::Coin) { { let sender: address = signer::address_of(account); - if Not(exists>(sender)) { + if Not(exists>(sender)) { Tuple() } else { Abort(42) }; - MoveTo>(account, pack OneToOneMarket::Pool(init)) + MoveTo>(account, pack OneToOneMarket::Pool(init)) } } private fun borrowed_amount(account: &signer): u64 - acquires OneToOneMarket::BorrowRecord(*) + acquires BorrowRecord(*) { { let sender: address = signer::address_of(account); - if Not(exists>(sender)) { + if Not(exists>(sender)) { return 0 } else { Tuple() }; - select OneToOneMarket::BorrowRecord.record<&OneToOneMarket::BorrowRecord>(BorrowGlobal(Immutable)>(sender)) + select OneToOneMarket::BorrowRecord.record<&BorrowRecord>(BorrowGlobal(Immutable)>(sender)) } } private fun deposited_amount(account: &signer): u64 - acquires OneToOneMarket::DepositRecord(*) + acquires DepositRecord(*) { { let sender: address = signer::address_of(account); - if Not(exists>(sender)) { + if Not(exists>(sender)) { return 0 } else { Tuple() }; - select OneToOneMarket::DepositRecord.record<&OneToOneMarket::DepositRecord>(BorrowGlobal(Immutable)>(sender)) + select OneToOneMarket::DepositRecord.record<&DepositRecord>(BorrowGlobal(Immutable)>(sender)) } } private fun max_borrow_amount(account: &signer): u64 - acquires OneToOneMarket::Price(*) - acquires OneToOneMarket::Pool(*) - acquires OneToOneMarket::DepositRecord(*) - acquires OneToOneMarket::BorrowRecord(*) + acquires Price(*) + acquires Pool(*) + acquires DepositRecord(*) + acquires BorrowRecord(*) { { let input_deposited: u64 = OneToOneMarket::deposited_amount(account); { let output_deposited: u64 = OneToOneMarket::borrowed_amount(account); { - let input_into_output: u64 = Mul(input_deposited, select 
OneToOneMarket::Price.price<&OneToOneMarket::Price>(BorrowGlobal(Immutable)>(0xb055))); + let input_into_output: u64 = Mul(input_deposited, select OneToOneMarket::Price.price<&Price>(BorrowGlobal(Immutable)>(0xb055))); { let max_output: u64 = if Lt(input_into_output, output_deposited) { 0 @@ -185,8 +185,8 @@ module 0xb055::OneToOneMarket { }; { let available_output: u64 = { - let pool: &OneToOneMarket::Pool = BorrowGlobal(Immutable)>(0xb055); - Token::value(Borrow(Immutable)(select OneToOneMarket::Pool.coin<&OneToOneMarket::Pool>(pool))) + let pool: &Pool = BorrowGlobal(Immutable)>(0xb055); + Token::value(Borrow(Immutable)(select OneToOneMarket::Pool.coin<&Pool>(pool))) }; if Lt(max_output, available_output) { max_output @@ -199,7 +199,7 @@ module 0xb055::OneToOneMarket { } } } - public fun register_price(account: &signer,initial_in: Token::Coin<#0>,initial_out: Token::Coin<#1>,price: u64) { + public fun register_price(account: &signer,initial_in: Token::Coin,initial_out: Token::Coin,price: u64) { { let sender: address = signer::address_of(account); if Eq
(sender, 0xb055) { @@ -209,39 +209,182 @@ module 0xb055::OneToOneMarket { }; OneToOneMarket::accept(account, initial_in); OneToOneMarket::accept(account, initial_out); - MoveTo>(account, pack OneToOneMarket::Price(price)) + MoveTo>(account, pack OneToOneMarket::Price(price)) } } private fun update_borrow_record(account: &signer,amount: u64) - acquires OneToOneMarket::BorrowRecord(*) + acquires BorrowRecord(*) { { let sender: address = signer::address_of(account); - if Not(exists>(sender)) { - MoveTo>(account, pack OneToOneMarket::BorrowRecord(0)) + if Not(exists>(sender)) { + MoveTo>(account, pack OneToOneMarket::BorrowRecord(0)) } else { Tuple() }; { - let record: &mut u64 = Borrow(Mutable)(select OneToOneMarket::BorrowRecord.record<&mut OneToOneMarket::BorrowRecord>(BorrowGlobal(Mutable)>(sender))); + let record: &mut u64 = Borrow(Mutable)(select OneToOneMarket::BorrowRecord.record<&mut BorrowRecord>(BorrowGlobal(Mutable)>(sender))); record = Add(Deref(record), amount) } } } private fun update_deposit_record(account: &signer,amount: u64) - acquires OneToOneMarket::DepositRecord(*) + acquires DepositRecord(*) { { let sender: address = signer::address_of(account); - if Not(exists>(sender)) { - MoveTo>(account, pack OneToOneMarket::DepositRecord(0)) + if Not(exists>(sender)) { + MoveTo>(account, pack OneToOneMarket::DepositRecord(0)) } else { Tuple() }; { - let record: &mut u64 = Borrow(Mutable)(select OneToOneMarket::DepositRecord.record<&mut OneToOneMarket::DepositRecord>(BorrowGlobal(Mutable)>(sender))); + let record: &mut u64 = Borrow(Mutable)(select OneToOneMarket::DepositRecord.record<&mut DepositRecord>(BorrowGlobal(Mutable)>(sender))); record = Add(Deref(record), amount) } } } } // end 0xb055::OneToOneMarket + +// -- Sourcified model before bytecode pipeline +module 0x2::Token { + struct Coin has store { + type: AssetType, + value: u64, + } + public fun create(type: ATy, value: u64): Coin { + Coin{type: type,value: value} + } + public fun value(coin: &Coin): 
u64 { + coin.value + } + public fun deposit(coin: &mut Coin, check: Coin) { + let Coin{type: type,value: value} = check; + if (&coin.type == &type) () else abort 42; + coin.value = coin.value + value; + } + public fun destroy_zero(coin: Coin) { + let Coin{type: _,value: value} = coin; + if (value == 0) () else abort 11 + } + public fun join(xus: Coin, coin2: Coin): Coin { + deposit(&mut xus, coin2); + xus + } + public fun split(coin: Coin, amount: u64): (Coin, Coin) { + let other = withdraw(&mut coin, amount); + (coin, other) + } + public fun withdraw(coin: &mut Coin, amount: u64): Coin { + if (coin.value >= amount) () else abort 10; + coin.value = coin.value - amount; + Coin{type: *&coin.type,value: amount} + } +} +module 0x70dd::ToddNickels { + use 0x2::Token; + struct T has copy, drop, store { + } + struct Wallet has key { + nickels: Token::Coin, + } + public fun init(account: &signer) { + if (0x1::signer::address_of(account) == 0x70dd) () else abort 42; + move_to(account, Wallet{nickels: Token::create(T{}, 0)}) + } + public fun destroy(c: Token::Coin) + acquires Wallet + { + Token::deposit(&mut borrow_global_mut(0x70dd).nickels, c) + } + public fun mint(account: &signer): Token::Coin { + if (0x1::signer::address_of(account) == 0x70dd) () else abort 42; + Token::create(T{}, 5) + } +} +module 0xb055::OneToOneMarket { + use 0x2::Token; + struct BorrowRecord has key { + record: u64, + } + struct DepositRecord has key { + record: u64, + } + struct Pool has key { + coin: Token::Coin, + } + struct Price has key { + price: u64, + } + public fun borrow(account: &signer, amount: u64): Token::Coin + acquires Priceacquires Poolacquires DepositRecordacquires BorrowRecord + { + if (amount <= max_borrow_amount(account)) () else abort 1025; + update_borrow_record(account, amount); + let pool = borrow_global_mut>(0xb055); + Token::withdraw(&mut pool.coin, amount) + } + public fun deposit(account: &signer, coin: Token::Coin) + acquires Poolacquires DepositRecord + { + let amount 
= Token::value(&coin); + update_deposit_record(account, amount); + let pool = borrow_global_mut>(0xb055); + Token::deposit(&mut pool.coin, coin) + } + fun accept(account: &signer, init: Token::Coin) { + let sender = 0x1::signer::address_of(account); + if (!exists>(sender)) () else abort 42; + move_to>(account, Pool{coin: init}) + } + fun borrowed_amount(account: &signer): u64 + acquires BorrowRecord + { + let sender = 0x1::signer::address_of(account); + if (!exists>(sender)) return 0; + borrow_global>(sender).record + } + fun deposited_amount(account: &signer): u64 + acquires DepositRecord + { + let sender = 0x1::signer::address_of(account); + if (!exists>(sender)) return 0; + borrow_global>(sender).record + } + fun max_borrow_amount(account: &signer): u64 + acquires Priceacquires Poolacquires DepositRecordacquires BorrowRecord + { + let input_deposited = deposited_amount(account); + let output_deposited = borrowed_amount(account); + let input_into_output = input_deposited * borrow_global>(0xb055).price; + let max_output = if (input_into_output < output_deposited) 0 else input_into_output - output_deposited; + let available_output = { + let pool = borrow_global>(0xb055); + Token::value(&pool.coin) + }; + if (max_output < available_output) max_output else available_output + } + public fun register_price(account: &signer, initial_in: Token::Coin, initial_out: Token::Coin, price: u64) { + let sender = 0x1::signer::address_of(account); + if (sender == 0xb055) () else abort 42; + accept(account, initial_in); + accept(account, initial_out); + move_to>(account, Price{price: price}) + } + fun update_borrow_record(account: &signer, amount: u64) + acquires BorrowRecord + { + let sender = 0x1::signer::address_of(account); + if (!exists>(sender)) move_to>(account, BorrowRecord{record: 0}); + let record = &mut borrow_global_mut>(sender).record; + *record = *record + amount + } + fun update_deposit_record(account: &signer, amount: u64) + acquires DepositRecord + { + let sender = 
0x1::signer::address_of(account); + if (!exists>(sender)) move_to>(account, DepositRecord{record: 0}); + let record = &mut borrow_global_mut>(sender).record; + *record = *record + amount + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/values.exp b/third_party/move/move-compiler-v2/tests/checking/typing/values.exp index 4b5bcacdedbcc..4946b50f1eaf6 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/values.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/values.exp @@ -9,3 +9,14 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun t() { + 0x1; + 0; + 10000; + true; + false; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/vector_basic_cases.exp b/third_party/move/move-compiler-v2/tests/checking/typing/vector_basic_cases.exp index fd4c8a5d38bce..e06c3d6abf801 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/vector_basic_cases.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/vector_basic_cases.exp @@ -8,7 +8,7 @@ module 0x42::Test { [Number(0), Number(1), Number(2)]; [Number(0), Number(1), Number(2)]; [Address(Numerical(0000000000000000000000000000000000000000000000000000000000000000)), Address(Numerical(0000000000000000000000000000000000000000000000000000000000000001))]; - Vector(pack Test::X(false), pack Test::X(false)); + Vector(pack Test::X(false), pack Test::X(false)); Vector>(Vector
(), Vector
()); Vector>>(Vector>(Vector
(), Vector
()), Vector>()); Tuple() @@ -22,9 +22,36 @@ module 0x42::Test { [Number(0)]; [Number(0)]; [Address(Numerical(0000000000000000000000000000000000000000000000000000000000000000))]; - Vector(pack Test::X(false)); + Vector(pack Test::X(false)); Vector>(Vector
()); Vector>>(Vector>(Vector
())); Tuple() } } // end 0x42::Test + +// -- Sourcified model before bytecode pipeline +module 0x42::Test { + struct X has drop { + } + fun many() { + vector[0u8, 1u8, 2u8]; + vector[0, 1, 2]; + vector[0u128, 1u128, 2u128]; + vector[0x0, 0x1]; + vector[X{}, X{}]; + vector[vector[], vector[]]; + vector[vector[vector[], vector[]], vector[]]; + } + fun none() { + vector[]; + } + fun one() { + vector[0u8]; + vector[0]; + vector[0u128]; + vector[0x0]; + vector[X{}]; + vector[vector[]]; + vector[vector[vector[]]]; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/while_body.exp b/third_party/move/move-compiler-v2/tests/checking/typing/while_body.exp index c71c0fe643d07..e2b63bd3fa16b 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/while_body.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/while_body.exp @@ -91,3 +91,22 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun t0(cond: bool) { + while (cond) (); + while (cond) (); + while (cond) (); + while (cond) { + 0; + }; + while (cond) if (cond) (); + while (cond) break; + while (cond) break; + while (cond) continue; + while (cond) continue; + while (cond) return (); + while (cond) while (cond) break + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/while_condition.exp b/third_party/move/move-compiler-v2/tests/checking/typing/while_condition.exp index a939866f306f8..039489df286e3 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/while_condition.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/while_condition.exp @@ -33,3 +33,15 @@ module 0x8675309::M { } } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun t0() { + while (true) (); + while (false) () + } + fun t1() { + while (true) (); + while (false) () + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/unused/local_var.exp b/third_party/move/move-compiler-v2/tests/checking/unused/local_var.exp index da933d7ffa6a1..4dd44d731fac7 100644 --- a/third_party/move/move-compiler-v2/tests/checking/unused/local_var.exp +++ b/third_party/move/move-compiler-v2/tests/checking/unused/local_var.exp @@ -4,3 +4,10 @@ module 0xc0ffee::m { 5 } } // end 0xc0ffee::m + +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test(): u64 { + 5 + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/unused/unused_enum.exp b/third_party/move/move-compiler-v2/tests/checking/unused/unused_enum.exp index 118953e1f7238..1905db1c3f9dc 100644 --- a/third_party/move/move-compiler-v2/tests/checking/unused/unused_enum.exp +++ b/third_party/move/move-compiler-v2/tests/checking/unused/unused_enum.exp @@ -4,10 +4,23 @@ module 0x42::enum_types { enum MessageHolder { Empty, Message { - message: string::String, + message: 0x1::string::String, } NewMessage { - message: string::String, + message: 0x1::string::String, } } } // end 0x42::enum_types + +// -- Sourcified model before bytecode pipeline +module 0x42::enum_types { + enum MessageHolder has drop, key { + Empty, + Message { + message: 0x1::string::String, + } + NewMessage { + message: 0x1::string::String, + } + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_empty_block.exp b/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_empty_block.exp index 763597db69434..211d1c533310f 100644 --- a/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_empty_block.exp +++ b/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_empty_block.exp @@ -4,3 +4,9 @@ module 0x815::m { Tuple() } } // end 0x815::m + +// -- Sourcified model before bytecode pipeline +module 0x815::m { + fun match() { + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_1.exp b/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_1.exp index 310eee4728c3c..1abaaa8079872 100644 --- a/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_1.exp +++ b/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_1.exp @@ -10,18 +10,41 @@ module 0x815::m { z: u32, } } - private fun caller(c: m::CommonFields): bool { + private fun caller(c: CommonFields): bool { m::match(c) } - private fun match(c: m::CommonFields): bool { + private fun match(c: CommonFields): bool { match (c) { m::CommonFields::Foo{ x, y: _ } => { Gt(x, 0) } - _: m::CommonFields => { + _: CommonFields => { false } } } } // end 0x815::m + +// -- Sourcified model before bytecode pipeline +module 0x815::m { + enum CommonFields { + Foo { + x: u64, + y: u8, + } + Bar { + x: u64, + z: u32, + } + } + fun caller(c: CommonFields): bool { + match(c) + } + fun match(c: CommonFields): bool { + match (c) { + CommonFields::Foo{x: x,y: _} => x > 0, + _ => false, + } + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_2.exp b/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_2.exp index 4f144439aeab2..3c6136be07907 100644 --- a/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_2.exp +++ b/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_2.exp @@ -15,12 +15,12 @@ module 0x815::m { } private fun match(): bool { { - let c: m::CommonFields = pack m::CommonFields::Foo(0, 0); + let c: CommonFields = pack m::CommonFields::Foo(0, 0); match (c) { m::CommonFields::Foo{ x, y: _ } => { Gt(x, 0) } - _: m::CommonFields => { + _: CommonFields => { false } } @@ -28,3 +28,27 @@ module 0x815::m { } } } // end 0x815::m + +// -- Sourcified model before bytecode pipeline +module 0x815::m { + 
enum CommonFields { + Foo { + x: u64, + y: u8, + } + Bar { + x: u64, + z: u32, + } + } + fun caller(): bool { + match() + } + fun match(): bool { + let c = CommonFields::Foo{x: 0,y: 0u8}; + match (c) { + CommonFields::Foo{x: x,y: _} => x > 0, + _ => false, + } + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_3.exp b/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_3.exp index fd62c81d4e4be..16ae0b37ecef0 100644 --- a/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_3.exp +++ b/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_fun_3.exp @@ -10,18 +10,41 @@ module 0x815::m { z: u32, } } - private fun caller(c: m::CommonFields): bool { + private fun caller(c: CommonFields): bool { And(m::match(c, 22), true) } - private fun match(c: m::CommonFields,t: u64): bool { + private fun match(c: CommonFields,t: u64): bool { match (c) { m::CommonFields::Foo{ x, y: _ } => { Gt(x, t) } - _: m::CommonFields => { + _: CommonFields => { false } } } } // end 0x815::m + +// -- Sourcified model before bytecode pipeline +module 0x815::m { + enum CommonFields { + Foo { + x: u64, + y: u8, + } + Bar { + x: u64, + z: u32, + } + } + fun caller(c: CommonFields): bool { + match(c, 22) && true + } + fun match(c: CommonFields, t: u64): bool { + match (c) { + CommonFields::Foo{x: x,y: _} => x > t, + _ => false, + } + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_var.exp b/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_var.exp index b9760d48fca83..574285468902b 100644 --- a/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_var.exp +++ b/third_party/move/move-compiler-v2/tests/checking/variants/variants_allow_match_var.exp @@ -10,7 +10,7 @@ module 0x815::m { z: u32, } } - private fun match(c: m::CommonFields,t: u64): bool { + private fun 
match(c: CommonFields,t: u64): bool { match (c) { m::CommonFields::Foo{ x, y: _ } => { { @@ -18,10 +18,33 @@ module 0x815::m { match } } - _: m::CommonFields => { + _: CommonFields => { false } } } } // end 0x815::m + +// -- Sourcified model before bytecode pipeline +module 0x815::m { + enum CommonFields { + Foo { + x: u64, + y: u8, + } + Bar { + x: u64, + z: u32, + } + } + fun match(c: CommonFields, t: u64): bool { + match (c) { + CommonFields::Foo{x: x,y: _} => { + let match = x > t; + match + }, + _ => false, + } + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/variants/variants_constants.exp b/third_party/move/move-compiler-v2/tests/checking/variants/variants_constants.exp index 3dd980bb022e7..a58db1808bd05 100644 --- a/third_party/move/move-compiler-v2/tests/checking/variants/variants_constants.exp +++ b/third_party/move/move-compiler-v2/tests/checking/variants/variants_constants.exp @@ -11,29 +11,29 @@ module 0x815::m { } private fun t0(): bool { { - let c: m::Color = pack m::Color::Red(); - Eq(select_variants m::Color.RGB.red(c), 1) + let c: Color = pack m::Color::Red(); + Eq(select_variants m::Color.RGB.red(c), 1) } } private fun t1(): bool { { - let c: m::Color = pack m::Color::Red(); - Eq(select_variants m::Color.RGB.red(c), 1) + let c: Color = pack m::Color::Red(); + Eq(select_variants m::Color.RGB.red(c), 1) } } private fun t2(): bool { { - let c: m::Color = pack m::Color::Blue(); - Eq(select_variants m::Color.RGB.red(c), 1) + let c: Color = pack m::Color::Blue(); + Eq(select_variants m::Color.RGB.red(c), 1) } } private fun t3(): bool { { - let c: m::Color = pack m::Color::Blue(); - Eq(select_variants m::Color.RGB.red(c), 1) + let c: Color = pack m::Color::Blue(); + Eq(select_variants m::Color.RGB.red(c), 1) } } - private fun t4(c: &m::Color) { + private fun t4(c: &Color) { match (c) { m::Color::Red => { Abort(1) @@ -46,6 +46,41 @@ module 0x815::m { } } // end 0x815::m +// -- Sourcified model before bytecode pipeline +module 0x815::m 
{ + enum Color { + RGB { + red: u64, + green: u64, + blue: u64, + } + Red, + Blue, + } + fun t0(): bool { + let c = Color::Red{}; + c.RGB.red == 1 + } + fun t1(): bool { + let c = Color::Red{}; + c.RGB.red == 1 + } + fun t2(): bool { + let c = Color::Blue{}; + c.RGB.red == 1 + } + fun t3(): bool { + let c = Color::Blue{}; + c.RGB.red == 1 + } + fun t4(c: &Color) { + match (c) { + Color::Red{} => abort 1, + Color::Blue{} => abort 2, + } + } +} + Diagnostics: error: match not exhaustive diff --git a/third_party/move/move-compiler-v2/tests/checking/variants/variants_ok.exp b/third_party/move/move-compiler-v2/tests/checking/variants/variants_ok.exp index 94a7918c9b8f3..3327aba9b16e9 100644 --- a/third_party/move/move-compiler-v2/tests/checking/variants/variants_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/variants/variants_ok.exp @@ -32,7 +32,7 @@ module 0x815::m { other: u32, } } - private fun t1(self: m::Color): bool { + private fun t1(self: Color): bool { match (self) { m::Color::RGB{ red, green, blue } => { Gt(Add(Add(red, green), blue), 0) @@ -46,7 +46,7 @@ module 0x815::m { } } - private fun t1_address_qualified(self: m::Color): bool { + private fun t1_address_qualified(self: Color): bool { match (self) { m::Color::RGB{ red, green, blue } => { Gt(Add(Add(red, green), blue), 0) @@ -60,7 +60,7 @@ module 0x815::m { } } - private fun t1_field_named(self: m::Color): bool { + private fun t1_field_named(self: Color): bool { match (self) { m::Color::RGB{ red: r, green: g, blue } => { Gt(Add(Add(r, g), blue), 0) @@ -74,7 +74,7 @@ module 0x815::m { } } - private fun t1_module_qualified(self: m::Color): bool { + private fun t1_module_qualified(self: Color): bool { match (self) { m::Color::RGB{ red, green, blue } => { Gt(Add(Add(red, green), blue), 0) @@ -88,7 +88,7 @@ module 0x815::m { } } - private fun t1_uses_block(self: m::Color): bool { + private fun t1_uses_block(self: Color): bool { match (self) { m::Color::RGB{ red, green, blue } => { Gt(Add(Add(red, 
green), blue), 0) @@ -102,7 +102,7 @@ module 0x815::m { } } - private fun t1_uses_block_no_comma(self: m::Color): bool { + private fun t1_uses_block_no_comma(self: Color): bool { match (self) { m::Color::RGB{ red, green, blue } => { Gt(Add(Add(red, green), blue), 0) @@ -116,41 +116,41 @@ module 0x815::m { } } - private fun t2(self: m::Color): bool { + private fun t2(self: Color): bool { match (self) { m::Color::RGB{ red, green, blue } => { Gt(Add(Add(red, green), blue), 0) } - _: m::Color => { + _: Color => { true } } } - private fun t3(self: m::Color): bool { + private fun t3(self: Color): bool { match (Borrow(Immutable)(self)) { m::Color::RGB{ red, green, blue } => { Gt(Add(Add(Deref(red), Deref(green)), Deref(blue)), 0) } - _: &m::Color => { + _: &Color => { true } } } - private fun t4(self: m::Color): m::Color { + private fun t4(self: Color): Color { match (Borrow(Mutable)(self)) { m::Color::RGB{ red, green: _, blue: _ } => { red = 2 } - _: &mut m::Color => { + _: &mut Color => { Tuple() } } ; self } - private fun t5_freeze(self: m::Color): u64 { + private fun t5_freeze(self: Color): u64 { { let x: u64 = 1; { @@ -161,7 +161,7 @@ module 0x815::m { m::Color::Blue => { Freeze(false)(Borrow(Mutable)(x)) } - _: &mut m::Color => { + _: &mut Color => { Freeze(false)(Borrow(Mutable)(x)) } } @@ -170,24 +170,24 @@ module 0x815::m { } } } - private fun t6_construct(self: m::Color): m::Color { + private fun t6_construct(self: Color): Color { match (self) { m::Color::RGB{ red, green, blue } => { pack m::Color::RGB(Add(red, 1), Sub(green, 1), blue) } - _: m::Color => { + _: Color => { self } } } - private fun t7_let_unpack(self: m::Color): u64 { + private fun t7_let_unpack(self: Color): u64 { { let m::Color::RGB{ red, green, blue } = self; Add(Add(red, green), blue) } } - private fun t8_unqualified_variant(self: m::Color): bool { + private fun t8_unqualified_variant(self: Color): bool { match (self) { m::Color::RGB{ red, green, blue } => { And(Neq(red, green), Neq(green, 
blue)) @@ -201,10 +201,139 @@ module 0x815::m { } } - private fun t9_common_field(self: m::CommonFields): u64 { - select_variants m::CommonFields.Foo.x|m::CommonFields.Bar.x(self) + private fun t9_common_field(self: CommonFields): u64 { + select_variants m::CommonFields.Foo.x|m::CommonFields.Bar.x(self) } - private fun t9_common_field_ref(self: &m::CommonFields): &u64 { - Borrow(Immutable)(select_variants m::CommonFields.Foo.x|m::CommonFields.Bar.x<&m::CommonFields>(self)) + private fun t9_common_field_ref(self: &CommonFields): &u64 { + Borrow(Immutable)(select_variants m::CommonFields.Foo.x|m::CommonFields.Bar.x<&CommonFields>(self)) } } // end 0x815::m + +// -- Sourcified model before bytecode pipeline +module 0x815::m { + use 0x815::m; + enum Color { + RGB { + red: u64, + green: u64, + blue: u64, + } + Red, + Blue, + } + enum ColorUsesBlockNoComma { + RGB { + red: u64, + green: u64, + blue: u64, + } + Red, + Blue, + } + enum CommonFields { + Foo { + x: u64, + y: u8, + } + Bar { + x: u64, + z: u32, + } + Baz { + other: u32, + } + } + fun t1(self: Color): bool { + match (self) { + Color::RGB{red: red,green: green,blue: blue} => red + green + blue > 0, + Color::Red{} => true, + Color::Blue{} => false, + } + } + fun t1_address_qualified(self: Color): bool { + match (self) { + Color::RGB{red: red,green: green,blue: blue} => red + green + blue > 0, + Color::Red{} => true, + Color::Blue{} => false, + } + } + fun t1_field_named(self: Color): bool { + match (self) { + Color::RGB{red: r,green: g,blue: blue} => r + g + blue > 0, + Color::Red{} => true, + Color::Blue{} => false, + } + } + fun t1_module_qualified(self: Color): bool { + match (self) { + Color::RGB{red: red,green: green,blue: blue} => red + green + blue > 0, + Color::Red{} => true, + Color::Blue{} => false, + } + } + fun t1_uses_block(self: Color): bool { + match (self) { + Color::RGB{red: red,green: green,blue: blue} => red + green + blue > 0, + Color::Red{} => true, + Color::Blue{} => false, + } + } + fun 
t1_uses_block_no_comma(self: Color): bool { + match (self) { + Color::RGB{red: red,green: green,blue: blue} => red + green + blue > 0, + Color::Red{} => true, + Color::Blue{} => false, + } + } + fun t2(self: Color): bool { + match (self) { + Color::RGB{red: red,green: green,blue: blue} => red + green + blue > 0, + _ => true, + } + } + fun t3(self: Color): bool { + match (&self) { + Color::RGB{red: red,green: green,blue: blue} => *red + *green + *blue > 0, + _ => true, + } + } + fun t4(self: Color): Color { + match (&mut self) { + Color::RGB{red: red,green: _,blue: _} => *red = 2, + _ => (), + }; + self + } + fun t5_freeze(self: Color): u64 { + let x = 1; + let r = match (&mut self) { + Color::Red{} => &x, + Color::Blue{} => /*freeze*/&mut x, + _ => /*freeze*/&mut x, + }; + *r + } + fun t6_construct(self: Color): Color { + match (self) { + Color::RGB{red: red,green: green,blue: blue} => Color::RGB{red: red + 1,green: green - 1,blue: blue}, + _ => self, + } + } + fun t7_let_unpack(self: Color): u64 { + let Color::RGB{red: red,green: green,blue: blue} = self; + red + green + blue + } + fun t8_unqualified_variant(self: Color): bool { + match (self) { + Color::RGB{red: red,green: green,blue: blue} => red != green && green != blue, + Color::Red{} => true, + Color::Blue{} => false, + } + } + fun t9_common_field(self: CommonFields): u64 { + self.Foo.x + } + fun t9_common_field_ref(self: &CommonFields): &u64 { + &self.Foo.x + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_no_parenthesis_ok.exp b/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_no_parenthesis_ok.exp index 2662bcbd649a7..da04961916c62 100644 --- a/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_no_parenthesis_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_no_parenthesis_ok.exp @@ -9,27 +9,63 @@ module 0x815::m { Red, Blue, } - enum Generic { + enum Generic { Foo { - 0: #0, + 0: T, } 
Bar { 0: u64, } } - private fun test(c: m::Color): bool { + private fun test(c: Color): bool { test_variants m::Color::Red|RGB(c) } - private fun test_fully_qualified(c: m::Color): bool { + private fun test_fully_qualified(c: Color): bool { test_variants m::Color::Red(c) } - private fun test_generic(x: &m::Generic<#0>): bool { + private fun test_generic(x: &Generic): bool { test_variants m::Generic::Foo(x) } - private fun test_generic_qualified(x: &m::Generic<#0>): bool { + private fun test_generic_qualified(x: &Generic): bool { test_variants m::Generic::Foo(x) } - private fun test_qualified(c: m::Color): bool { + private fun test_qualified(c: Color): bool { test_variants m::Color::Red|RGB(c) } } // end 0x815::m + +// -- Sourcified model before bytecode pipeline +module 0x815::m { + enum Color { + RGB { + red: u64, + green: u64, + blue: u64, + } + Red, + Blue, + } + enum Generic { + Foo { + 0: T, + } + Bar { + 0: u64, + } + } + fun test(c: Color): bool { + c is Red | RGB + } + fun test_fully_qualified(c: Color): bool { + c is Red + } + fun test_generic(x: &Generic): bool { + x is Foo + } + fun test_generic_qualified(x: &Generic): bool { + x is Foo + } + fun test_qualified(c: Color): bool { + c is Red | RGB + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_ok.exp b/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_ok.exp index 2662bcbd649a7..da04961916c62 100644 --- a/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_ok.exp +++ b/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_ok.exp @@ -9,27 +9,63 @@ module 0x815::m { Red, Blue, } - enum Generic { + enum Generic { Foo { - 0: #0, + 0: T, } Bar { 0: u64, } } - private fun test(c: m::Color): bool { + private fun test(c: Color): bool { test_variants m::Color::Red|RGB(c) } - private fun test_fully_qualified(c: m::Color): bool { + private fun test_fully_qualified(c: Color): bool { test_variants m::Color::Red(c) } 
- private fun test_generic(x: &m::Generic<#0>): bool { + private fun test_generic(x: &Generic): bool { test_variants m::Generic::Foo(x) } - private fun test_generic_qualified(x: &m::Generic<#0>): bool { + private fun test_generic_qualified(x: &Generic): bool { test_variants m::Generic::Foo(x) } - private fun test_qualified(c: m::Color): bool { + private fun test_qualified(c: Color): bool { test_variants m::Color::Red|RGB(c) } } // end 0x815::m + +// -- Sourcified model before bytecode pipeline +module 0x815::m { + enum Color { + RGB { + red: u64, + green: u64, + blue: u64, + } + Red, + Blue, + } + enum Generic { + Foo { + 0: T, + } + Bar { + 0: u64, + } + } + fun test(c: Color): bool { + c is Red | RGB + } + fun test_fully_qualified(c: Color): bool { + c is Red + } + fun test_generic(x: &Generic): bool { + x is Foo + } + fun test_generic_qualified(x: &Generic): bool { + x is Foo + } + fun test_qualified(c: Color): bool { + c is Red | RGB + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_parse_ok2.exp b/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_parse_ok2.exp index 0db60293cf7d3..89a8fa4425a82 100644 --- a/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_parse_ok2.exp +++ b/third_party/move/move-compiler-v2/tests/checking/variants/variants_test_parse_ok2.exp @@ -9,7 +9,23 @@ module 0x815::m { Red, Blue, } - private fun test_red_or_rgb(c: m::Color): bool { + private fun test_red_or_rgb(c: Color): bool { test_variants m::Color::Red(c) } } // end 0x815::m + +// -- Sourcified model before bytecode pipeline +module 0x815::m { + enum Color { + RGB { + red: u64, + green: u64, + blue: u64, + } + Red, + Blue, + } + fun test_red_or_rgb(c: Color): bool { + c is Red + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/visibility-checker/direct_visibility.exp b/third_party/move/move-compiler-v2/tests/checking/visibility-checker/direct_visibility.exp index 
90e9da4323862..01606d3099264 100644 --- a/third_party/move/move-compiler-v2/tests/checking/visibility-checker/direct_visibility.exp +++ b/third_party/move/move-compiler-v2/tests/checking/visibility-checker/direct_visibility.exp @@ -20,3 +20,24 @@ module 0x815::c { Tuple() } } // end 0x815::c + +// -- Sourcified model before bytecode pipeline +module 0x815::b { + friend 0x815::c; + friend fun f() { + } +} +module 0x815::a { + friend 0x815::c; + friend fun f() { + } + friend fun g() { + } +} +module 0x815::c { + public fun f() { + 0x815::a::f(); + 0x815::a::g(); + 0x815::b::f(); + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/visibility-checker/module_call_visibility_friend.exp b/third_party/move/move-compiler-v2/tests/checking/visibility-checker/module_call_visibility_friend.exp index f3424d16dd504..46adaacca8b01 100644 --- a/third_party/move/move-compiler-v2/tests/checking/visibility-checker/module_call_visibility_friend.exp +++ b/third_party/move/move-compiler-v2/tests/checking/visibility-checker/module_call_visibility_friend.exp @@ -49,3 +49,51 @@ module 0x2::M { M::f_friend() } } // end 0x2::M + +// -- Sourcified model before bytecode pipeline +module 0x2::Y { + friend 0x2::M; + friend fun f_friend() { + } +} +module 0x2::X { + public fun f_public() { + } +} +module 0x2::M { + use 0x2::Y; + use 0x2::X; + friend fun f_friend() { + } + public fun f_public() { + } + friend fun f_friend_call_friend() { + Y::f_friend() + } + friend fun f_friend_call_public() { + X::f_public() + } + friend fun f_friend_call_self_friend() { + f_friend() + } + friend fun f_friend_call_self_private() { + f_private() + } + friend fun f_friend_call_self_public() { + f_public() + } + fun f_private() { + } + fun f_private_call_friend() { + Y::f_friend() + } + fun f_private_call_self_friend() { + f_friend() + } + public fun f_public_call_friend() { + Y::f_friend() + } + public fun f_public_call_self_friend() { + f_friend() + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/checking/visibility-checker/parser_package_keyword.exp b/third_party/move/move-compiler-v2/tests/checking/visibility-checker/parser_package_keyword.exp index 89a30d8357d83..d40b04a4fe2dd 100644 --- a/third_party/move/move-compiler-v2/tests/checking/visibility-checker/parser_package_keyword.exp +++ b/third_party/move/move-compiler-v2/tests/checking/visibility-checker/parser_package_keyword.exp @@ -7,3 +7,11 @@ module 0x42::package { } } } // end 0x42::package + +// -- Sourcified model before bytecode pipeline +module 0x42::package { + friend fun package(package: u8): u8 { + let package = package + 1u8; + package + } +} diff --git a/third_party/move/move-compiler-v2/tests/control-flow-simplification/jump-label.on.exp b/third_party/move/move-compiler-v2/tests/control-flow-simplification/jump-label.on.exp index 450d583197d37..5ad35eeb9ecae 100644 --- a/third_party/move/move-compiler-v2/tests/control-flow-simplification/jump-label.on.exp +++ b/third_party/move/move-compiler-v2/tests/control-flow-simplification/jump-label.on.exp @@ -3,20 +3,20 @@ [variant baseline] fun test::test<#0>($t0: vector): #0 { var $t1: #0 - var $t2: string::String + var $t2: 0x1::string::String var $t3: bool - var $t4: string::String - var $t5: string::String + var $t4: 0x1::string::String + var $t5: 0x1::string::String var $t6: vector var $t7: bool var $t8: bool - var $t9: string::String - var $t10: string::String + var $t9: 0x1::string::String + var $t10: 0x1::string::String var $t11: vector var $t12: bool var $t13: bool - var $t14: string::String - var $t15: string::String + var $t14: 0x1::string::String + var $t15: 0x1::string::String var $t16: vector var $t17: bool var $t18: u64 @@ -75,8 +75,8 @@ fun test::baz<#0>($t0: vector): #0 { [variant baseline] -fun test::foo<#0>(): string::String { - var $t0: string::String [unused] +fun test::foo<#0>(): 0x1::string::String { + var $t0: 0x1::string::String [unused] var $t1: u64 0: $t1 := 0 1: abort($t1) diff 
--git a/third_party/move/move-compiler-v2/tests/copy-propagation/mut_refs_2.exp b/third_party/move/move-compiler-v2/tests/copy-propagation/mut_refs_2.exp index ef9bd20a5bed4..f5a73992e920b 100644 --- a/third_party/move/move-compiler-v2/tests/copy-propagation/mut_refs_2.exp +++ b/third_party/move/move-compiler-v2/tests/copy-propagation/mut_refs_2.exp @@ -1,30 +1,30 @@ ============ initial bytecode ================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { var $t1: u64 - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: &0xc0ffee::m::S var $t8: &u64 0: $t2 := infer($t0) 1: $t3 := infer($t2) 2: $t5 := borrow_local($t2) - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) 4: $t6 := 0 5: write_ref($t4, $t6) 6: $t7 := borrow_local($t3) - 7: $t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) 8: $t1 := read_ref($t8) 9: return $t1 } Diagnostics: -error: local `p` of type `m::S` does not have the `copy` ability +error: local `p` of type `S` does not have the `copy` ability ┌─ tests/copy-propagation/mut_refs_2.move:10:17 │ 10 │ let q = p; @@ -32,13 +32,13 @@ error: local `p` of type `m::S` does not have the `copy` ability 11 │ let ref = &mut p.a; │ - used here -error: local `p` of type `m::S` does not have the `drop` ability +error: local `p` of type `S` does not have the `drop` ability ┌─ tests/copy-propagation/mut_refs_2.move:11:24 │ 11 │ let ref = &mut p.a; │ ^ still borrowed but will be implicitly dropped later since it is no longer used -error: local `q` of type `m::S` does not have the `drop` ability +error: local `q` of type `S` does not have the `drop` ability ┌─ tests/copy-propagation/mut_refs_2.move:13:9 │ 13 │ q.a diff --git a/third_party/move/move-compiler-v2/tests/copy-propagation/sequential_assign_struct.exp 
b/third_party/move/move-compiler-v2/tests/copy-propagation/sequential_assign_struct.exp index 4497f58bc7399..0b75ab964eac4 100644 --- a/third_party/move/move-compiler-v2/tests/copy-propagation/sequential_assign_struct.exp +++ b/third_party/move/move-compiler-v2/tests/copy-propagation/sequential_assign_struct.exp @@ -1,13 +1,13 @@ ============ initial bytecode ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo - var $t2: m::Foo - var $t3: m::Foo - var $t4: m::Foo - var $t5: m::Foo - var $t6: m::Foo +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo + var $t2: 0xc0ffee::m::Foo + var $t3: 0xc0ffee::m::Foo + var $t4: 0xc0ffee::m::Foo + var $t5: 0xc0ffee::m::Foo + var $t6: 0xc0ffee::m::Foo 0: $t2 := infer($t0) 1: $t3 := infer($t2) 2: $t4 := infer($t3) @@ -20,13 +20,13 @@ fun m::sequential($t0: m::Foo): m::Foo { ============ after DeadStoreElimination: ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo - var $t2: m::Foo - var $t3: m::Foo - var $t4: m::Foo - var $t5: m::Foo - var $t6: m::Foo +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo + var $t2: 0xc0ffee::m::Foo + var $t3: 0xc0ffee::m::Foo + var $t4: 0xc0ffee::m::Foo + var $t5: 0xc0ffee::m::Foo + var $t6: 0xc0ffee::m::Foo 0: $t2 := move($t0) 1: $t3 := move($t2) 2: $t4 := move($t3) diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/recursive_type_instantiation.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/recursive_type_instantiation.exp index 2982749beb36a..fc66bb2fb5ef9 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/recursive_type_instantiation.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/recursive_type_instantiation.exp @@ -6,7 +6,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 6 │ 
public fun simple_recursion() { │ ^^^^^^^^^^^^^^^^ │ - = `simple_recursion` calls `simple_recursion>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:7 + = `simple_recursion` calls `simple_recursion>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:7 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/recursive_type_instantiation.move:10:9 @@ -15,7 +15,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ ^^^^^^^^^^^^^^^^^^^^^ │ = `two_level_recursion_0` calls `two_level_recursion_1` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:11 - = `two_level_recursion_1` calls `two_level_recursion_0>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:15 + = `two_level_recursion_1` calls `two_level_recursion_0>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:15 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/recursive_type_instantiation.move:14:9 @@ -23,8 +23,8 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 14 │ fun two_level_recursion_1() { │ ^^^^^^^^^^^^^^^^^^^^^ │ - = `two_level_recursion_1` calls `two_level_recursion_0>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:15 - = `two_level_recursion_0>` calls `two_level_recursion_1>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:11 + = `two_level_recursion_1` calls `two_level_recursion_0>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:15 + = `two_level_recursion_0>` calls `two_level_recursion_1>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:11 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ 
tests/cyclic-instantiation-checker/recursive_type_instantiation.move:18:9 @@ -34,7 +34,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ = `three_level_recursion_0` calls `three_level_recursion_1` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:19 = `three_level_recursion_1` calls `three_level_recursion_2` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:23 - = `three_level_recursion_2` calls `three_level_recursion_0>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:27 + = `three_level_recursion_2` calls `three_level_recursion_0>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:27 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/recursive_type_instantiation.move:22:9 @@ -43,8 +43,8 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ ^^^^^^^^^^^^^^^^^^^^^^^ │ = `three_level_recursion_1` calls `three_level_recursion_2` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:23 - = `three_level_recursion_2` calls `three_level_recursion_0>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:27 - = `three_level_recursion_0>` calls `three_level_recursion_1>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:19 + = `three_level_recursion_2` calls `three_level_recursion_0>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:27 + = `three_level_recursion_0>` calls `three_level_recursion_1>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:19 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/recursive_type_instantiation.move:26:9 @@ -52,9 +52,9 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 26 │ fun 
three_level_recursion_2() { │ ^^^^^^^^^^^^^^^^^^^^^^^ │ - = `three_level_recursion_2` calls `three_level_recursion_0>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:27 - = `three_level_recursion_0>` calls `three_level_recursion_1>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:19 - = `three_level_recursion_1>` calls `three_level_recursion_2>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:23 + = `three_level_recursion_2` calls `three_level_recursion_0>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:27 + = `three_level_recursion_0>` calls `three_level_recursion_1>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:19 + = `three_level_recursion_1>` calls `three_level_recursion_2>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:23 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/recursive_type_instantiation.move:30:9 @@ -62,7 +62,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 30 │ fun recurse_at_different_position() { │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ │ - = `recurse_at_different_position` calls `recurse_at_different_position>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:31 + = `recurse_at_different_position` calls `recurse_at_different_position>` at tests/cyclic-instantiation-checker/recursive_type_instantiation.move:31 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/recursive_type_instantiation.move:44:9 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/complex_1.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/complex_1.exp index 8ada1df6f41e8..1bb77ca0c249f 100644 --- 
a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/complex_1.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/complex_1.exp @@ -8,7 +8,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ = `c` calls `d` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:15 = `d` calls `b` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:20 - = `b` calls `c, bool>` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:10 + = `b` calls `c, bool>` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:10 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-tests/complex_1.move:26:9 @@ -17,7 +17,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ ^ │ = `f` calls `g` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:27 - = `g` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:31 + = `g` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:31 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-tests/complex_1.move:30:9 @@ -25,5 +25,5 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 30 │ fun g() { │ ^ │ - = `g` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:31 - = `f>` calls `g>` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:27 + = `g` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:31 + = `f>` calls `g>` at tests/cyclic-instantiation-checker/v1-tests/complex_1.move:27 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_just_type_params_ok.exp 
b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_just_type_params_ok.exp index 96b0d3038b3ec..331fad3cf4272 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_just_type_params_ok.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_just_type_params_ok.exp @@ -7,3 +7,13 @@ module 0x8675309::M { M::f() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun f() { + g() + } + fun g() { + f() + } +} diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_non_generic_type_ok.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_non_generic_type_ok.exp index 079ed8439d493..fe2891c695b11 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_non_generic_type_ok.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_non_generic_type_ok.exp @@ -1,12 +1,25 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { - struct S { - f: #0, + struct S { + f: T, } private fun f() { - M::g>() + M::g>() } private fun g() { M::f() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S { + f: T, + } + fun f() { + g>() + } + fun g() { + f() + } +} diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_just_type_params_shitfing_ok.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_just_type_params_shitfing_ok.exp index f6b05b6205cce..d8a3df9022b92 100644 --- 
a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_just_type_params_shitfing_ok.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_just_type_params_shitfing_ok.exp @@ -10,3 +10,16 @@ module 0x8675309::M { M::f() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun f() { + g() + } + fun g() { + h() + } + fun h() { + f() + } +} diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.exp index 970a22f456bec..a4cbf1c179a07 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.exp @@ -7,8 +7,8 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ ^ │ = `f` calls `g` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:5 - = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:9 - = `h>` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:14 + = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:9 + = `h>` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:14 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow 
without bound ┌─ tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:8:9 @@ -16,9 +16,9 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 8 │ fun g() { │ ^ │ - = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:9 - = `h>` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:14 - = `f>` calls `g, T1>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:5 + = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:9 + = `h>` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:14 + = `f>` calls `g, T1>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:5 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:12:9 @@ -28,4 +28,4 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ = `h` calls `f` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:14 = `f` calls `g` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:5 - = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:9 + = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_non_generic_types_ok.move:9 diff --git 
a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.exp index 4a656d29b6bec..db55cc263c7ba 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.exp @@ -7,8 +7,8 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ ^ │ = `f` calls `g` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:5 - = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:9 - = `h>` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:13 + = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:9 + = `h>` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:13 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:8:9 @@ -16,9 +16,9 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 8 │ fun g() { │ ^ │ - = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:9 - = `h>` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:13 - = `f>` calls `g, T1>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:5 + = `g` calls `h>` at 
tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:9 + = `h>` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:13 + = `f>` calls `g, T1>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:5 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:12:9 @@ -28,4 +28,4 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ = `h` calls `f` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:13 = `f` calls `g` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:5 - = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:9 + = `g` calls `h>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_three_args_type_con_shifting.move:9 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_non_generic_type_and_type_param_ok.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_non_generic_type_and_type_param_ok.exp index 2d82b3c7326e7..8fd13c702bc93 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_non_generic_type_and_type_param_ok.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_non_generic_type_and_type_param_ok.exp @@ -7,3 +7,13 @@ module 0x8675309::M { M::f() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun f() { + g() + } + fun g() { + f() + } +} diff --git 
a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_just_type_params_ok.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_just_type_params_ok.exp index 3033747e7d84a..0f757f0c9298b 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_just_type_params_ok.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_just_type_params_ok.exp @@ -9,3 +9,13 @@ module 0x8675309::M { Tuple() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun f() { + g(); + } + fun g() { + f(); + } +} diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.exp index 106f433858593..1066db9abfad9 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.exp @@ -7,7 +7,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ ^ │ = `f` calls `g` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.move:5 - = `g` calls `f, u64>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.move:9 + = `g` calls `f, u64>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.move:9 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ 
tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.move:8:9 @@ -15,5 +15,5 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 8 │ fun g() { │ ^ │ - = `g` calls `f, u64>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.move:9 - = `f, u64>` calls `g, T1>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.move:5 + = `g` calls `f, u64>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.move:9 + = `f, u64>` calls `g, T1>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_two_args_swapping_type_con.move:5 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.exp index 78f036b15c4b4..13dda3ae7a1ba 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.exp @@ -6,8 +6,8 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 7 │ fun f() { │ ^ │ - = `f` calls `g>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.move:8 - = `g>` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.move:12 + = `f` calls `g>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.move:8 + = `g>` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.move:12 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.move:11:9 @@ -16,4 +16,4 @@ error: cyclic type instantiation: a cycle 
of recursive calls causes a type to gr │ ^ │ = `g` calls `f` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.move:12 - = `f` calls `g>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.move:8 + = `f` calls `g>` at tests/cyclic-instantiation-checker/v1-tests/mutually_recursive_type_con.move:8 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/nested_types_1.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/nested_types_1.exp index d1798cea03482..4c7b9af8fd5e8 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/nested_types_1.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/nested_types_1.exp @@ -6,4 +6,4 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 4 │ fun foo() { │ ^^^ │ - = `foo` calls `foo>>` at tests/cyclic-instantiation-checker/v1-tests/nested_types_1.move:5 + = `foo` calls `foo>>` at tests/cyclic-instantiation-checker/v1-tests/nested_types_1.move:5 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/nested_types_2.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/nested_types_2.exp index 53a1b6a0bfa1f..4b510f95aa137 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/nested_types_2.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/nested_types_2.exp @@ -6,4 +6,4 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 5 │ fun foo() { │ ^^^ │ - = `foo` calls `foo>>>` at tests/cyclic-instantiation-checker/v1-tests/nested_types_2.move:6 + = `foo` calls `foo>>>` at tests/cyclic-instantiation-checker/v1-tests/nested_types_2.move:6 diff --git 
a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_infinite_type_terminates.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_infinite_type_terminates.exp index 7fc325aa27a60..aa91ac66ecc19 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_infinite_type_terminates.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_infinite_type_terminates.exp @@ -6,4 +6,4 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 9 │ fun f(n: u64, x: T): T { │ ^ │ - = `f` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/recursive_infinite_type_terminates.move:11 + = `f` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/recursive_infinite_type_terminates.move:11 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_just_type_params_ok.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_just_type_params_ok.exp index 3e04246fb2bb0..d5e393f16eb61 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_just_type_params_ok.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_just_type_params_ok.exp @@ -1,6 +1,13 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { - public fun f(x: #0) { + public fun f(x: T) { M::f(x) } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + public fun f(x: T) { + f(x) + } +} diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_non_generic_type_ok.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_non_generic_type_ok.exp index f0921c5777493..cc5ac27ef18fa 100644 --- 
a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_non_generic_type_ok.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_non_generic_type_ok.exp @@ -4,3 +4,10 @@ module 0x8675309::M { M::f() } } // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun f() { + f() + } +} diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_type_con.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_type_con.exp index 189a74b48d385..e9041a1b05430 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_type_con.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_type_con.exp @@ -6,4 +6,4 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 6 │ fun f(x: T) { │ ^ │ - = `f` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_type_con.move:7 + = `f` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/recursive_one_arg_type_con.move:7 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_two_args_swapping_type_con.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_two_args_swapping_type_con.exp index e652867004204..4892af9de9b66 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_two_args_swapping_type_con.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/recursive_two_args_swapping_type_con.exp @@ -6,4 +6,4 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 7 │ fun f(a: T1, x: T2) { │ ^ │ - = `f` calls `f, T1>` at 
tests/cyclic-instantiation-checker/v1-tests/recursive_two_args_swapping_type_con.move:8 + = `f` calls `f, T1>` at tests/cyclic-instantiation-checker/v1-tests/recursive_two_args_swapping_type_con.move:8 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/two_loops.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/two_loops.exp index 37fe8076ba422..ca960a47c8c83 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/two_loops.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-tests/two_loops.exp @@ -6,7 +6,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 7 │ fun f() { │ ^ │ - = `f` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/two_loops.move:8 + = `f` calls `f>` at tests/cyclic-instantiation-checker/v1-tests/two_loops.move:8 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-tests/two_loops.move:11:9 @@ -14,4 +14,4 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 11 │ fun g() { │ ^ │ - = `g` calls `g>` at tests/cyclic-instantiation-checker/v1-tests/two_loops.move:12 + = `g` calls `g>` at tests/cyclic-instantiation-checker/v1-tests/two_loops.move:12 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.exp index 69f2b94f77c68..b5a0ebd9a7cd4 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.exp @@ -6,7 +6,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 
6 │ public fun t() { │ ^ │ - = `t` calls `t>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:7 + = `t` calls `t>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:7 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:10:16 @@ -14,8 +14,8 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 10 │ public fun x() { │ ^ │ - = `x` calls `y>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:11 - = `y>` calls `x>>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:14 + = `x` calls `y>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:11 + = `y>` calls `x>>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:14 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:13:16 @@ -23,8 +23,8 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 13 │ public fun y() { │ ^ │ - = `y` calls `x>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:14 - = `x>` calls `y>>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:11 + = `y` calls `x>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:14 + = `x>` calls `y>>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:11 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:17:16 @@ -34,7 +34,7 @@ error: cyclic type instantiation: a cycle of recursive calls 
causes a type to gr │ = `a` calls `b` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:18 = `b` calls `c` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:21 - = `c` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:24 + = `c` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:24 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:20:16 @@ -43,8 +43,8 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ ^ │ = `b` calls `c` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:21 - = `c` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:24 - = `a>` calls `b>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:18 + = `c` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:24 + = `a>` calls `b>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:18 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:23:16 @@ -52,9 +52,9 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 23 │ public fun c() { │ ^ │ - = `c` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:24 - = `a>` calls `b>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:18 - = `b>` calls `c>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:21 + = `c` calls `a>` at 
tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:24 + = `a>` calls `b>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:18 + = `b>` calls `c>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:21 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:37:16 @@ -62,7 +62,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 37 │ public fun z() { │ ^ │ - = `z` calls `z>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:38 + = `z` calls `z>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:38 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:41:16 @@ -72,8 +72,8 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ = `a` calls `b` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:42 = `b` calls `c` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:45 - = `c` calls `d>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:48 - = `d>` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:51 + = `c` calls `d>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:48 + = `d>` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:51 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:44:16 @@ -82,9 +82,9 @@ error: cyclic type instantiation: a 
cycle of recursive calls causes a type to gr │ ^ │ = `b` calls `c` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:45 - = `c` calls `d>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:48 - = `d>` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:51 - = `a>` calls `b>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:42 + = `c` calls `d>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:48 + = `d>` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:51 + = `a>` calls `b>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:42 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:47:16 @@ -92,10 +92,10 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 47 │ public fun c() { │ ^ │ - = `c` calls `d>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:48 - = `d>` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:51 - = `a>` calls `b>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:42 - = `b>` calls `c>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:45 + = `c` calls `d>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:48 + = `d>` calls `a>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:51 + = `a>` calls `b>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:42 + = `b>` calls `c>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:45 error: cyclic type 
instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:50:16 @@ -106,7 +106,7 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr = `d` calls `a` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:51 = `a` calls `b` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:42 = `b` calls `c` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:45 - = `c` calls `d>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:48 + = `c` calls `d>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:48 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:58:16 @@ -115,8 +115,8 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ ^^ │ = `tl` calls `tr` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:59 - = `tr` calls `bl>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:62 - = `bl>` calls `tl>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:69 + = `tr` calls `bl>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:62 + = `bl>` calls `tl>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:69 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:61:16 @@ -124,9 +124,9 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr 61 │ public fun tr() { │ ^^ │ - = `tr` calls `bl>` at 
tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:62 - = `bl>` calls `tl>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:69 - = `tl>` calls `tr>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:59 + = `tr` calls `bl>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:62 + = `bl>` calls `tl>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:69 + = `tl>` calls `tr>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:59 error: cyclic type instantiation: a cycle of recursive calls causes a type to grow without bound ┌─ tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:68:16 @@ -136,4 +136,4 @@ error: cyclic type instantiation: a cycle of recursive calls causes a type to gr │ = `bl` calls `tl` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:69 = `tl` calls `tr` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:59 - = `tr` calls `bl>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:62 + = `tr` calls `bl>` at tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_invalid.move:62 diff --git a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_valid.exp b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_valid.exp index 01e7a4ee24a6a..62dc89568b814 100644 --- a/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_valid.exp +++ b/third_party/move/move-compiler-v2/tests/cyclic-instantiation-checker/v1-typing/infinite_instantiations_valid.exp @@ -1,7 +1,7 @@ // -- Model dump before bytecode pipeline module 0x42::M { - struct Box { - f: #0, + struct Box { + f: T, } 
public fun t0() { M::t1(); @@ -12,10 +12,10 @@ module 0x42::M { M::t1() } public fun x() { - M::y>() + M::y>() } public fun y() { - M::z>() + M::z>() } public fun z() { M::z() @@ -27,3 +27,33 @@ module 0x42::N { M::t0>() } } // end 0x42::N + +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct Box { + f: T, + } + public fun t0() { + t1(); + t0() + } + public fun t1() { + t0(); + t1() + } + public fun x() { + y>() + } + public fun y() { + z>() + } + public fun z() { + z() + } +} +module 0x42::N { + use 0x42::M; + public fun t() { + M::t0>() + } +} diff --git a/third_party/move/move-compiler-v2/tests/folding/constant_folding_addresses.exp b/third_party/move/move-compiler-v2/tests/folding/constant_folding_addresses.exp index 42c1f4eb0702c..1fdc7289870ba 100644 --- a/third_party/move/move-compiler-v2/tests/folding/constant_folding_addresses.exp +++ b/third_party/move/move-compiler-v2/tests/folding/constant_folding_addresses.exp @@ -8,5 +8,15 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun const_addr(): address { + 0x1234 + } + fun const_addr_let(): address { + 0x1234 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/folding/constant_values.exp b/third_party/move/move-compiler-v2/tests/folding/constant_values.exp index 5ff1d119edddf..9a90765e9018a 100644 --- a/third_party/move/move-compiler-v2/tests/folding/constant_values.exp +++ b/third_party/move/move-compiler-v2/tests/folding/constant_values.exp @@ -20,5 +20,27 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun int128(): u128 { + 4u128 + } + fun int16(): u16 { + 123u16 + } + fun int256(): u256 { + 4u256 + } + fun int32(): u32 { + 137u32 + } + fun int64(): u64 { + 5 + } + fun int8(): u8 { + 1u8 + } +} + ============ bytecode verification succeeded ======== diff --git 
a/third_party/move/move-compiler-v2/tests/folding/empty_tvectors.exp b/third_party/move/move-compiler-v2/tests/folding/empty_tvectors.exp index 66bf0e6e4f6ad..51166063aa526 100644 --- a/third_party/move/move-compiler-v2/tests/folding/empty_tvectors.exp +++ b/third_party/move/move-compiler-v2/tests/folding/empty_tvectors.exp @@ -5,8 +5,8 @@ module 0x42::m { use std::vector; public entry fun init() { { - let _: vector = { - let result: vector = Vector(); + let _: vector<0x1::string::String> = { + let result: vector<0x1::string::String> = Vector<0x1::string::String>(); { let (v: vector>): (vector>) = Tuple([]); vector::reverse>(Borrow(Mutable)(v)); @@ -16,7 +16,7 @@ module 0x42::m { let e: vector = vector::pop_back>(Borrow(Mutable)(v)); { let (elem: vector): (vector) = Tuple(e); - vector::push_back(Borrow(Mutable)(result), { + vector::push_back<0x1::string::String>(Borrow(Mutable)(result), { let (key: vector): (vector) = Tuple(elem); string::utf8(key) }) @@ -64,5 +64,47 @@ module 0x42::m { } } // end 0x42::m +// -- Sourcified model before bytecode pipeline +module 0x42::m { + public entry fun init() { + let _ = { + let result = vector[]; + { + let (v) = (vector[]); + 0x1::vector::reverse>(&mut v); + while (!0x1::vector::is_empty>(&v)) { + let e = 0x1::vector::pop_back>(&mut v); + { + let (elem) = (e); + 0x1::vector::push_back<0x1::string::String>(&mut result, { + let (key) = (elem); + 0x1::string::utf8(key) + }) + }; + }; + }; + result + }; + let _ = { + let result = vector[]; + { + let (v) = (vector[]); + 0x1::vector::reverse(&mut v); + while (!0x1::vector::is_empty(&v)) { + let e = 0x1::vector::pop_back(&mut v); + { + let (elem) = (e); + 0x1::vector::push_back>(&mut result, { + let (v) = (elem); + 0x1::bcs::to_bytes(&v) + }) + }; + }; + }; + result + }; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/folding/empty_vectors.exp b/third_party/move/move-compiler-v2/tests/folding/empty_vectors.exp index 
2f4734fb55c28..3402cc93608e2 100644 --- a/third_party/move/move-compiler-v2/tests/folding/empty_vectors.exp +++ b/third_party/move/move-compiler-v2/tests/folding/empty_vectors.exp @@ -65,5 +65,48 @@ module 0x42::m { } } // end 0x42::m +// -- Sourcified model before bytecode pipeline +module 0x42::m { + public entry fun init() { + let _x = { + let result = vector[]; + { + let (v) = (vector[]); + 0x1::vector::reverse>(&mut v); + while (!0x1::vector::is_empty>(&v)) { + let e = 0x1::vector::pop_back>(&mut v); + { + let (elem) = (e); + 0x1::vector::push_back(&mut result, { + let (key) = (elem); + let t = key; + 0x1::vector::length(&t) + 2 + }) + }; + }; + }; + result + }; + let _y = { + let result = vector[]; + { + let (v) = (vector[]); + 0x1::vector::reverse(&mut v); + while (!0x1::vector::is_empty(&v)) { + let e = 0x1::vector::pop_back(&mut v); + { + let (elem) = (e); + 0x1::vector::push_back(&mut result, { + let (v) = (elem); + v + 3 + }) + }; + }; + }; + result + }; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/folding/empty_vectors2.exp b/third_party/move/move-compiler-v2/tests/folding/empty_vectors2.exp index ba0862f8e2ead..6c622215906da 100644 --- a/third_party/move/move-compiler-v2/tests/folding/empty_vectors2.exp +++ b/third_party/move/move-compiler-v2/tests/folding/empty_vectors2.exp @@ -71,5 +71,50 @@ module 0x42::m { } } // end 0x42::m +// -- Sourcified model before bytecode pipeline +module 0x42::m { + public entry fun init() { + let _x = { + let (v) = (*0x1::vector::borrow>>(&vector[vector[]], 0)); + let result = vector[]; + { + let (v) = (v); + 0x1::vector::reverse>(&mut v); + while (!0x1::vector::is_empty>(&v)) { + let e = 0x1::vector::pop_back>(&mut v); + { + let (elem) = (e); + 0x1::vector::push_back(&mut result, { + let (key) = (elem); + let t = key; + 0x1::vector::length(&t) + 2 + }) + }; + }; + }; + result + }; + let _y = { + let (v) = (*0x1::vector::borrow>(&vector[vector[]], 
0)); + let result = vector[]; + { + let (v) = (v); + 0x1::vector::reverse(&mut v); + while (!0x1::vector::is_empty(&v)) { + let e = 0x1::vector::pop_back(&mut v); + { + let (elem) = (e); + 0x1::vector::push_back(&mut result, { + let (v) = (elem); + v + 3 + }) + }; + }; + }; + result + }; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/folding/non_constant_empty_vec.exp b/third_party/move/move-compiler-v2/tests/folding/non_constant_empty_vec.exp index 2a78724f1fe13..339a8bdf36d94 100644 --- a/third_party/move/move-compiler-v2/tests/folding/non_constant_empty_vec.exp +++ b/third_party/move/move-compiler-v2/tests/folding/non_constant_empty_vec.exp @@ -3,10 +3,10 @@ module 0x42::M { struct S { dummy_field: bool, } - public fun empty_generic_vec(): vector<#0> { + public fun empty_generic_vec(): vector { Vector() } - public fun empty_generic_vec_vec(): vector> { + public fun empty_generic_vec_vec(): vector> { Vector>() } public fun empty_signer_vec(): vector { @@ -15,13 +15,37 @@ module 0x42::M { public fun empty_signer_vec_vec(): vector> { Vector>() } - public fun empty_struct_vec(): vector { - Vector() + public fun empty_struct_vec(): vector { + Vector() } - public fun empty_struct_vec_vec(): vector> { - Vector>() + public fun empty_struct_vec_vec(): vector> { + Vector>() } } // end 0x42::M +// -- Sourcified model before bytecode pipeline +module 0x42::M { + struct S { + } + public fun empty_generic_vec(): vector { + vector[] + } + public fun empty_generic_vec_vec(): vector> { + vector[] + } + public fun empty_signer_vec(): vector { + vector[] + } + public fun empty_signer_vec_vec(): vector> { + vector[] + } + public fun empty_struct_vec(): vector { + vector[] + } + public fun empty_struct_vec_vec(): vector> { + vector[] + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/folding/nonempty_tvectors.exp 
b/third_party/move/move-compiler-v2/tests/folding/nonempty_tvectors.exp index 7bf7030c145e4..f8822d3a0a187 100644 --- a/third_party/move/move-compiler-v2/tests/folding/nonempty_tvectors.exp +++ b/third_party/move/move-compiler-v2/tests/folding/nonempty_tvectors.exp @@ -5,8 +5,8 @@ module 0x42::m { use std::vector; public entry fun init() { { - let _: vector = { - let result: vector = Vector(); + let _: vector<0x1::string::String> = { + let result: vector<0x1::string::String> = Vector<0x1::string::String>(); { let (v: vector>): (vector>) = Tuple([Vector([Number(3)])]); vector::reverse>(Borrow(Mutable)(v)); @@ -16,7 +16,7 @@ module 0x42::m { let e: vector = vector::pop_back>(Borrow(Mutable)(v)); { let (elem: vector): (vector) = Tuple(e); - vector::push_back(Borrow(Mutable)(result), { + vector::push_back<0x1::string::String>(Borrow(Mutable)(result), { let (key: vector): (vector) = Tuple(elem); string::utf8(key) }) @@ -64,5 +64,47 @@ module 0x42::m { } } // end 0x42::m +// -- Sourcified model before bytecode pipeline +module 0x42::m { + public entry fun init() { + let _ = { + let result = vector[]; + { + let (v) = (vector[vector[3u8]]); + 0x1::vector::reverse>(&mut v); + while (!0x1::vector::is_empty>(&v)) { + let e = 0x1::vector::pop_back>(&mut v); + { + let (elem) = (e); + 0x1::vector::push_back<0x1::string::String>(&mut result, { + let (key) = (elem); + 0x1::string::utf8(key) + }) + }; + }; + }; + result + }; + let _ = { + let result = vector[]; + { + let (v) = (vector[3]); + 0x1::vector::reverse(&mut v); + while (!0x1::vector::is_empty(&v)) { + let e = 0x1::vector::pop_back(&mut v); + { + let (elem) = (e); + 0x1::vector::push_back>(&mut result, { + let (v) = (elem); + 0x1::bcs::to_bytes(&v) + }) + }; + }; + }; + result + }; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/folding/nonempty_vectors.exp b/third_party/move/move-compiler-v2/tests/folding/nonempty_vectors.exp index 
396a5f522928b..9c4bc6ee25968 100644 --- a/third_party/move/move-compiler-v2/tests/folding/nonempty_vectors.exp +++ b/third_party/move/move-compiler-v2/tests/folding/nonempty_vectors.exp @@ -65,5 +65,48 @@ module 0x42::m { } } // end 0x42::m +// -- Sourcified model before bytecode pipeline +module 0x42::m { + public entry fun init() { + let _x = { + let result = vector[]; + { + let (v) = (vector[vector[1u8]]); + 0x1::vector::reverse>(&mut v); + while (!0x1::vector::is_empty>(&v)) { + let e = 0x1::vector::pop_back>(&mut v); + { + let (elem) = (e); + 0x1::vector::push_back(&mut result, { + let (key) = (elem); + let t = key; + 0x1::vector::length(&t) + 2 + }) + }; + }; + }; + result + }; + let _y = { + let result = vector[]; + { + let (v) = (vector[3]); + 0x1::vector::reverse(&mut v); + while (!0x1::vector::is_empty(&v)) { + let e = 0x1::vector::pop_back(&mut v); + { + let (elem) = (e); + 0x1::vector::push_back(&mut result, { + let (v) = (elem); + v + 3 + }) + }; + }; + }; + result + }; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/lambda-lifting/basic.exp b/third_party/move/move-compiler-v2/tests/lambda-lifting/basic.exp index 560bf63d49ec9..cd8a217423ba1 100644 --- a/third_party/move/move-compiler-v2/tests/lambda-lifting/basic.exp +++ b/third_party/move/move-compiler-v2/tests/lambda-lifting/basic.exp @@ -18,6 +18,206 @@ module 0xcafe::m { } // end 0xcafe::m +// -- Model dump after env processor unused checks: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, |y: u64| Add(y, c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add(x, c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add({ + let x: u64 = Add(c, 1); + x + }, x)) + } +} // end 0xcafe::m + + +// -- Model dump after env processor type parameter check: +module 0xcafe::m { + private fun 
map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, |y: u64| Add(y, c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add(x, c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add({ + let x: u64 = Add(c, 1); + x + }, x)) + } +} // end 0xcafe::m + + +// -- Model dump after env processor check recursive struct definition: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, |y: u64| Add(y, c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add(x, c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add({ + let x: u64 = Add(c, 1); + x + }, x)) + } +} // end 0xcafe::m + + +// -- Model dump after env processor check cyclic type instantiation: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, |y: u64| Add(y, c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add(x, c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add({ + let x: u64 = Add(c, 1); + x + }, x)) + } +} // end 0xcafe::m + + +// -- Model dump after env processor unused struct params check: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, |y: u64| Add(y, c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add(x, c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add({ + let x: u64 = Add(c, 1); + x + }, x)) + } +} // end 0xcafe::m + + +// -- Model dump after env processor access and use check before inlining: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, |y: 
u64| Add(y, c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add(x, c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add({ + let x: u64 = Add(c, 1); + x + }, x)) + } +} // end 0xcafe::m + + +// -- Model dump after env processor inlining: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, |y: u64| Add(y, c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add(x, c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add({ + let x: u64 = Add(c, 1); + x + }, x)) + } +} // end 0xcafe::m + + +// -- Model dump after env processor access and use check after inlining: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, |y: u64| Add(y, c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add(x, c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add({ + let x: u64 = Add(c, 1); + x + }, x)) + } +} // end 0xcafe::m + + +// -- Model dump after env processor acquires check: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, |y: u64| Add(y, c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add(x, c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add({ + let x: u64 = Add(c, 1); + x + }, x)) + } +} // end 0xcafe::m + + +// -- Model dump after env processor simplifier: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, |y: u64| Add(y, c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, |x: u64| Add(x, c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + 
m::map(x, |x: u64| Add({ + let x: u64 = Add(c, 1); + x + }, x)) + } +} // end 0xcafe::m + + // -- Model dump after env processor lambda-lifting: module 0xcafe::m { private fun map(x: u64,f: |u64|u64): u64 { @@ -45,3 +245,61 @@ module 0xcafe::m { }, x) } } // end 0xcafe::m + + +// -- Model dump after env processor specification checker: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, closure m::no_name_clash$lambda$1(c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, closure m::with_name_clash1$lambda$1(c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, closure m::with_name_clash2$lambda$1(c)) + } + private fun no_name_clash$lambda$1(c: u64,y: u64): u64 { + Add(y, c) + } + private fun with_name_clash1$lambda$1(c: u64,x: u64): u64 { + Add(x, c) + } + private fun with_name_clash2$lambda$1(c: u64,x: u64): u64 { + Add({ + let x: u64 = Add(c, 1); + x + }, x) + } +} // end 0xcafe::m + + +// -- Model dump after env processor specification rewriter: +module 0xcafe::m { + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun no_name_clash(x: u64,c: u64): u64 { + m::map(x, closure m::no_name_clash$lambda$1(c)) + } + private fun with_name_clash1(x: u64,c: u64): u64 { + m::map(x, closure m::with_name_clash1$lambda$1(c)) + } + private fun with_name_clash2(x: u64,c: u64): u64 { + m::map(x, closure m::with_name_clash2$lambda$1(c)) + } + private fun no_name_clash$lambda$1(c: u64,y: u64): u64 { + Add(y, c) + } + private fun with_name_clash1$lambda$1(c: u64,x: u64): u64 { + Add(x, c) + } + private fun with_name_clash2$lambda$1(c: u64,x: u64): u64 { + Add({ + let x: u64 = Add(c, 1); + x + }, x) + } +} // end 0xcafe::m diff --git a/third_party/move/move-compiler-v2/tests/lambda-lifting/modify.exp b/third_party/move/move-compiler-v2/tests/lambda-lifting/modify.exp index 9b00da4dfeba8..3664bd2e163bb 100644 --- 
a/third_party/move/move-compiler-v2/tests/lambda-lifting/modify.exp +++ b/third_party/move/move-compiler-v2/tests/lambda-lifting/modify.exp @@ -44,6 +44,463 @@ module 0xcafe::m { } // end 0xcafe::m +// -- Model dump after env processor unused checks: +module 0xcafe::m { + struct S { + x: u64, + } + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun assigns_local(x: u64,c: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| z: u64 = 2; + Add(y, c)) + } + } + private fun assigns_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| x: u64 = 2; + Add(y, c)) + } + private fun borrows_local(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(z); + Add(y, Deref(r)) + }) + } + } + private fun borrows_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(c); + Add(y, Deref(r)) + }) + } + private fun immutable_borrow_ok(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &u64 = Borrow(Immutable)(z); + Add(y, Deref(r)) + }) + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor type parameter check: +module 0xcafe::m { + struct S { + x: u64, + } + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun assigns_local(x: u64,c: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| z: u64 = 2; + Add(y, c)) + } + } + private fun assigns_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| x: u64 = 2; + Add(y, c)) + } + private fun borrows_local(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(z); + Add(y, Deref(r)) + }) + } + } + private fun borrows_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(c); + Add(y, Deref(r)) + }) + } + private fun immutable_borrow_ok(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &u64 = Borrow(Immutable)(z); + Add(y, Deref(r)) + }) + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor check 
recursive struct definition: +module 0xcafe::m { + struct S { + x: u64, + } + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun assigns_local(x: u64,c: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| z: u64 = 2; + Add(y, c)) + } + } + private fun assigns_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| x: u64 = 2; + Add(y, c)) + } + private fun borrows_local(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(z); + Add(y, Deref(r)) + }) + } + } + private fun borrows_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(c); + Add(y, Deref(r)) + }) + } + private fun immutable_borrow_ok(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &u64 = Borrow(Immutable)(z); + Add(y, Deref(r)) + }) + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor check cyclic type instantiation: +module 0xcafe::m { + struct S { + x: u64, + } + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun assigns_local(x: u64,c: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| z: u64 = 2; + Add(y, c)) + } + } + private fun assigns_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| x: u64 = 2; + Add(y, c)) + } + private fun borrows_local(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(z); + Add(y, Deref(r)) + }) + } + } + private fun borrows_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(c); + Add(y, Deref(r)) + }) + } + private fun immutable_borrow_ok(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &u64 = Borrow(Immutable)(z); + Add(y, Deref(r)) + }) + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor unused struct params check: +module 0xcafe::m { + struct S { + x: u64, + } + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun assigns_local(x: u64,c: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: 
u64| z: u64 = 2; + Add(y, c)) + } + } + private fun assigns_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| x: u64 = 2; + Add(y, c)) + } + private fun borrows_local(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(z); + Add(y, Deref(r)) + }) + } + } + private fun borrows_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(c); + Add(y, Deref(r)) + }) + } + private fun immutable_borrow_ok(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &u64 = Borrow(Immutable)(z); + Add(y, Deref(r)) + }) + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor access and use check before inlining: +module 0xcafe::m { + struct S { + x: u64, + } + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun assigns_local(x: u64,c: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| z: u64 = 2; + Add(y, c)) + } + } + private fun assigns_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| x: u64 = 2; + Add(y, c)) + } + private fun borrows_local(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(z); + Add(y, Deref(r)) + }) + } + } + private fun borrows_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(c); + Add(y, Deref(r)) + }) + } + private fun immutable_borrow_ok(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &u64 = Borrow(Immutable)(z); + Add(y, Deref(r)) + }) + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor inlining: +module 0xcafe::m { + struct S { + x: u64, + } + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun assigns_local(x: u64,c: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| z: u64 = 2; + Add(y, c)) + } + } + private fun assigns_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| x: u64 = 2; + Add(y, c)) + } + private fun borrows_local(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: 
&mut u64 = Borrow(Mutable)(z); + Add(y, Deref(r)) + }) + } + } + private fun borrows_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(c); + Add(y, Deref(r)) + }) + } + private fun immutable_borrow_ok(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &u64 = Borrow(Immutable)(z); + Add(y, Deref(r)) + }) + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor access and use check after inlining: +module 0xcafe::m { + struct S { + x: u64, + } + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun assigns_local(x: u64,c: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| z: u64 = 2; + Add(y, c)) + } + } + private fun assigns_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| x: u64 = 2; + Add(y, c)) + } + private fun borrows_local(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(z); + Add(y, Deref(r)) + }) + } + } + private fun borrows_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(c); + Add(y, Deref(r)) + }) + } + private fun immutable_borrow_ok(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &u64 = Borrow(Immutable)(z); + Add(y, Deref(r)) + }) + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor acquires check: +module 0xcafe::m { + struct S { + x: u64, + } + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun assigns_local(x: u64,c: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| z: u64 = 2; + Add(y, c)) + } + } + private fun assigns_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| x: u64 = 2; + Add(y, c)) + } + private fun borrows_local(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(z); + Add(y, Deref(r)) + }) + } + } + private fun borrows_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(c); + Add(y, Deref(r)) + }) + } + private fun 
immutable_borrow_ok(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &u64 = Borrow(Immutable)(z); + Add(y, Deref(r)) + }) + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor simplifier: +module 0xcafe::m { + struct S { + x: u64, + } + private fun map(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun assigns_local(x: u64,c: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| z: u64 = 2; + Add(y, c)) + } + } + private fun assigns_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| x: u64 = 2; + Add(y, c)) + } + private fun borrows_local(x: u64): u64 { + { + let z: u64 = 1; + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(z); + Add(y, Deref(r)) + }) + } + } + private fun borrows_param(x: u64,c: u64): u64 { + m::map(x, |y: u64| { + let r: &mut u64 = Borrow(Mutable)(c); + Add(y, Deref(r)) + }) + } + private fun immutable_borrow_ok(x: u64): u64 { + m::map(x, |y: u64| { + let r: &u64 = Borrow(Immutable)(1); + Add(y, Deref(r)) + }) + } +} // end 0xcafe::m + + Diagnostics: error: captured variable `x` cannot be modified inside of a lambda diff --git a/third_party/move/move-compiler-v2/tests/lambda-lifting/nested.exp b/third_party/move/move-compiler-v2/tests/lambda-lifting/nested.exp index 9ddfd3b736119..6da5f81560227 100644 --- a/third_party/move/move-compiler-v2/tests/lambda-lifting/nested.exp +++ b/third_party/move/move-compiler-v2/tests/lambda-lifting/nested.exp @@ -12,6 +12,146 @@ module 0xcafe::m { } // end 0xcafe::m +// -- Model dump after env processor unused checks: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, |y: u64| Cast(m::map2(Cast(Sub(y, c)), |y: u8| Add(y, Cast(c))))) + } +} // end 0xcafe::m + + +// -- Model dump after env processor type parameter check: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: 
|u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, |y: u64| Cast(m::map2(Cast(Sub(y, c)), |y: u8| Add(y, Cast(c))))) + } +} // end 0xcafe::m + + +// -- Model dump after env processor check recursive struct definition: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, |y: u64| Cast(m::map2(Cast(Sub(y, c)), |y: u8| Add(y, Cast(c))))) + } +} // end 0xcafe::m + + +// -- Model dump after env processor check cyclic type instantiation: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, |y: u64| Cast(m::map2(Cast(Sub(y, c)), |y: u8| Add(y, Cast(c))))) + } +} // end 0xcafe::m + + +// -- Model dump after env processor unused struct params check: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, |y: u64| Cast(m::map2(Cast(Sub(y, c)), |y: u8| Add(y, Cast(c))))) + } +} // end 0xcafe::m + + +// -- Model dump after env processor access and use check before inlining: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, |y: u64| Cast(m::map2(Cast(Sub(y, c)), |y: u8| Add(y, Cast(c))))) + } +} // end 0xcafe::m + + +// -- Model dump after env processor inlining: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, |y: u64| Cast(m::map2(Cast(Sub(y, c)), |y: u8| Add(y, Cast(c))))) + } +} // end 0xcafe::m + + +// -- Model dump after env processor access and use check after 
inlining: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, |y: u64| Cast(m::map2(Cast(Sub(y, c)), |y: u8| Add(y, Cast(c))))) + } +} // end 0xcafe::m + + +// -- Model dump after env processor acquires check: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, |y: u64| Cast(m::map2(Cast(Sub(y, c)), |y: u8| Add(y, Cast(c))))) + } +} // end 0xcafe::m + + +// -- Model dump after env processor simplifier: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, |y: u64| Cast(m::map2(Cast(Sub(y, c)), |y: u8| Add(y, Cast(c))))) + } +} // end 0xcafe::m + + // -- Model dump after env processor lambda-lifting: module 0xcafe::m { private fun map1(x: u64,f: |u64|u64): u64 { @@ -30,3 +170,43 @@ module 0xcafe::m { Cast(m::map2(Cast(Sub(y, c)), closure m::nested$lambda$1(c))) } } // end 0xcafe::m + + +// -- Model dump after env processor specification checker: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, closure m::nested$lambda$2(c)) + } + private fun nested$lambda$1(c: u64,y: u8): u8 { + Add(y, Cast(c)) + } + private fun nested$lambda$2(c: u64,y: u64): u64 { + Cast(m::map2(Cast(Sub(y, c)), closure m::nested$lambda$1(c))) + } +} // end 0xcafe::m + + +// -- Model dump after env processor specification rewriter: +module 0xcafe::m { + private fun map1(x: u64,f: |u64|u64): u64 { + (f)(x) + } + private fun map2(x: u8,f: |u8|u8): u8 { + (f)(x) + } + private fun nested(x: u64,c: u64): u64 { + m::map1(x, closure m::nested$lambda$2(c)) + } + 
private fun nested$lambda$1(c: u64,y: u8): u8 { + Add(y, Cast(c)) + } + private fun nested$lambda$2(c: u64,y: u64): u64 { + Cast(m::map2(Cast(Sub(y, c)), closure m::nested$lambda$1(c))) + } +} // end 0xcafe::m diff --git a/third_party/move/move-compiler-v2/tests/lambda-lifting/pattern.exp b/third_party/move/move-compiler-v2/tests/lambda-lifting/pattern.exp index c61dde92a484d..9b6c76f186bb0 100644 --- a/third_party/move/move-compiler-v2/tests/lambda-lifting/pattern.exp +++ b/third_party/move/move-compiler-v2/tests/lambda-lifting/pattern.exp @@ -1,13 +1,183 @@ // -- Model dump before env processor pipeline: module 0xcafe::m { - struct S { - x: #0, + struct S { + x: T, } - private fun consume(s: m::S<#0>,x: #0,f: |(m::S<#0>, #0)|#0): #0 { + private fun consume(s: S,x: T,f: |(S, T)|T): T { (f)(s, x) } - private fun pattern(s: m::S,x: u64): u64 { - m::consume(s, x, |(m::S{ x }, _y: u64): (m::S, u64)| { + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x }, _y: u64): (S, u64)| { + let y: u64 = x; + Add(x, y) + }) + } +} // end 0xcafe::m + + +// -- Model dump after env processor unused checks: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x }, _y: u64): (S, u64)| { + let y: u64 = x; + Add(x, y) + }) + } +} // end 0xcafe::m + + +// -- Model dump after env processor type parameter check: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x }, _y: u64): (S, u64)| { + let y: u64 = x; + Add(x, y) + }) + } +} // end 0xcafe::m + + +// -- Model dump after env processor check recursive struct definition: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x }, _y: u64): (S, 
u64)| { + let y: u64 = x; + Add(x, y) + }) + } +} // end 0xcafe::m + + +// -- Model dump after env processor check cyclic type instantiation: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x }, _y: u64): (S, u64)| { + let y: u64 = x; + Add(x, y) + }) + } +} // end 0xcafe::m + + +// -- Model dump after env processor unused struct params check: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x }, _y: u64): (S, u64)| { + let y: u64 = x; + Add(x, y) + }) + } +} // end 0xcafe::m + + +// -- Model dump after env processor access and use check before inlining: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x }, _y: u64): (S, u64)| { + let y: u64 = x; + Add(x, y) + }) + } +} // end 0xcafe::m + + +// -- Model dump after env processor inlining: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x }, _y: u64): (S, u64)| { + let y: u64 = x; + Add(x, y) + }) + } +} // end 0xcafe::m + + +// -- Model dump after env processor access and use check after inlining: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x }, _y: u64): (S, u64)| { + let y: u64 = x; + Add(x, y) + }) + } +} // end 0xcafe::m + + +// -- Model dump after env processor acquires check: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x 
}, _y: u64): (S, u64)| { + let y: u64 = x; + Add(x, y) + }) + } +} // end 0xcafe::m + + +// -- Model dump after env processor simplifier: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, |(m::S{ x }, _y: u64): (S, u64)| { let y: u64 = x; Add(x, y) }) @@ -17,16 +187,62 @@ module 0xcafe::m { // -- Model dump after env processor lambda-lifting: module 0xcafe::m { - struct S { - x: #0, + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, closure m::pattern$lambda$1()) + } + private fun pattern$lambda$1(param$0: S,_y: u64): u64 { + { + let m::S{ x } = param$0; + { + let y: u64 = x; + Add(x, y) + } + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor specification checker: +module 0xcafe::m { + struct S { + x: T, + } + private fun consume(s: S,x: T,f: |(S, T)|T): T { + (f)(s, x) + } + private fun pattern(s: S,x: u64): u64 { + m::consume(s, x, closure m::pattern$lambda$1()) + } + private fun pattern$lambda$1(param$0: S,_y: u64): u64 { + { + let m::S{ x } = param$0; + { + let y: u64 = x; + Add(x, y) + } + } + } +} // end 0xcafe::m + + +// -- Model dump after env processor specification rewriter: +module 0xcafe::m { + struct S { + x: T, } - private fun consume(s: m::S<#0>,x: #0,f: |(m::S<#0>, #0)|#0): #0 { + private fun consume(s: S,x: T,f: |(S, T)|T): T { (f)(s, x) } - private fun pattern(s: m::S,x: u64): u64 { + private fun pattern(s: S,x: u64): u64 { m::consume(s, x, closure m::pattern$lambda$1()) } - private fun pattern$lambda$1(param$0: m::S,_y: u64): u64 { + private fun pattern$lambda$1(param$0: S,_y: u64): u64 { { let m::S{ x } = param$0; { diff --git a/third_party/move/move-compiler-v2/tests/live-var/mut_ref.exp b/third_party/move/move-compiler-v2/tests/live-var/mut_ref.exp index 9e5ec15c0ef21..b2dabe6d743a4 100644 --- 
a/third_party/move/move-compiler-v2/tests/live-var/mut_ref.exp +++ b/third_party/move/move-compiler-v2/tests/live-var/mut_ref.exp @@ -2,11 +2,11 @@ [variant baseline] fun m::f1_ok() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R + var $t2: &mut 0x42::m::R 0: $t1 := 0 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) 2: $t2 := borrow_local($t0) 3: m::some($t2) 4: m::some($t2) @@ -16,12 +16,12 @@ fun m::f1_ok() { [variant baseline] fun m::f1a_ok() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R - var $t3: m::R + var $t2: &mut 0x42::m::R + var $t3: 0x42::m::R 0: $t1 := 0 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) 2: $t2 := borrow_local($t0) 3: $t3 := read_ref($t2) 4: m::some($t2) @@ -32,12 +32,12 @@ fun m::f1a_ok() { [variant baseline] fun m::f1b_ok() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R - var $t3: m::R + var $t2: &mut 0x42::m::R + var $t3: 0x42::m::R 0: $t1 := 0 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) 2: $t2 := borrow_local($t0) 3: m::some($t2) 4: $t3 := read_ref($t2) @@ -48,11 +48,11 @@ fun m::f1b_ok() { [variant baseline] fun m::f2_fail() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R + var $t2: &mut 0x42::m::R 0: $t1 := 0 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) 2: $t2 := borrow_local($t0) 3: m::some2($t2, $t2) 4: return () @@ -61,12 +61,12 @@ fun m::f2_fail() { [variant baseline] fun m::f3_ok() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R - var $t3: &mut m::R + var $t2: &mut 0x42::m::R + var $t3: &mut 0x42::m::R 0: $t1 := 0 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) 2: $t2 := borrow_local($t0) 3: m::some($t2) 4: $t3 := borrow_local($t0) @@ -78,12 +78,12 @@ fun m::f3_ok() { [variant baseline] fun m::f4_ok() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R - var $t3: &mut m::R + var $t2: &mut 0x42::m::R + var $t3: 
&mut 0x42::m::R 0: $t1 := 0 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) 2: $t2 := borrow_local($t0) 3: $t3 := m::id($t2) 4: $t2 := infer($t3) @@ -94,12 +94,12 @@ fun m::f4_ok() { [variant baseline] fun m::f5_fail($t0: bool) { - var $t1: m::R + var $t1: 0x42::m::R var $t2: u64 - var $t3: &mut m::R - var $t4: &mut m::R + var $t3: &mut 0x42::m::R + var $t4: &mut 0x42::m::R 0: $t2 := 0 - 1: $t1 := pack m::R($t2) + 1: $t1 := pack 0x42::m::R($t2) 2: $t3 := borrow_local($t1) 3: $t4 := infer($t3) 4: if ($t0) goto 5 else goto 9 @@ -116,21 +116,21 @@ fun m::f5_fail($t0: bool) { [variant baseline] -fun m::id($t0: &mut m::R): &mut m::R { - var $t1: &mut m::R +fun m::id($t0: &mut 0x42::m::R): &mut 0x42::m::R { + var $t1: &mut 0x42::m::R 0: $t1 := infer($t0) 1: return $t1 } [variant baseline] -fun m::some($t0: &mut m::R) { +fun m::some($t0: &mut 0x42::m::R) { 0: return () } [variant baseline] -fun m::some2($t0: &mut m::R, $t1: &mut m::R) { +fun m::some2($t0: &mut 0x42::m::R, $t1: &mut 0x42::m::R) { 0: return () } @@ -138,13 +138,13 @@ fun m::some2($t0: &mut m::R, $t1: &mut m::R) { [variant baseline] fun m::f1_ok() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R + var $t2: &mut 0x42::m::R # live vars: 0: $t1 := 0 # live vars: $t1 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) # live vars: $t0 2: $t2 := borrow_local($t0) # live vars: $t2 @@ -158,14 +158,14 @@ fun m::f1_ok() { [variant baseline] fun m::f1a_ok() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R - var $t3: m::R + var $t2: &mut 0x42::m::R + var $t3: 0x42::m::R # live vars: 0: $t1 := 0 # live vars: $t1 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) # live vars: $t0 2: $t2 := borrow_local($t0) # live vars: $t2 @@ -181,14 +181,14 @@ fun m::f1a_ok() { [variant baseline] fun m::f1b_ok() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R - var $t3: m::R + var $t2: &mut 0x42::m::R + var $t3: 0x42::m::R # live 
vars: 0: $t1 := 0 # live vars: $t1 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) # live vars: $t0 2: $t2 := borrow_local($t0) # live vars: $t2 @@ -204,13 +204,13 @@ fun m::f1b_ok() { [variant baseline] fun m::f2_fail() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R + var $t2: &mut 0x42::m::R # live vars: 0: $t1 := 0 # live vars: $t1 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) # live vars: $t0 2: $t2 := borrow_local($t0) # live vars: $t2 @@ -222,14 +222,14 @@ fun m::f2_fail() { [variant baseline] fun m::f3_ok() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R - var $t3: &mut m::R + var $t2: &mut 0x42::m::R + var $t3: &mut 0x42::m::R # live vars: 0: $t1 := 0 # live vars: $t1 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) # live vars: $t0 2: $t2 := borrow_local($t0) # live vars: $t0, $t2 @@ -247,14 +247,14 @@ fun m::f3_ok() { [variant baseline] fun m::f4_ok() { - var $t0: m::R + var $t0: 0x42::m::R var $t1: u64 - var $t2: &mut m::R - var $t3: &mut m::R + var $t2: &mut 0x42::m::R + var $t3: &mut 0x42::m::R # live vars: 0: $t1 := 0 # live vars: $t1 - 1: $t0 := pack m::R($t1) + 1: $t0 := pack 0x42::m::R($t1) # live vars: $t0 2: $t2 := borrow_local($t0) # live vars: $t2 @@ -270,14 +270,14 @@ fun m::f4_ok() { [variant baseline] fun m::f5_fail($t0: bool) { - var $t1: m::R + var $t1: 0x42::m::R var $t2: u64 - var $t3: &mut m::R - var $t4: &mut m::R + var $t3: &mut 0x42::m::R + var $t4: &mut 0x42::m::R # live vars: $t0 0: $t2 := 0 # live vars: $t0, $t2 - 1: $t1 := pack m::R($t2) + 1: $t1 := pack 0x42::m::R($t2) # live vars: $t0, $t1 2: $t3 := borrow_local($t1) # live vars: $t0, $t3 @@ -306,8 +306,8 @@ fun m::f5_fail($t0: bool) { [variant baseline] -fun m::id($t0: &mut m::R): &mut m::R { - var $t1: &mut m::R +fun m::id($t0: &mut 0x42::m::R): &mut 0x42::m::R { + var $t1: &mut 0x42::m::R # live vars: $t0 0: $t1 := infer($t0) # live vars: $t1 @@ -316,14 +316,14 @@ fun m::id($t0: &mut 
m::R): &mut m::R { [variant baseline] -fun m::some($t0: &mut m::R) { +fun m::some($t0: &mut 0x42::m::R) { # live vars: $t0 0: return () } [variant baseline] -fun m::some2($t0: &mut m::R, $t1: &mut m::R) { +fun m::some2($t0: &mut 0x42::m::R, $t1: &mut 0x42::m::R) { # live vars: $t0, $t1 0: return () } diff --git a/third_party/move/move-compiler-v2/tests/more-v1/liveness/mut_ref2.exp b/third_party/move/move-compiler-v2/tests/more-v1/liveness/mut_ref2.exp index 9d2cce8b6b8c3..5549335eb8a0c 100644 --- a/third_party/move/move-compiler-v2/tests/more-v1/liveness/mut_ref2.exp +++ b/third_party/move/move-compiler-v2/tests/more-v1/liveness/mut_ref2.exp @@ -1,54 +1,54 @@ Diagnostics: -error: local `r` of type `m::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/liveness/mut_ref2.move:15:17 │ 15 │ let x = &mut r; │ ^^^^^^ still borrowed but will be implicitly dropped later since it is no longer used -error: local `r` of type `m::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/liveness/mut_ref2.move:23:17 │ 23 │ let x = &mut r; │ ^^^^^^ still borrowed but will be implicitly dropped later since it is no longer used -error: local `x` of type `m::R` does not have the `copy` ability +error: local `x` of type `R` does not have the `copy` ability ┌─ tests/more-v1/liveness/mut_ref2.move:24:9 │ 24 │ *x; // Expected ok because x is only read; ability analysis will check whether read is ok │ ^^ reference content copied here -error: value of type `m::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/more-v1/liveness/mut_ref2.move:24:9 │ 24 │ *x; // Expected ok because x is only read; ability analysis will check whether read is ok │ ^^ implicitly dropped here since it is no longer used -error: local `r` of type `m::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` 
ability ┌─ tests/more-v1/liveness/mut_ref2.move:31:17 │ 31 │ let x = &mut r; │ ^^^^^^ still borrowed but will be implicitly dropped later since it is no longer used -error: local `x` of type `m::R` does not have the `copy` ability +error: local `x` of type `R` does not have the `copy` ability ┌─ tests/more-v1/liveness/mut_ref2.move:33:9 │ 33 │ *x; // Same as f1aok │ ^^ reference content copied here -error: value of type `m::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/more-v1/liveness/mut_ref2.move:33:9 │ 33 │ *x; // Same as f1aok │ ^^ implicitly dropped here since it is no longer used -error: local `r` of type `m::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/liveness/mut_ref2.move:42:13 │ 42 │ x = &mut r; │ ^^^^^^ still borrowed but will be implicitly dropped later since it is no longer used -error: local `r` of type `m::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/liveness/mut_ref2.move:48:17 │ 48 │ let x = &mut r; diff --git a/third_party/move/move-compiler-v2/tests/more-v1/locals/assign_partial_resource.exp b/third_party/move/move-compiler-v2/tests/more-v1/locals/assign_partial_resource.exp index be7644dd18716..17f1975f56719 100644 --- a/third_party/move/move-compiler-v2/tests/more-v1/locals/assign_partial_resource.exp +++ b/third_party/move/move-compiler-v2/tests/more-v1/locals/assign_partial_resource.exp @@ -26,19 +26,19 @@ warning: Unused assignment to `r`. 
Consider removing or prefixing with an unders Diagnostics: -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/assign_partial_resource.move:6:21 │ 6 │ if (cond) { r = R{}; }; │ ^^^^^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/assign_partial_resource.move:13:29 │ 13 │ if (cond) {} else { r = R{}; }; │ ^^^^^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/assign_partial_resource.move:20:24 │ 20 │ while (cond) { r = R{} }; diff --git a/third_party/move/move-compiler-v2/tests/more-v1/locals/assign_resource.exp b/third_party/move/move-compiler-v2/tests/more-v1/locals/assign_resource.exp index b99889c927077..6c7b79d54a2ce 100644 --- a/third_party/move/move-compiler-v2/tests/more-v1/locals/assign_resource.exp +++ b/third_party/move/move-compiler-v2/tests/more-v1/locals/assign_resource.exp @@ -14,25 +14,25 @@ warning: Unused assignment to `r`. 
Consider removing or prefixing with an unders Diagnostics: -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/assign_resource.move:5:17 │ 5 │ let r = R{}; │ ^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/assign_resource.move:12:19 │ 12 │ if (cond) { r = R{}; }; │ ^^^^^^^^^^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/assign_resource.move:18:27 │ 18 │ if (cond) {} else { r = R{}; }; │ ^^^^^^^^^^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/assign_resource.move:24:24 │ 24 │ while (cond) { r = R{} }; diff --git a/third_party/move/move-compiler-v2/tests/more-v1/locals/unused_resource.exp b/third_party/move/move-compiler-v2/tests/more-v1/locals/unused_resource.exp index c26fec6cde656..a6c368f98dfca 100644 --- a/third_party/move/move-compiler-v2/tests/more-v1/locals/unused_resource.exp +++ b/third_party/move/move-compiler-v2/tests/more-v1/locals/unused_resource.exp @@ -46,43 +46,43 @@ warning: Unused assignment to `r`. 
Consider removing or prefixing with an unders Diagnostics: -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource.move:5:17 │ 5 │ let r = R{}; │ ^^^ implicitly dropped here since it is no longer used -error: local `_r` of type `M::R` does not have the `drop` ability +error: local `_r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource.move:10:18 │ 10 │ let _r = R{}; │ ^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource.move:15:21 │ 15 │ if (cond) { r = R{}; }; │ ^^^^^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource.move:20:29 │ 20 │ if (cond) {} else { r = R{}; }; │ ^^^^^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource.move:25:24 │ 25 │ while (cond) { r = R{} }; │ ^^^^^^^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource.move:33:17 │ 33 │ let _ = &R{}; │ ^^^^ still borrowed but will be implicitly dropped later since it is no longer used -error: local `_x` of type `M::R` does not have the `drop` ability +error: local `_x` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource.move:36:22 │ 36 │ fun t7(_x: R) { diff --git a/third_party/move/move-compiler-v2/tests/more-v1/locals/unused_resource_explicit_return.exp 
b/third_party/move/move-compiler-v2/tests/more-v1/locals/unused_resource_explicit_return.exp index e6c1bf612b336..48ea71a1d6fcc 100644 --- a/third_party/move/move-compiler-v2/tests/more-v1/locals/unused_resource_explicit_return.exp +++ b/third_party/move/move-compiler-v2/tests/more-v1/locals/unused_resource_explicit_return.exp @@ -40,43 +40,43 @@ warning: Unused assignment to `x`. Consider removing or prefixing with an unders Diagnostics: -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource_explicit_return.move:5:17 │ 5 │ let r = R{}; │ ^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource_explicit_return.move:11:21 │ 11 │ if (cond) { return () }; │ ^^^^^^^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource_explicit_return.move:17:29 │ 17 │ if (cond) {} else { return () }; │ ^^^^^^^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource_explicit_return.move:23:24 │ 23 │ while (cond) { return () }; │ ^^^^^^^^^ implicitly dropped here since it is no longer used -error: local `r` of type `M::R` does not have the `drop` ability +error: local `r` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource_explicit_return.move:28:17 │ 28 │ let r = R{}; │ ^^^ implicitly dropped here since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ 
tests/more-v1/locals/unused_resource_explicit_return.move:33:17 │ 33 │ let x = &R{}; │ ^^^^ implicitly dropped here since it is no longer used -error: local `_x` of type `M::R` does not have the `drop` ability +error: local `_x` of type `R` does not have the `drop` ability ┌─ tests/more-v1/locals/unused_resource_explicit_return.move:38:9 │ 38 │ return () diff --git a/third_party/move/move-compiler-v2/tests/more-v1/parser/spec_parsing_fun_type_fail.exp b/third_party/move/move-compiler-v2/tests/more-v1/parser/spec_parsing_fun_type_fail.exp index f5a92a3579cfe..e023799381a93 100644 --- a/third_party/move/move-compiler-v2/tests/more-v1/parser/spec_parsing_fun_type_fail.exp +++ b/third_party/move/move-compiler-v2/tests/more-v1/parser/spec_parsing_fun_type_fail.exp @@ -4,7 +4,7 @@ error: Only inline functions may have function-typed parameters, but non-inline ┌─ tests/more-v1/parser/spec_parsing_fun_type_fail.move:2:9 │ 2 │ fun fun_type_in_prog(p: |u64|u64) { - │ ^^^^^^^^^^^^^^^^ - Parameter `p` has a function type. + │ ^^^^^^^^^^^^^^^^ - Parameter `p` has function-valued type `|u64|u64`. warning: Unused parameter `p`. 
Consider removing or prefixing with an underscore: `_p` ┌─ tests/more-v1/parser/spec_parsing_fun_type_fail.move:2:26 diff --git a/third_party/move/move-compiler-v2/tests/no-simplifier/constant_folding_ristretto.exp b/third_party/move/move-compiler-v2/tests/no-simplifier/constant_folding_ristretto.exp index b6a72eeed3745..8a40a0cf6ddc3 100644 --- a/third_party/move/move-compiler-v2/tests/no-simplifier/constant_folding_ristretto.exp +++ b/third_party/move/move-compiler-v2/tests/no-simplifier/constant_folding_ristretto.exp @@ -16,5 +16,14 @@ module 0xcafe::Ristretto { } } // end 0xcafe::Ristretto +// -- Sourcified model before bytecode pipeline +module 0xcafe::Ristretto { + public fun test() { + let non_canonical_highbit = vector[0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 128u8]; + let non_canonical_highbit_hex = vector[0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 128u8]; + if (non_canonical_highbit == non_canonical_highbit_hex) () else abort 1; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/no-simplifier/moved_var_not_simplified.exp b/third_party/move/move-compiler-v2/tests/no-simplifier/moved_var_not_simplified.exp index 6c96a94dd70d5..0ed31e88c7ef1 100644 --- a/third_party/move/move-compiler-v2/tests/no-simplifier/moved_var_not_simplified.exp +++ b/third_party/move/move-compiler-v2/tests/no-simplifier/moved_var_not_simplified.exp @@ -11,6 +11,15 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test(): u8 { + let x = 40u8; + let y = move x; + x + y + } +} + Diagnostics: error: cannot move local `x` since it is still in use diff --git 
a/third_party/move/move-compiler-v2/tests/no-simplifier/moved_var_not_simplified2.exp b/third_party/move/move-compiler-v2/tests/no-simplifier/moved_var_not_simplified2.exp index ee86deb4ac415..73e1150d1e95c 100644 --- a/third_party/move/move-compiler-v2/tests/no-simplifier/moved_var_not_simplified2.exp +++ b/third_party/move/move-compiler-v2/tests/no-simplifier/moved_var_not_simplified2.exp @@ -14,6 +14,16 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test(): u8 { + let x = 40u8; + let y = move x; + let _z = x; + y + } +} + Diagnostics: error: cannot move local `x` since it is still in use diff --git a/third_party/move/move-compiler-v2/tests/op-equal/error_not_shown.exp b/third_party/move/move-compiler-v2/tests/op-equal/error_not_shown.exp new file mode 100644 index 0000000000000..9363c1382ce08 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/error_not_shown.exp @@ -0,0 +1,7 @@ + +Diagnostics: +error: cannot mutably borrow from an immutable ref + ┌─ tests/op-equal/error_not_shown.move:9:17 + │ +9 │ let p = &mut self.0; + │ ^^^^^^^^^^^ diff --git a/third_party/move/move-compiler-v2/tests/op-equal/error_not_shown.move b/third_party/move/move-compiler-v2/tests/op-equal/error_not_shown.move new file mode 100644 index 0000000000000..576f91afa91cf --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/error_not_shown.move @@ -0,0 +1,12 @@ +module 0x42::test { + struct Coin(u256) has drop; + + fun inc_old(x: &u256) { + *x = *x + 1; + } + + fun coin_inc_new_1(self: &Coin) { + let p = &mut self.0; + *p += 1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/eval_order.exp b/third_party/move/move-compiler-v2/tests/op-equal/eval_order.exp new file mode 100644 index 0000000000000..8d0574ecb6ed7 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/eval_order.exp @@ -0,0 +1,496 @@ +// -- Model dump before bytecode pipeline 
+module 0xc0ffee::m { + private fun mod1(r: &mut u64) { + { + let $t1: &mut u64 = r; + $t1 = Add(Deref($t1), 2) + }; + Tuple() + } + private fun mod2(r: &mut u64): u64 { + { + let $t1: &mut u64 = r; + $t1 = Add(Deref($t1), 2) + }; + Deref(r) + } + public fun test0(): u64 { + { + let v: u64 = 1; + { + let $t: u64 = { + let $t: u64 = v: u64 = Add(v, 2); + v; + v: u64 = Add(v, $t) + }; + v; + v: u64 = Add(v, $t) + }; + v + } + } + public fun test1(): u64 { + { + let v: u64 = 1; + { + let $t: u64 = v: u64 = Add(v, 2); + v; + v: u64 = Add(v, $t) + }; + v + } + } + public fun test2(): u64 { + { + let v: u64 = 1; + { + let $t: u64 = m::mod1(Borrow(Mutable)(v)); + v; + v: u64 = Add(v, $t) + }; + v + } + } + public fun test3(): u64 { + { + let v: u64 = 1; + { + let $t: u64 = m::mod2(Borrow(Mutable)(v)); + v: u64 = Add(v, $t) + }; + v + } + } +} // end 0xc0ffee::m + +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + fun mod1(r: &mut u64) { + { + let $t1 = r; + *$t1 = *$t1 + 2 + }; + } + fun mod2(r: &mut u64): u64 { + { + let $t1 = r; + *$t1 = *$t1 + 2 + }; + *r + } + public fun test0(): u64 { + let v = 1; + { + let $t = { + { + let $t = { + v = v + 2; + v + }; + v = v + $t + }; + v + }; + v = v + $t + }; + v + } + public fun test1(): u64 { + let v = 1; + { + let $t = { + v = v + 2; + v + }; + v = v + $t + }; + v + } + public fun test2(): u64 { + let v = 1; + { + let $t = { + mod1(&mut v); + v + }; + v = v + $t + }; + v + } + public fun test3(): u64 { + let v = 1; + { + let $t = mod2(&mut v); + v = v + $t + }; + v + } +} + +============ initial bytecode ================ + +[variant baseline] +fun m::mod1($t0: &mut u64) { + var $t1: &mut u64 + var $t2: u64 + var $t3: u64 + var $t4: u64 + 0: $t1 := infer($t0) + 1: $t3 := read_ref($t1) + 2: $t4 := 2 + 3: $t2 := +($t3, $t4) + 4: write_ref($t1, $t2) + 5: return () +} + + +[variant baseline] +fun m::mod2($t0: &mut u64): u64 { + var $t1: u64 + var $t2: &mut u64 + var $t3: u64 + var $t4: u64 + var $t5: u64 + 0: 
$t2 := infer($t0) + 1: $t4 := read_ref($t2) + 2: $t5 := 2 + 3: $t3 := +($t4, $t5) + 4: write_ref($t2, $t3) + 5: $t1 := read_ref($t0) + 6: return $t1 +} + + +[variant baseline] +public fun m::test0(): u64 { + var $t0: u64 + var $t1: u64 + var $t2: u64 + var $t3: u64 + var $t4: u64 + var $t5: u64 + var $t6: u64 + var $t7: u64 + 0: $t1 := 1 + 1: $t5 := 2 + 2: $t4 := +($t1, $t5) + 3: $t1 := infer($t4) + 4: $t3 := infer($t1) + 5: $t6 := +($t1, $t3) + 6: $t1 := infer($t6) + 7: $t2 := infer($t1) + 8: $t7 := +($t1, $t2) + 9: $t1 := infer($t7) + 10: $t0 := infer($t1) + 11: return $t0 +} + + +[variant baseline] +public fun m::test1(): u64 { + var $t0: u64 + var $t1: u64 + var $t2: u64 + var $t3: u64 + var $t4: u64 + var $t5: u64 + 0: $t1 := 1 + 1: $t4 := 2 + 2: $t3 := +($t1, $t4) + 3: $t1 := infer($t3) + 4: $t2 := infer($t1) + 5: $t5 := +($t1, $t2) + 6: $t1 := infer($t5) + 7: $t0 := infer($t1) + 8: return $t0 +} + + +[variant baseline] +public fun m::test2(): u64 { + var $t0: u64 + var $t1: u64 + var $t2: u64 + var $t3: &mut u64 + var $t4: u64 + 0: $t1 := 1 + 1: $t3 := borrow_local($t1) + 2: m::mod1($t3) + 3: $t2 := infer($t1) + 4: $t4 := +($t1, $t2) + 5: $t1 := infer($t4) + 6: $t0 := infer($t1) + 7: return $t0 +} + + +[variant baseline] +public fun m::test3(): u64 { + var $t0: u64 + var $t1: u64 + var $t2: u64 + var $t3: &mut u64 + var $t4: u64 + 0: $t1 := 1 + 1: $t3 := borrow_local($t1) + 2: $t2 := m::mod2($t3) + 3: $t4 := +($t1, $t2) + 4: $t1 := infer($t4) + 5: $t0 := infer($t1) + 6: return $t0 +} + +============ after LiveVarAnalysisProcessor: ================ + +[variant baseline] +fun m::mod1($t0: &mut u64) { + var $t1: &mut u64 [unused] + var $t2: u64 [unused] + var $t3: u64 + var $t4: u64 + # live vars: $t0 + 0: $t3 := read_ref($t0) + # live vars: $t0, $t3 + 1: $t4 := 2 + # live vars: $t0, $t3, $t4 + 2: $t3 := +($t3, $t4) + # live vars: $t0, $t3 + 3: write_ref($t0, $t3) + # live vars: + 4: return () +} + + +[variant baseline] +fun m::mod2($t0: &mut u64): u64 { + var 
$t1: u64 [unused] + var $t2: &mut u64 + var $t3: u64 [unused] + var $t4: u64 + var $t5: u64 + # live vars: $t0 + 0: $t2 := copy($t0) + # live vars: $t0, $t2 + 1: $t4 := read_ref($t2) + # live vars: $t0, $t2, $t4 + 2: $t5 := 2 + # live vars: $t0, $t2, $t4, $t5 + 3: $t4 := +($t4, $t5) + # live vars: $t0, $t2, $t4 + 4: write_ref($t2, $t4) + # live vars: $t0 + 5: $t4 := read_ref($t0) + # live vars: $t4 + 6: return $t4 +} + + +[variant baseline] +public fun m::test0(): u64 { + var $t0: u64 [unused] + var $t1: u64 + var $t2: u64 [unused] + var $t3: u64 [unused] + var $t4: u64 [unused] + var $t5: u64 + var $t6: u64 [unused] + var $t7: u64 [unused] + # live vars: + 0: $t1 := 1 + # live vars: $t1 + 1: $t5 := 2 + # live vars: $t1, $t5 + 2: $t5 := +($t1, $t5) + # live vars: $t5 + 3: $t1 := move($t5) + # live vars: $t1 + 4: $t5 := copy($t1) + # live vars: $t1, $t5 + 5: $t5 := +($t1, $t5) + # live vars: $t5 + 6: $t1 := move($t5) + # live vars: $t1 + 7: $t5 := copy($t1) + # live vars: $t1, $t5 + 8: $t5 := +($t1, $t5) + # live vars: $t5 + 9: $t1 := move($t5) + # live vars: $t1 + 10: return $t1 +} + + +[variant baseline] +public fun m::test1(): u64 { + var $t0: u64 [unused] + var $t1: u64 + var $t2: u64 [unused] + var $t3: u64 [unused] + var $t4: u64 + var $t5: u64 [unused] + # live vars: + 0: $t1 := 1 + # live vars: $t1 + 1: $t4 := 2 + # live vars: $t1, $t4 + 2: $t4 := +($t1, $t4) + # live vars: $t4 + 3: $t1 := move($t4) + # live vars: $t1 + 4: $t4 := copy($t1) + # live vars: $t1, $t4 + 5: $t4 := +($t1, $t4) + # live vars: $t4 + 6: $t1 := move($t4) + # live vars: $t1 + 7: return $t1 +} + + +[variant baseline] +public fun m::test2(): u64 { + var $t0: u64 [unused] + var $t1: u64 + var $t2: u64 + var $t3: &mut u64 + var $t4: u64 [unused] + # live vars: + 0: $t1 := 1 + # live vars: $t1 + 1: $t3 := borrow_local($t1) + # live vars: $t1, $t3 + 2: m::mod1($t3) + # live vars: $t1 + 3: $t2 := copy($t1) + # live vars: $t1, $t2 + 4: $t2 := +($t1, $t2) + # live vars: $t2 + 5: $t1 := move($t2) 
+ # live vars: $t1 + 6: $t2 := move($t1) + # live vars: $t2 + 7: return $t2 +} + + +[variant baseline] +public fun m::test3(): u64 { + var $t0: u64 [unused] + var $t1: u64 + var $t2: u64 + var $t3: &mut u64 + var $t4: u64 [unused] + # live vars: + 0: $t1 := 1 + # live vars: $t1 + 1: $t3 := borrow_local($t1) + # live vars: $t1, $t3 + 2: $t2 := m::mod2($t3) + # live vars: $t1, $t2 + 3: $t2 := +($t1, $t2) + # live vars: $t2 + 4: $t1 := move($t2) + # live vars: $t1 + 5: $t2 := move($t1) + # live vars: $t2 + 6: return $t2 +} + + +============ disassembled file-format ================== +// Move bytecode v7 +module c0ffee.m { + + +mod1(Arg0: &mut u64) /* def_idx: 0 */ { +B0: + 0: CopyLoc[0](Arg0: &mut u64) + 1: ReadRef + 2: LdU64(2) + 3: Add + 4: MoveLoc[0](Arg0: &mut u64) + 5: WriteRef + 6: Ret +} +mod2(Arg0: &mut u64): u64 /* def_idx: 1 */ { +L1: loc0: &mut u64 +B0: + 0: CopyLoc[0](Arg0: &mut u64) + 1: StLoc[1](loc0: &mut u64) + 2: CopyLoc[1](loc0: &mut u64) + 3: ReadRef + 4: LdU64(2) + 5: Add + 6: MoveLoc[1](loc0: &mut u64) + 7: WriteRef + 8: MoveLoc[0](Arg0: &mut u64) + 9: ReadRef + 10: Ret +} +public test0(): u64 /* def_idx: 2 */ { +L0: loc0: u64 +L1: loc1: u64 +B0: + 0: LdU64(1) + 1: LdU64(2) + 2: Add + 3: StLoc[0](loc0: u64) + 4: CopyLoc[0](loc0: u64) + 5: StLoc[1](loc1: u64) + 6: MoveLoc[0](loc0: u64) + 7: MoveLoc[1](loc1: u64) + 8: Add + 9: StLoc[0](loc0: u64) + 10: CopyLoc[0](loc0: u64) + 11: StLoc[1](loc1: u64) + 12: MoveLoc[0](loc0: u64) + 13: MoveLoc[1](loc1: u64) + 14: Add + 15: Ret +} +public test1(): u64 /* def_idx: 3 */ { +L0: loc0: u64 +L1: loc1: u64 +B0: + 0: LdU64(1) + 1: LdU64(2) + 2: Add + 3: StLoc[0](loc0: u64) + 4: CopyLoc[0](loc0: u64) + 5: StLoc[1](loc1: u64) + 6: MoveLoc[0](loc0: u64) + 7: MoveLoc[1](loc1: u64) + 8: Add + 9: Ret +} +public test2(): u64 /* def_idx: 4 */ { +L0: loc0: u64 +L1: loc1: u64 +B0: + 0: LdU64(1) + 1: StLoc[0](loc0: u64) + 2: MutBorrowLoc[0](loc0: u64) + 3: Call mod1(&mut u64) + 4: CopyLoc[0](loc0: u64) + 5: 
StLoc[1](loc1: u64) + 6: MoveLoc[0](loc0: u64) + 7: MoveLoc[1](loc1: u64) + 8: Add + 9: Ret +} +public test3(): u64 /* def_idx: 5 */ { +L0: loc0: u64 +L1: loc1: u64 +B0: + 0: LdU64(1) + 1: StLoc[0](loc0: u64) + 2: MutBorrowLoc[0](loc0: u64) + 3: Call mod2(&mut u64): u64 + 4: StLoc[1](loc1: u64) + 5: MoveLoc[0](loc0: u64) + 6: MoveLoc[1](loc1: u64) + 7: Add + 8: Ret +} +} +============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/op-equal/eval_order.move b/third_party/move/move-compiler-v2/tests/op-equal/eval_order.move new file mode 100644 index 0000000000000..a7ed50b6b2fd4 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/eval_order.move @@ -0,0 +1,35 @@ +//# publish +module 0xc0ffee::m { + public fun test0(): u64 { + let v = 1; + v += {v += {v += 2; v}; v}; + v + } + + public fun test1(): u64 { + let v = 1; + v += {v += 2; v}; + v + } + + fun mod1(r: &mut u64) { + *r += 2; + } + + public fun test2(): u64 { + let v = 1; + v += {mod1(&mut v); v}; + v + } + + fun mod2(r: &mut u64): u64 { + *r += 2; + *r + } + + public fun test3(): u64 { + let v = 1; + v += mod2(&mut v); + v + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid0.exp b/third_party/move/move-compiler-v2/tests/op-equal/invalid0.exp new file mode 100644 index 0000000000000..358a135228d62 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid0.exp @@ -0,0 +1,19 @@ + +Diagnostics: +error: cannot mutably borrow from an immutable ref + ┌─ tests/op-equal/invalid0.move:8:3 + │ +8 │ self.0 += 1; + │ ^^^^^^ + +error: expected `&mut` but `&` was provided + ┌─ tests/op-equal/invalid0.move:12:3 + │ +12 │ x[index] += 1; + │ ^ + +error: expected `&mut` but `&` was provided + ┌─ tests/op-equal/invalid0.move:16:3 + │ +16 │ x[index].0.0 += 1; + │ ^ diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid0.move b/third_party/move/move-compiler-v2/tests/op-equal/invalid0.move new file mode 100644 
index 0000000000000..7efcf97295eb5 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid0.move @@ -0,0 +1,18 @@ +module 0x42::test { + struct Coin(u256) has drop; + + struct Wrapper(T) has drop; + + + fun coin_inc_new_1(self: &Coin) { + self.0 += 1; + } + + fun inc_vec_new(x: &vector, index: u64) { + x[index] += 1; + } + + fun inc_vec_wrapped_coin_new(x: &vector>, index: u64) { + x[index].0.0 += 1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid1.exp b/third_party/move/move-compiler-v2/tests/op-equal/invalid1.exp new file mode 100644 index 0000000000000..e615879b5173c --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid1.exp @@ -0,0 +1,81 @@ +// -- Model dump before bytecode pipeline +module 0x42::test { + private fun test() { + { + let x: u64 = 42; + { + let p: &mut u64 = Borrow(Mutable)(x); + x: u64 = Add(x, 1); + { + let $t1: &mut u64 = p; + $t1 = Add(Deref($t1), 1) + }; + x; + Tuple() + } + } + } +} // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + fun test() { + let x = 42; + let p = &mut x; + x = x + 1; + { + let $t1 = p; + *$t1 = *$t1 + 1 + }; + x; + } +} + +============ initial bytecode ================ + +[variant baseline] +fun test::test() { + var $t0: u64 + var $t1: &mut u64 + var $t2: u64 + var $t3: u64 + var $t4: &mut u64 + var $t5: u64 + var $t6: u64 + var $t7: u64 + var $t8: u64 + 0: $t0 := 42 + 1: $t1 := borrow_local($t0) + 2: $t3 := 1 + 3: $t2 := +($t0, $t3) + 4: $t0 := infer($t2) + 5: $t4 := infer($t1) + 6: $t6 := read_ref($t4) + 7: $t7 := 1 + 8: $t5 := +($t6, $t7) + 9: write_ref($t4, $t5) + 10: $t8 := infer($t0) + 11: return () +} + + +Diagnostics: +error: cannot copy local `x` which is still mutably borrowed + ┌─ tests/op-equal/invalid1.move:5:3 + │ +4 │ let p = &mut x; + │ ------ local `x` previously mutably borrowed here +5 │ x += 1; + │ ^^^^^^ copy attempted here +6 │ *p += 1; + │ - conflicting reference `p` used here + 
+error: cannot drop local `x` which is still borrowed + ┌─ tests/op-equal/invalid1.move:5:3 + │ +4 │ let p = &mut x; + │ ------ local `x` previously mutably borrowed here +5 │ x += 1; + │ ^^^^^^ dropped here +6 │ *p += 1; + │ - conflicting reference `p` used here diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid1.move b/third_party/move/move-compiler-v2/tests/op-equal/invalid1.move new file mode 100644 index 0000000000000..dcca73a261272 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid1.move @@ -0,0 +1,9 @@ +module 0x42::test { + fun test() { + let x = 42; + let p = &mut x; + x += 1; + *p += 1; + x; + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid2.exp b/third_party/move/move-compiler-v2/tests/op-equal/invalid2.exp new file mode 100644 index 0000000000000..ed3dc28c6ba5b --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid2.exp @@ -0,0 +1,19 @@ + +Diagnostics: +error: cannot use `bool` with an operator which expects a value of type `integer` + ┌─ tests/op-equal/invalid2.move:4:9 + │ +4 │ x += 1; + │ ^ + +error: cannot use `bool` with an operator which expects a value of type `integer` + ┌─ tests/op-equal/invalid2.move:9:9 + │ +9 │ x += true; + │ ^ + +error: cannot use `bool` with an operator which expects a value of type `integer` + ┌─ tests/op-equal/invalid2.move:14:14 + │ +14 │ x += false; + │ ^^^^^ diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid2.move b/third_party/move/move-compiler-v2/tests/op-equal/invalid2.move new file mode 100644 index 0000000000000..61edb2c1715b1 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid2.move @@ -0,0 +1,16 @@ +module 0x42::test { + fun test0() { + let x = false; + x += 1; + } + + fun test1() { + let x = false; + x += true; + } + + fun test2() { + let x = 1; + x += false; + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid3.exp 
b/third_party/move/move-compiler-v2/tests/op-equal/invalid3.exp new file mode 100644 index 0000000000000..b983ec425e8fb --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid3.exp @@ -0,0 +1,13 @@ + +Diagnostics: +error: invalid assignment + ┌─ tests/op-equal/invalid3.move:3:9 + │ +3 │ 1 += 1; + │ ^ Invalid assignment syntax. Expected: a local, a field write, or a deconstructing assignment + +error: invalid assignment + ┌─ tests/op-equal/invalid3.move:11:9 + │ +11 │ foo() += 1; + │ ^^^^^ Invalid assignment syntax. Expected: a local, a field write, or a deconstructing assignment diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid3.move b/third_party/move/move-compiler-v2/tests/op-equal/invalid3.move new file mode 100644 index 0000000000000..8600114840708 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid3.move @@ -0,0 +1,13 @@ +module 0x42::test { + fun test0() { + 1 += 1; + } + + fun foo(): u8 { + 42 + } + + fun test1() { + foo() += 1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid4.exp b/third_party/move/move-compiler-v2/tests/op-equal/invalid4.exp new file mode 100644 index 0000000000000..1550536bbc989 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid4.exp @@ -0,0 +1,10 @@ + +Diagnostics: +error: unexpected token + ┌─ tests/op-equal/invalid4.move:3:15 + │ +3 │ let x += 1; + │ ^^ + │ │ + │ Unexpected '+=' + │ Expected ';' diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid4.move b/third_party/move/move-compiler-v2/tests/op-equal/invalid4.move new file mode 100644 index 0000000000000..8b3d6ba344d7c --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid4.move @@ -0,0 +1,5 @@ +module 0x42::test { + fun test() { + let x += 1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid5.exp b/third_party/move/move-compiler-v2/tests/op-equal/invalid5.exp new file mode 100644 index 
0000000000000..e8856e5b040c6 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid5.exp @@ -0,0 +1,7 @@ + +Diagnostics: +error: cannot use `()` with an operator which expects a value of type `integer` + ┌─ tests/op-equal/invalid5.move:4:17 + │ +4 │ let y = (x += 2) * (x -= 1); + │ ^^^^^^^^ diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid5.move b/third_party/move/move-compiler-v2/tests/op-equal/invalid5.move new file mode 100644 index 0000000000000..32be3bf383db8 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid5.move @@ -0,0 +1,6 @@ +module 0x42::test { + fun testZ() { + let x = 3; + let y = (x += 2) * (x -= 1); + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid6.exp b/third_party/move/move-compiler-v2/tests/op-equal/invalid6.exp new file mode 100644 index 0000000000000..01616fe6ce2a2 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid6.exp @@ -0,0 +1,41 @@ +// -- Model dump before bytecode pipeline +module 0x42::test { + private fun inc_new(x: &u256) { + { + let $t1: &u256 = x; + $t1 = Add(Deref($t1), 1) + }; + Tuple() + } + private fun inc_old(x: &u256) { + x = Add(Deref(x), 1); + Tuple() + } +} // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + fun inc_new(x: &u256) { + { + let $t1 = x; + *$t1 = *$t1 + 1u256 + }; + } + fun inc_old(x: &u256) { + *x = *x + 1u256; + } +} + + +Diagnostics: +error: expected `&mut` but found `&u256` + ┌─ tests/op-equal/invalid6.move:3:10 + │ +3 │ *x = *x + 1; + │ ^ + +error: expected `&mut` but found `&u256` + ┌─ tests/op-equal/invalid6.move:7:11 + │ +7 │ *x += 1; + │ ^ diff --git a/third_party/move/move-compiler-v2/tests/op-equal/invalid6.move b/third_party/move/move-compiler-v2/tests/op-equal/invalid6.move new file mode 100644 index 0000000000000..684e628eacfe0 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/invalid6.move @@ -0,0 +1,9 @@ +module 
0x42::test { + fun inc_old(x: &u256) { + *x = *x + 1; + } + + fun inc_new(x: &u256) { + *x += 1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/valid0.exp b/third_party/move/move-compiler-v2/tests/op-equal/valid0.exp new file mode 100644 index 0000000000000..e3a00fa6e9edf --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/valid0.exp @@ -0,0 +1,1165 @@ +// -- Model dump before bytecode pipeline +module 0x42::test { + struct Coin { + 0: u256, + } + struct Wrapper { + 0: T, + } + private fun add1_new(x: u256): u256 { + x: u256 = Add(x, 1); + x + } + private fun add1_old(x: u256): u256 { + x: u256 = Add(x, 1); + x + } + private fun coin_inc_new_1(self: &mut Coin) { + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0<&mut Coin>(self)); + $t1 = Add(Deref($t1), 1) + }; + Tuple() + } + private fun coin_inc_new_2(self: &mut Coin) { + { + let p: &mut u256 = Borrow(Mutable)(select test::Coin.0<&mut Coin>(self)); + p = Add(Deref(p), 1); + Tuple() + } + } + private fun coin_inc_old_1(self: &mut Coin) { + select test::Coin.0<&mut Coin>(self) = Add(select test::Coin.0<&mut Coin>(self), 1); + Tuple() + } + private fun coin_inc_old_2(self: &mut Coin) { + { + let p: &mut u256 = Borrow(Mutable)(select test::Coin.0<&mut Coin>(self)); + p = Add(Deref(p), 1); + Tuple() + } + } + private fun inc_coin_at(addr: address) + acquires Coin(*) + { + { + let coin: &mut Coin = BorrowGlobal(Mutable)(addr); + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0<&mut Coin>(coin)); + $t1 = Add(Deref($t1), 1) + }; + Tuple() + } + } + private fun inc_new(x: &mut u256) { + { + let $t1: &mut u256 = x; + $t1 = Add(Deref($t1), 1) + }; + Tuple() + } + private fun inc_old(x: &mut u256) { + x = Add(Deref(x), 1); + Tuple() + } + private fun inc_vec_coin_new(x: vector,index: u64) { + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0(vector::borrow_mut(Borrow(Mutable)(x), index))); + $t1 = Add(Deref($t1), 1) + }; + Tuple() + } + private fun 
inc_vec_coin_old(x: vector,index: u64) { + select test::Coin.0(vector::borrow_mut(Borrow(Mutable)(x), index)) = Add(select test::Coin.0(vector::borrow(Borrow(Immutable)(x), index)), 1); + Tuple() + } + private fun inc_vec_new(x: &mut vector,index: u64) { + { + let $t1: &mut u256 = vector::borrow_mut(x, index); + $t1 = Add(Deref($t1), 1) + }; + Tuple() + } + private fun inc_vec_old(x: vector,index: u64) { + vector::borrow_mut(Borrow(Mutable)(x), index) = Add(Deref(vector::borrow(Borrow(Immutable)(x), index)), 1); + Tuple() + } + private fun inc_vec_wrapped_coin_new(x: vector>,index: u64) { + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0(select test::Wrapper.0>(vector::borrow_mut>(Borrow(Mutable)(x), index)))); + $t1 = Add(Deref($t1), 1) + }; + Tuple() + } + private fun inc_vec_wrapped_coin_old(x: vector>,index: u64) { + select test::Coin.0(select test::Wrapper.0>(vector::borrow_mut>(Borrow(Mutable)(x), index))) = Add(select test::Coin.0(select test::Wrapper.0>(vector::borrow>(Borrow(Immutable)(x), index))), 1); + Tuple() + } + private fun inc_wrapped_coin_new(x: &mut Wrapper) { + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0(select test::Wrapper.0<&mut Wrapper>(x))); + $t1 = Add(Deref($t1), 1) + }; + Tuple() + } + private fun inc_wrapped_coin_old(x: &mut Wrapper) { + select test::Coin.0(select test::Wrapper.0<&mut Wrapper>(x)) = Add(select test::Coin.0(select test::Wrapper.0<&mut Wrapper>(x)), 1); + Tuple() + } +} // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + struct Coin has drop, key { + 0: u256, + } + struct Wrapper has drop, key { + 0: T, + } + fun add1_new(x: u256): u256 { + x = x + 1u256; + x + } + fun add1_old(x: u256): u256 { + x = x + 1u256; + x + } + fun coin_inc_new_1(self: &mut Coin) { + { + let $t1 = &mut self.0; + *$t1 = *$t1 + 1u256 + }; + } + fun coin_inc_new_2(self: &mut Coin) { + let p = &mut self.0; + *p = *p + 1u256; + } + fun coin_inc_old_1(self: &mut Coin) { + self.0 = 
self.0 + 1u256; + } + fun coin_inc_old_2(self: &mut Coin) { + let p = &mut self.0; + *p = *p + 1u256; + } + fun inc_coin_at(addr: address) + acquires Coin + { + let coin = borrow_global_mut(addr); + { + let $t1 = &mut coin.0; + *$t1 = *$t1 + 1u256 + }; + } + fun inc_new(x: &mut u256) { + { + let $t1 = x; + *$t1 = *$t1 + 1u256 + }; + } + fun inc_old(x: &mut u256) { + *x = *x + 1u256; + } + fun inc_vec_coin_new(x: vector, index: u64) { + { + let $t1 = &mut 0x1::vector::borrow_mut(&mut x, index).0; + *$t1 = *$t1 + 1u256 + }; + } + fun inc_vec_coin_old(x: vector, index: u64) { + 0x1::vector::borrow_mut(&mut x, index).0 = 0x1::vector::borrow(&x, index).0 + 1u256; + } + fun inc_vec_new(x: &mut vector, index: u64) { + { + let $t1 = 0x1::vector::borrow_mut(x, index); + *$t1 = *$t1 + 1u256 + }; + } + fun inc_vec_old(x: vector, index: u64) { + *0x1::vector::borrow_mut(&mut x, index) = *0x1::vector::borrow(&x, index) + 1u256; + } + fun inc_vec_wrapped_coin_new(x: vector>, index: u64) { + { + let $t1 = &mut 0x1::vector::borrow_mut>(&mut x, index).0.0; + *$t1 = *$t1 + 1u256 + }; + } + fun inc_vec_wrapped_coin_old(x: vector>, index: u64) { + 0x1::vector::borrow_mut>(&mut x, index).0.0 = 0x1::vector::borrow>(&x, index).0.0 + 1u256; + } + fun inc_wrapped_coin_new(x: &mut Wrapper) { + { + let $t1 = &mut x.0.0; + *$t1 = *$t1 + 1u256 + }; + } + fun inc_wrapped_coin_old(x: &mut Wrapper) { + x.0.0 = x.0.0 + 1u256; + } +} + +============ initial bytecode ================ + +[variant baseline] +fun test::add1_new($t0: u256): u256 { + var $t1: u256 + var $t2: u256 + var $t3: u256 + 0: $t3 := 1 + 1: $t2 := +($t0, $t3) + 2: $t0 := infer($t2) + 3: $t1 := infer($t0) + 4: return $t1 +} + + +[variant baseline] +fun test::add1_old($t0: u256): u256 { + var $t1: u256 + var $t2: u256 + var $t3: u256 + 0: $t3 := 1 + 1: $t2 := +($t0, $t3) + 2: $t0 := infer($t2) + 3: $t1 := infer($t0) + 4: return $t1 +} + + +[variant baseline] +fun test::coin_inc_new_1($t0: &mut 0x42::test::Coin) { + var $t1: &mut 
u256 + var $t2: u256 + var $t3: u256 + var $t4: u256 + 0: $t1 := borrow_field<0x42::test::Coin>.0($t0) + 1: $t3 := read_ref($t1) + 2: $t4 := 1 + 3: $t2 := +($t3, $t4) + 4: write_ref($t1, $t2) + 5: return () +} + + +[variant baseline] +fun test::coin_inc_new_2($t0: &mut 0x42::test::Coin) { + var $t1: &mut u256 + var $t2: u256 + var $t3: u256 + var $t4: u256 + 0: $t1 := borrow_field<0x42::test::Coin>.0($t0) + 1: $t3 := read_ref($t1) + 2: $t4 := 1 + 3: $t2 := +($t3, $t4) + 4: write_ref($t1, $t2) + 5: return () +} + + +[variant baseline] +fun test::coin_inc_old_1($t0: &mut 0x42::test::Coin) { + var $t1: u256 + var $t2: u256 + var $t3: &u256 + var $t4: u256 + var $t5: &mut u256 + 0: $t3 := borrow_field<0x42::test::Coin>.0($t0) + 1: $t2 := read_ref($t3) + 2: $t4 := 1 + 3: $t1 := +($t2, $t4) + 4: $t5 := borrow_field<0x42::test::Coin>.0($t0) + 5: write_ref($t5, $t1) + 6: return () +} + + +[variant baseline] +fun test::coin_inc_old_2($t0: &mut 0x42::test::Coin) { + var $t1: &mut u256 + var $t2: u256 + var $t3: u256 + var $t4: u256 + 0: $t1 := borrow_field<0x42::test::Coin>.0($t0) + 1: $t3 := read_ref($t1) + 2: $t4 := 1 + 3: $t2 := +($t3, $t4) + 4: write_ref($t1, $t2) + 5: return () +} + + +[variant baseline] +fun test::inc_coin_at($t0: address) { + var $t1: &mut 0x42::test::Coin + var $t2: &mut u256 + var $t3: u256 + var $t4: u256 + var $t5: u256 + 0: $t1 := borrow_global<0x42::test::Coin>($t0) + 1: $t2 := borrow_field<0x42::test::Coin>.0($t1) + 2: $t4 := read_ref($t2) + 3: $t5 := 1 + 4: $t3 := +($t4, $t5) + 5: write_ref($t2, $t3) + 6: return () +} + + +[variant baseline] +fun test::inc_new($t0: &mut u256) { + var $t1: &mut u256 + var $t2: u256 + var $t3: u256 + var $t4: u256 + 0: $t1 := infer($t0) + 1: $t3 := read_ref($t1) + 2: $t4 := 1 + 3: $t2 := +($t3, $t4) + 4: write_ref($t1, $t2) + 5: return () +} + + +[variant baseline] +fun test::inc_old($t0: &mut u256) { + var $t1: u256 + var $t2: u256 + var $t3: u256 + 0: $t2 := read_ref($t0) + 1: $t3 := 1 + 2: $t1 := +($t2, $t3) 
+ 3: write_ref($t0, $t1) + 4: return () +} + + +[variant baseline] +fun test::inc_vec_coin_new($t0: vector<0x42::test::Coin>, $t1: u64) { + var $t2: &mut u256 + var $t3: &mut 0x42::test::Coin + var $t4: &mut vector<0x42::test::Coin> + var $t5: u256 + var $t6: u256 + var $t7: u256 + 0: $t4 := borrow_local($t0) + 1: $t3 := vector::borrow_mut<0x42::test::Coin>($t4, $t1) + 2: $t2 := borrow_field<0x42::test::Coin>.0($t3) + 3: $t6 := read_ref($t2) + 4: $t7 := 1 + 5: $t5 := +($t6, $t7) + 6: write_ref($t2, $t5) + 7: return () +} + + +[variant baseline] +fun test::inc_vec_coin_old($t0: vector<0x42::test::Coin>, $t1: u64) { + var $t2: u256 + var $t3: u256 + var $t4: &0x42::test::Coin + var $t5: &vector<0x42::test::Coin> + var $t6: &u256 + var $t7: u256 + var $t8: &mut u256 + var $t9: &mut 0x42::test::Coin + var $t10: &mut vector<0x42::test::Coin> + 0: $t5 := borrow_local($t0) + 1: $t4 := vector::borrow<0x42::test::Coin>($t5, $t1) + 2: $t6 := borrow_field<0x42::test::Coin>.0($t4) + 3: $t3 := read_ref($t6) + 4: $t7 := 1 + 5: $t2 := +($t3, $t7) + 6: $t10 := borrow_local($t0) + 7: $t9 := vector::borrow_mut<0x42::test::Coin>($t10, $t1) + 8: $t8 := borrow_field<0x42::test::Coin>.0($t9) + 9: write_ref($t8, $t2) + 10: return () +} + + +[variant baseline] +fun test::inc_vec_new($t0: &mut vector, $t1: u64) { + var $t2: &mut u256 + var $t3: u256 + var $t4: u256 + var $t5: u256 + 0: $t2 := vector::borrow_mut($t0, $t1) + 1: $t4 := read_ref($t2) + 2: $t5 := 1 + 3: $t3 := +($t4, $t5) + 4: write_ref($t2, $t3) + 5: return () +} + + +[variant baseline] +fun test::inc_vec_old($t0: vector, $t1: u64) { + var $t2: u256 + var $t3: u256 + var $t4: &u256 + var $t5: &vector + var $t6: u256 + var $t7: &mut u256 + var $t8: &mut vector + 0: $t5 := borrow_local($t0) + 1: $t4 := vector::borrow($t5, $t1) + 2: $t3 := read_ref($t4) + 3: $t6 := 1 + 4: $t2 := +($t3, $t6) + 5: $t8 := borrow_local($t0) + 6: $t7 := vector::borrow_mut($t8, $t1) + 7: write_ref($t7, $t2) + 8: return () +} + + +[variant baseline] 
+fun test::inc_vec_wrapped_coin_new($t0: vector<0x42::test::Wrapper<0x42::test::Coin>>, $t1: u64) { + var $t2: &mut u256 + var $t3: &mut 0x42::test::Coin + var $t4: &mut 0x42::test::Wrapper<0x42::test::Coin> + var $t5: &mut vector<0x42::test::Wrapper<0x42::test::Coin>> + var $t6: u256 + var $t7: u256 + var $t8: u256 + 0: $t5 := borrow_local($t0) + 1: $t4 := vector::borrow_mut<0x42::test::Wrapper<0x42::test::Coin>>($t5, $t1) + 2: $t3 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t4) + 3: $t2 := borrow_field<0x42::test::Coin>.0($t3) + 4: $t7 := read_ref($t2) + 5: $t8 := 1 + 6: $t6 := +($t7, $t8) + 7: write_ref($t2, $t6) + 8: return () +} + + +[variant baseline] +fun test::inc_vec_wrapped_coin_old($t0: vector<0x42::test::Wrapper<0x42::test::Coin>>, $t1: u64) { + var $t2: u256 + var $t3: u256 + var $t4: &0x42::test::Coin + var $t5: &0x42::test::Wrapper<0x42::test::Coin> + var $t6: &vector<0x42::test::Wrapper<0x42::test::Coin>> + var $t7: &u256 + var $t8: u256 + var $t9: &mut u256 + var $t10: &mut 0x42::test::Coin + var $t11: &mut 0x42::test::Wrapper<0x42::test::Coin> + var $t12: &mut vector<0x42::test::Wrapper<0x42::test::Coin>> + 0: $t6 := borrow_local($t0) + 1: $t5 := vector::borrow<0x42::test::Wrapper<0x42::test::Coin>>($t6, $t1) + 2: $t4 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t5) + 3: $t7 := borrow_field<0x42::test::Coin>.0($t4) + 4: $t3 := read_ref($t7) + 5: $t8 := 1 + 6: $t2 := +($t3, $t8) + 7: $t12 := borrow_local($t0) + 8: $t11 := vector::borrow_mut<0x42::test::Wrapper<0x42::test::Coin>>($t12, $t1) + 9: $t10 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t11) + 10: $t9 := borrow_field<0x42::test::Coin>.0($t10) + 11: write_ref($t9, $t2) + 12: return () +} + + +[variant baseline] +fun test::inc_wrapped_coin_new($t0: &mut 0x42::test::Wrapper<0x42::test::Coin>) { + var $t1: &mut u256 + var $t2: &mut 0x42::test::Coin + var $t3: u256 + var $t4: u256 + var $t5: u256 + 0: $t2 := 
borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t0) + 1: $t1 := borrow_field<0x42::test::Coin>.0($t2) + 2: $t4 := read_ref($t1) + 3: $t5 := 1 + 4: $t3 := +($t4, $t5) + 5: write_ref($t1, $t3) + 6: return () +} + + +[variant baseline] +fun test::inc_wrapped_coin_old($t0: &mut 0x42::test::Wrapper<0x42::test::Coin>) { + var $t1: u256 + var $t2: u256 + var $t3: &0x42::test::Coin + var $t4: &u256 + var $t5: u256 + var $t6: &mut u256 + var $t7: &mut 0x42::test::Coin + 0: $t3 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t0) + 1: $t4 := borrow_field<0x42::test::Coin>.0($t3) + 2: $t2 := read_ref($t4) + 3: $t5 := 1 + 4: $t1 := +($t2, $t5) + 5: $t7 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t0) + 6: $t6 := borrow_field<0x42::test::Coin>.0($t7) + 7: write_ref($t6, $t1) + 8: return () +} + +============ after LiveVarAnalysisProcessor: ================ + +[variant baseline] +fun test::add1_new($t0: u256): u256 { + var $t1: u256 [unused] + var $t2: u256 [unused] + var $t3: u256 + # live vars: $t0 + 0: $t3 := 1 + # live vars: $t0, $t3 + 1: $t3 := +($t0, $t3) + # live vars: $t3 + 2: $t0 := move($t3) + # live vars: $t0 + 3: return $t0 +} + + +[variant baseline] +fun test::add1_old($t0: u256): u256 { + var $t1: u256 [unused] + var $t2: u256 [unused] + var $t3: u256 + # live vars: $t0 + 0: $t3 := 1 + # live vars: $t0, $t3 + 1: $t3 := +($t0, $t3) + # live vars: $t3 + 2: $t0 := move($t3) + # live vars: $t0 + 3: return $t0 +} + + +[variant baseline] +fun test::coin_inc_new_1($t0: &mut 0x42::test::Coin) { + var $t1: &mut u256 + var $t2: u256 [unused] + var $t3: u256 + var $t4: u256 + # live vars: $t0 + 0: $t1 := borrow_field<0x42::test::Coin>.0($t0) + # live vars: $t1 + 1: $t3 := read_ref($t1) + # live vars: $t1, $t3 + 2: $t4 := 1 + # live vars: $t1, $t3, $t4 + 3: $t3 := +($t3, $t4) + # live vars: $t1, $t3 + 4: write_ref($t1, $t3) + # live vars: + 5: return () +} + + +[variant baseline] +fun test::coin_inc_new_2($t0: &mut 0x42::test::Coin) { + var $t1: 
&mut u256 + var $t2: u256 [unused] + var $t3: u256 + var $t4: u256 + # live vars: $t0 + 0: $t1 := borrow_field<0x42::test::Coin>.0($t0) + # live vars: $t1 + 1: $t3 := read_ref($t1) + # live vars: $t1, $t3 + 2: $t4 := 1 + # live vars: $t1, $t3, $t4 + 3: $t3 := +($t3, $t4) + # live vars: $t1, $t3 + 4: write_ref($t1, $t3) + # live vars: + 5: return () +} + + +[variant baseline] +fun test::coin_inc_old_1($t0: &mut 0x42::test::Coin) { + var $t1: u256 [unused] + var $t2: u256 + var $t3: &u256 + var $t4: u256 + var $t5: &mut u256 + # live vars: $t0 + 0: $t3 := borrow_field<0x42::test::Coin>.0($t0) + # live vars: $t0, $t3 + 1: $t2 := read_ref($t3) + # live vars: $t0, $t2 + 2: $t4 := 1 + # live vars: $t0, $t2, $t4 + 3: $t2 := +($t2, $t4) + # live vars: $t0, $t2 + 4: $t5 := borrow_field<0x42::test::Coin>.0($t0) + # live vars: $t2, $t5 + 5: write_ref($t5, $t2) + # live vars: + 6: return () +} + + +[variant baseline] +fun test::coin_inc_old_2($t0: &mut 0x42::test::Coin) { + var $t1: &mut u256 + var $t2: u256 [unused] + var $t3: u256 + var $t4: u256 + # live vars: $t0 + 0: $t1 := borrow_field<0x42::test::Coin>.0($t0) + # live vars: $t1 + 1: $t3 := read_ref($t1) + # live vars: $t1, $t3 + 2: $t4 := 1 + # live vars: $t1, $t3, $t4 + 3: $t3 := +($t3, $t4) + # live vars: $t1, $t3 + 4: write_ref($t1, $t3) + # live vars: + 5: return () +} + + +[variant baseline] +fun test::inc_coin_at($t0: address) { + var $t1: &mut 0x42::test::Coin + var $t2: &mut u256 + var $t3: u256 [unused] + var $t4: u256 + var $t5: u256 + # live vars: $t0 + 0: $t1 := borrow_global<0x42::test::Coin>($t0) + # live vars: $t1 + 1: $t2 := borrow_field<0x42::test::Coin>.0($t1) + # live vars: $t2 + 2: $t4 := read_ref($t2) + # live vars: $t2, $t4 + 3: $t5 := 1 + # live vars: $t2, $t4, $t5 + 4: $t4 := +($t4, $t5) + # live vars: $t2, $t4 + 5: write_ref($t2, $t4) + # live vars: + 6: return () +} + + +[variant baseline] +fun test::inc_new($t0: &mut u256) { + var $t1: &mut u256 [unused] + var $t2: u256 [unused] + var $t3: 
u256 + var $t4: u256 + # live vars: $t0 + 0: $t3 := read_ref($t0) + # live vars: $t0, $t3 + 1: $t4 := 1 + # live vars: $t0, $t3, $t4 + 2: $t3 := +($t3, $t4) + # live vars: $t0, $t3 + 3: write_ref($t0, $t3) + # live vars: + 4: return () +} + + +[variant baseline] +fun test::inc_old($t0: &mut u256) { + var $t1: u256 [unused] + var $t2: u256 + var $t3: u256 + # live vars: $t0 + 0: $t2 := read_ref($t0) + # live vars: $t0, $t2 + 1: $t3 := 1 + # live vars: $t0, $t2, $t3 + 2: $t2 := +($t2, $t3) + # live vars: $t0, $t2 + 3: write_ref($t0, $t2) + # live vars: + 4: return () +} + + +[variant baseline] +fun test::inc_vec_coin_new($t0: vector<0x42::test::Coin>, $t1: u64) { + var $t2: &mut u256 + var $t3: &mut 0x42::test::Coin + var $t4: &mut vector<0x42::test::Coin> + var $t5: u256 [unused] + var $t6: u256 + var $t7: u256 + # live vars: $t0, $t1 + 0: $t4 := borrow_local($t0) + # live vars: $t1, $t4 + 1: $t3 := vector::borrow_mut<0x42::test::Coin>($t4, $t1) + # live vars: $t3 + 2: $t2 := borrow_field<0x42::test::Coin>.0($t3) + # live vars: $t2 + 3: $t6 := read_ref($t2) + # live vars: $t2, $t6 + 4: $t7 := 1 + # live vars: $t2, $t6, $t7 + 5: $t6 := +($t6, $t7) + # live vars: $t2, $t6 + 6: write_ref($t2, $t6) + # live vars: + 7: return () +} + + +[variant baseline] +fun test::inc_vec_coin_old($t0: vector<0x42::test::Coin>, $t1: u64) { + var $t2: u256 [unused] + var $t3: u256 + var $t4: &0x42::test::Coin + var $t5: &vector<0x42::test::Coin> + var $t6: &u256 + var $t7: u256 + var $t8: &mut u256 + var $t9: &mut 0x42::test::Coin + var $t10: &mut vector<0x42::test::Coin> + # live vars: $t0, $t1 + 0: $t5 := borrow_local($t0) + # live vars: $t0, $t1, $t5 + 1: $t4 := vector::borrow<0x42::test::Coin>($t5, $t1) + # live vars: $t0, $t1, $t4 + 2: $t6 := borrow_field<0x42::test::Coin>.0($t4) + # live vars: $t0, $t1, $t6 + 3: $t3 := read_ref($t6) + # live vars: $t0, $t1, $t3 + 4: $t7 := 1 + # live vars: $t0, $t1, $t3, $t7 + 5: $t3 := +($t3, $t7) + # live vars: $t0, $t1, $t3 + 6: $t10 := 
borrow_local($t0) + # live vars: $t1, $t3, $t10 + 7: $t9 := vector::borrow_mut<0x42::test::Coin>($t10, $t1) + # live vars: $t3, $t9 + 8: $t8 := borrow_field<0x42::test::Coin>.0($t9) + # live vars: $t3, $t8 + 9: write_ref($t8, $t3) + # live vars: + 10: return () +} + + +[variant baseline] +fun test::inc_vec_new($t0: &mut vector, $t1: u64) { + var $t2: &mut u256 + var $t3: u256 [unused] + var $t4: u256 + var $t5: u256 + # live vars: $t0, $t1 + 0: $t2 := vector::borrow_mut($t0, $t1) + # live vars: $t2 + 1: $t4 := read_ref($t2) + # live vars: $t2, $t4 + 2: $t5 := 1 + # live vars: $t2, $t4, $t5 + 3: $t4 := +($t4, $t5) + # live vars: $t2, $t4 + 4: write_ref($t2, $t4) + # live vars: + 5: return () +} + + +[variant baseline] +fun test::inc_vec_old($t0: vector, $t1: u64) { + var $t2: u256 [unused] + var $t3: u256 + var $t4: &u256 + var $t5: &vector + var $t6: u256 + var $t7: &mut u256 + var $t8: &mut vector + # live vars: $t0, $t1 + 0: $t5 := borrow_local($t0) + # live vars: $t0, $t1, $t5 + 1: $t4 := vector::borrow($t5, $t1) + # live vars: $t0, $t1, $t4 + 2: $t3 := read_ref($t4) + # live vars: $t0, $t1, $t3 + 3: $t6 := 1 + # live vars: $t0, $t1, $t3, $t6 + 4: $t3 := +($t3, $t6) + # live vars: $t0, $t1, $t3 + 5: $t8 := borrow_local($t0) + # live vars: $t1, $t3, $t8 + 6: $t7 := vector::borrow_mut($t8, $t1) + # live vars: $t3, $t7 + 7: write_ref($t7, $t3) + # live vars: + 8: return () +} + + +[variant baseline] +fun test::inc_vec_wrapped_coin_new($t0: vector<0x42::test::Wrapper<0x42::test::Coin>>, $t1: u64) { + var $t2: &mut u256 + var $t3: &mut 0x42::test::Coin + var $t4: &mut 0x42::test::Wrapper<0x42::test::Coin> + var $t5: &mut vector<0x42::test::Wrapper<0x42::test::Coin>> + var $t6: u256 [unused] + var $t7: u256 + var $t8: u256 + # live vars: $t0, $t1 + 0: $t5 := borrow_local($t0) + # live vars: $t1, $t5 + 1: $t4 := vector::borrow_mut<0x42::test::Wrapper<0x42::test::Coin>>($t5, $t1) + # live vars: $t4 + 2: $t3 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t4) + 
# live vars: $t3 + 3: $t2 := borrow_field<0x42::test::Coin>.0($t3) + # live vars: $t2 + 4: $t7 := read_ref($t2) + # live vars: $t2, $t7 + 5: $t8 := 1 + # live vars: $t2, $t7, $t8 + 6: $t7 := +($t7, $t8) + # live vars: $t2, $t7 + 7: write_ref($t2, $t7) + # live vars: + 8: return () +} + + +[variant baseline] +fun test::inc_vec_wrapped_coin_old($t0: vector<0x42::test::Wrapper<0x42::test::Coin>>, $t1: u64) { + var $t2: u256 [unused] + var $t3: u256 + var $t4: &0x42::test::Coin + var $t5: &0x42::test::Wrapper<0x42::test::Coin> + var $t6: &vector<0x42::test::Wrapper<0x42::test::Coin>> + var $t7: &u256 + var $t8: u256 + var $t9: &mut u256 + var $t10: &mut 0x42::test::Coin + var $t11: &mut 0x42::test::Wrapper<0x42::test::Coin> + var $t12: &mut vector<0x42::test::Wrapper<0x42::test::Coin>> + # live vars: $t0, $t1 + 0: $t6 := borrow_local($t0) + # live vars: $t0, $t1, $t6 + 1: $t5 := vector::borrow<0x42::test::Wrapper<0x42::test::Coin>>($t6, $t1) + # live vars: $t0, $t1, $t5 + 2: $t4 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t5) + # live vars: $t0, $t1, $t4 + 3: $t7 := borrow_field<0x42::test::Coin>.0($t4) + # live vars: $t0, $t1, $t7 + 4: $t3 := read_ref($t7) + # live vars: $t0, $t1, $t3 + 5: $t8 := 1 + # live vars: $t0, $t1, $t3, $t8 + 6: $t3 := +($t3, $t8) + # live vars: $t0, $t1, $t3 + 7: $t12 := borrow_local($t0) + # live vars: $t1, $t3, $t12 + 8: $t11 := vector::borrow_mut<0x42::test::Wrapper<0x42::test::Coin>>($t12, $t1) + # live vars: $t3, $t11 + 9: $t10 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t11) + # live vars: $t3, $t10 + 10: $t9 := borrow_field<0x42::test::Coin>.0($t10) + # live vars: $t3, $t9 + 11: write_ref($t9, $t3) + # live vars: + 12: return () +} + + +[variant baseline] +fun test::inc_wrapped_coin_new($t0: &mut 0x42::test::Wrapper<0x42::test::Coin>) { + var $t1: &mut u256 + var $t2: &mut 0x42::test::Coin + var $t3: u256 [unused] + var $t4: u256 + var $t5: u256 + # live vars: $t0 + 0: $t2 := 
borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t0) + # live vars: $t2 + 1: $t1 := borrow_field<0x42::test::Coin>.0($t2) + # live vars: $t1 + 2: $t4 := read_ref($t1) + # live vars: $t1, $t4 + 3: $t5 := 1 + # live vars: $t1, $t4, $t5 + 4: $t4 := +($t4, $t5) + # live vars: $t1, $t4 + 5: write_ref($t1, $t4) + # live vars: + 6: return () +} + + +[variant baseline] +fun test::inc_wrapped_coin_old($t0: &mut 0x42::test::Wrapper<0x42::test::Coin>) { + var $t1: u256 [unused] + var $t2: u256 + var $t3: &0x42::test::Coin + var $t4: &u256 + var $t5: u256 + var $t6: &mut u256 + var $t7: &mut 0x42::test::Coin + # live vars: $t0 + 0: $t3 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t0) + # live vars: $t0, $t3 + 1: $t4 := borrow_field<0x42::test::Coin>.0($t3) + # live vars: $t0, $t4 + 2: $t2 := read_ref($t4) + # live vars: $t0, $t2 + 3: $t5 := 1 + # live vars: $t0, $t2, $t5 + 4: $t2 := +($t2, $t5) + # live vars: $t0, $t2 + 5: $t7 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t0) + # live vars: $t2, $t7 + 6: $t6 := borrow_field<0x42::test::Coin>.0($t7) + # live vars: $t2, $t6 + 7: write_ref($t6, $t2) + # live vars: + 8: return () +} + + +============ disassembled file-format ================== +// Move bytecode v7 +module 42.test { +struct Coin has drop, key { + _0: u256 +} +struct Wrapper has drop, key { + _0: Ty0 +} + +add1_new(Arg0: u256): u256 /* def_idx: 0 */ { +L1: loc0: u256 +B0: + 0: MoveLoc[0](Arg0: u256) + 1: LdU256(1) + 2: Add + 3: Ret +} +add1_old(Arg0: u256): u256 /* def_idx: 1 */ { +L1: loc0: u256 +B0: + 0: MoveLoc[0](Arg0: u256) + 1: LdU256(1) + 2: Add + 3: Ret +} +coin_inc_new_1(Arg0: &mut Coin) /* def_idx: 2 */ { +L1: loc0: &mut u256 +B0: + 0: MoveLoc[0](Arg0: &mut Coin) + 1: MutBorrowField[0](Coin._0: u256) + 2: StLoc[1](loc0: &mut u256) + 3: CopyLoc[1](loc0: &mut u256) + 4: ReadRef + 5: LdU256(1) + 6: Add + 7: MoveLoc[1](loc0: &mut u256) + 8: WriteRef + 9: Ret +} +coin_inc_new_2(Arg0: &mut Coin) /* def_idx: 3 */ { +L1: loc0: &mut 
u256 +B0: + 0: MoveLoc[0](Arg0: &mut Coin) + 1: MutBorrowField[0](Coin._0: u256) + 2: StLoc[1](loc0: &mut u256) + 3: CopyLoc[1](loc0: &mut u256) + 4: ReadRef + 5: LdU256(1) + 6: Add + 7: MoveLoc[1](loc0: &mut u256) + 8: WriteRef + 9: Ret +} +coin_inc_old_1(Arg0: &mut Coin) /* def_idx: 4 */ { +B0: + 0: CopyLoc[0](Arg0: &mut Coin) + 1: ImmBorrowField[0](Coin._0: u256) + 2: ReadRef + 3: LdU256(1) + 4: Add + 5: MoveLoc[0](Arg0: &mut Coin) + 6: MutBorrowField[0](Coin._0: u256) + 7: WriteRef + 8: Ret +} +coin_inc_old_2(Arg0: &mut Coin) /* def_idx: 5 */ { +L1: loc0: &mut u256 +B0: + 0: MoveLoc[0](Arg0: &mut Coin) + 1: MutBorrowField[0](Coin._0: u256) + 2: StLoc[1](loc0: &mut u256) + 3: CopyLoc[1](loc0: &mut u256) + 4: ReadRef + 5: LdU256(1) + 6: Add + 7: MoveLoc[1](loc0: &mut u256) + 8: WriteRef + 9: Ret +} +inc_coin_at(Arg0: address) /* def_idx: 6 */ { +L1: loc0: &mut u256 +B0: + 0: MoveLoc[0](Arg0: address) + 1: MutBorrowGlobal[0](Coin) + 2: MutBorrowField[0](Coin._0: u256) + 3: StLoc[1](loc0: &mut u256) + 4: CopyLoc[1](loc0: &mut u256) + 5: ReadRef + 6: LdU256(1) + 7: Add + 8: MoveLoc[1](loc0: &mut u256) + 9: WriteRef + 10: Ret +} +inc_new(Arg0: &mut u256) /* def_idx: 7 */ { +B0: + 0: CopyLoc[0](Arg0: &mut u256) + 1: ReadRef + 2: LdU256(1) + 3: Add + 4: MoveLoc[0](Arg0: &mut u256) + 5: WriteRef + 6: Ret +} +inc_old(Arg0: &mut u256) /* def_idx: 8 */ { +B0: + 0: CopyLoc[0](Arg0: &mut u256) + 1: ReadRef + 2: LdU256(1) + 3: Add + 4: MoveLoc[0](Arg0: &mut u256) + 5: WriteRef + 6: Ret +} +inc_vec_coin_new(Arg0: vector, Arg1: u64) /* def_idx: 9 */ { +L2: loc0: &mut u256 +B0: + 0: MutBorrowLoc[0](Arg0: vector) + 1: MoveLoc[1](Arg1: u64) + 2: VecMutBorrow(6) + 3: MutBorrowField[0](Coin._0: u256) + 4: StLoc[2](loc0: &mut u256) + 5: CopyLoc[2](loc0: &mut u256) + 6: ReadRef + 7: LdU256(1) + 8: Add + 9: MoveLoc[2](loc0: &mut u256) + 10: WriteRef + 11: Ret +} +inc_vec_coin_old(Arg0: vector, Arg1: u64) /* def_idx: 10 */ { +B0: + 0: ImmBorrowLoc[0](Arg0: vector) + 1: CopyLoc[1](Arg1: 
u64) + 2: VecImmBorrow(6) + 3: ImmBorrowField[0](Coin._0: u256) + 4: ReadRef + 5: LdU256(1) + 6: Add + 7: MutBorrowLoc[0](Arg0: vector) + 8: MoveLoc[1](Arg1: u64) + 9: VecMutBorrow(6) + 10: MutBorrowField[0](Coin._0: u256) + 11: WriteRef + 12: Ret +} +inc_vec_new(Arg0: &mut vector, Arg1: u64) /* def_idx: 11 */ { +L2: loc0: &mut u256 +B0: + 0: MoveLoc[0](Arg0: &mut vector) + 1: MoveLoc[1](Arg1: u64) + 2: VecMutBorrow(0) + 3: StLoc[2](loc0: &mut u256) + 4: CopyLoc[2](loc0: &mut u256) + 5: ReadRef + 6: LdU256(1) + 7: Add + 8: MoveLoc[2](loc0: &mut u256) + 9: WriteRef + 10: Ret +} +inc_vec_old(Arg0: vector, Arg1: u64) /* def_idx: 12 */ { +B0: + 0: ImmBorrowLoc[0](Arg0: vector) + 1: CopyLoc[1](Arg1: u64) + 2: VecImmBorrow(0) + 3: ReadRef + 4: LdU256(1) + 5: Add + 6: MutBorrowLoc[0](Arg0: vector) + 7: MoveLoc[1](Arg1: u64) + 8: VecMutBorrow(0) + 9: WriteRef + 10: Ret +} +inc_vec_wrapped_coin_new(Arg0: vector>, Arg1: u64) /* def_idx: 13 */ { +L2: loc0: &mut u256 +B0: + 0: MutBorrowLoc[0](Arg0: vector>) + 1: MoveLoc[1](Arg1: u64) + 2: VecMutBorrow(10) + 3: MutBorrowFieldGeneric[0](Wrapper._0: Ty0) + 4: MutBorrowField[0](Coin._0: u256) + 5: StLoc[2](loc0: &mut u256) + 6: CopyLoc[2](loc0: &mut u256) + 7: ReadRef + 8: LdU256(1) + 9: Add + 10: MoveLoc[2](loc0: &mut u256) + 11: WriteRef + 12: Ret +} +inc_vec_wrapped_coin_old(Arg0: vector>, Arg1: u64) /* def_idx: 14 */ { +B0: + 0: ImmBorrowLoc[0](Arg0: vector>) + 1: CopyLoc[1](Arg1: u64) + 2: VecImmBorrow(10) + 3: ImmBorrowFieldGeneric[0](Wrapper._0: Ty0) + 4: ImmBorrowField[0](Coin._0: u256) + 5: ReadRef + 6: LdU256(1) + 7: Add + 8: MutBorrowLoc[0](Arg0: vector>) + 9: MoveLoc[1](Arg1: u64) + 10: VecMutBorrow(10) + 11: MutBorrowFieldGeneric[0](Wrapper._0: Ty0) + 12: MutBorrowField[0](Coin._0: u256) + 13: WriteRef + 14: Ret +} +inc_wrapped_coin_new(Arg0: &mut Wrapper) /* def_idx: 15 */ { +L1: loc0: &mut u256 +B0: + 0: MoveLoc[0](Arg0: &mut Wrapper) + 1: MutBorrowFieldGeneric[0](Wrapper._0: Ty0) + 2: MutBorrowField[0](Coin._0: 
u256) + 3: StLoc[1](loc0: &mut u256) + 4: CopyLoc[1](loc0: &mut u256) + 5: ReadRef + 6: LdU256(1) + 7: Add + 8: MoveLoc[1](loc0: &mut u256) + 9: WriteRef + 10: Ret +} +inc_wrapped_coin_old(Arg0: &mut Wrapper) /* def_idx: 16 */ { +B0: + 0: CopyLoc[0](Arg0: &mut Wrapper) + 1: ImmBorrowFieldGeneric[0](Wrapper._0: Ty0) + 2: ImmBorrowField[0](Coin._0: u256) + 3: ReadRef + 4: LdU256(1) + 5: Add + 6: MoveLoc[0](Arg0: &mut Wrapper) + 7: MutBorrowFieldGeneric[0](Wrapper._0: Ty0) + 8: MutBorrowField[0](Coin._0: u256) + 9: WriteRef + 10: Ret +} +} +============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/op-equal/valid0.move b/third_party/move/move-compiler-v2/tests/op-equal/valid0.move new file mode 100644 index 0000000000000..329e680102db5 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/valid0.move @@ -0,0 +1,78 @@ +module 0x42::test { + struct Coin(u256) has drop, key; + + struct Wrapper(T) has drop, key; + + fun add1_old(x: u256): u256 { + x = x + 1; + x + } + + fun add1_new(x: u256): u256 { + x += 1; + x + } + + fun inc_new(x: &mut u256) { + *x += 1; + } + + fun inc_old(x: &mut u256) { + *x = *x + 1; + } + + fun coin_inc_new_1(self: &mut Coin) { + self.0 += 1; + } + + fun coin_inc_new_2(self: &mut Coin) { + let p = &mut self.0; + *p = *p + 1; + } + + fun coin_inc_old_1(self: &mut Coin) { + self.0 = self.0 + 1; + } + + fun coin_inc_old_2(self: &mut Coin) { + let p = &mut self.0; + *p = *p + 1; + } + + fun inc_wrapped_coin_new(x: &mut Wrapper) { + x.0.0 += 1; + } + + fun inc_wrapped_coin_old(x: &mut Wrapper) { + x.0.0 = x.0.0 + 1; + } + + fun inc_vec_new(x: &mut vector, index: u64) { + x[index] += 1; + } + + fun inc_vec_old(x: vector, index: u64) { + x[index] = x[index] + 1; + } + + fun inc_vec_coin_new(x: vector, index: u64) { + x[index].0 += 1; + } + + fun inc_vec_coin_old(x: vector, index: u64) { + x[index].0 = x[index].0 + 1; + } + + fun inc_vec_wrapped_coin_new(x: vector>, index: u64) { + 
x[index].0.0 += 1; + } + + fun inc_vec_wrapped_coin_old(x: vector>, index: u64) { + x[index].0.0 = x[index].0.0 + 1; + } + + fun inc_coin_at(addr: address) acquires Coin { + let coin = &mut Coin[addr]; + coin.0 += 1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/valid1.exp b/third_party/move/move-compiler-v2/tests/op-equal/valid1.exp new file mode 100644 index 0000000000000..d75c575a32c78 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/valid1.exp @@ -0,0 +1,665 @@ +// -- Model dump before bytecode pipeline +module 0x42::test { + struct Coin { + 0: u256, + } + struct Wrapper { + 0: T, + } + private fun bitand_vec_coin_new(x: vector,index: u64) { + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0(vector::borrow_mut(Borrow(Mutable)(x), index))); + $t1 = BitAnd(Deref($t1), 42) + }; + Tuple() + } + private fun bitor_vec_new(x: &mut vector,index: u64) { + { + let $t1: &mut u256 = vector::borrow_mut(x, index); + $t1 = BitOr(Deref($t1), 42) + }; + Tuple() + } + private fun coin_double(self: &mut Coin) { + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0<&mut Coin>(self)); + $t1 = Mul(Deref($t1), 2) + }; + Tuple() + } + private fun coin_mod_2(self: &mut Coin) { + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0<&mut Coin>(self)); + $t1 = Mod(Deref($t1), 2) + }; + Tuple() + } + private fun half_wrapped_coin_new(x: &mut Wrapper) { + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0(select test::Wrapper.0<&mut Wrapper>(x))); + $t1 = Div(Deref($t1), 2) + }; + Tuple() + } + private fun shl_vec_wrapped_coin_old(x: vector>,index: u64) { + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0(select test::Wrapper.0>(vector::borrow_mut>(Borrow(Mutable)(x), index)))); + $t1 = Shl(Deref($t1), 1) + }; + Tuple() + } + private fun shr_coin_at(addr: address) + acquires Coin(*) + { + { + let coin: &mut Coin = BorrowGlobal(Mutable)(addr); + { + let $t1: &mut u256 = Borrow(Mutable)(select 
test::Coin.0<&mut Coin>(coin)); + $t1 = Shr(Deref($t1), 1) + }; + Tuple() + } + } + private fun sub1(x: &mut u256) { + { + let $t1: &mut u256 = x; + $t1 = Sub(Deref($t1), 1) + }; + Tuple() + } + private fun xor_vec_wrapped_coin_new(x: vector>,index: u64) { + { + let $t1: &mut u256 = Borrow(Mutable)(select test::Coin.0(select test::Wrapper.0>(vector::borrow_mut>(Borrow(Mutable)(x), index)))); + $t1 = Xor(Deref($t1), 1) + }; + Tuple() + } +} // end 0x42::test + +// -- Sourcified model before bytecode pipeline +module 0x42::test { + struct Coin has drop, key { + 0: u256, + } + struct Wrapper has drop, key { + 0: T, + } + fun bitand_vec_coin_new(x: vector, index: u64) { + { + let $t1 = &mut 0x1::vector::borrow_mut(&mut x, index).0; + *$t1 = *$t1 & 42u256 + }; + } + fun bitor_vec_new(x: &mut vector, index: u64) { + { + let $t1 = 0x1::vector::borrow_mut(x, index); + *$t1 = *$t1 | 42u256 + }; + } + fun coin_double(self: &mut Coin) { + { + let $t1 = &mut self.0; + *$t1 = *$t1 * 2u256 + }; + } + fun coin_mod_2(self: &mut Coin) { + { + let $t1 = &mut self.0; + *$t1 = *$t1 % 2u256 + }; + } + fun half_wrapped_coin_new(x: &mut Wrapper) { + { + let $t1 = &mut x.0.0; + *$t1 = *$t1 / 2u256 + }; + } + fun shl_vec_wrapped_coin_old(x: vector>, index: u64) { + { + let $t1 = &mut 0x1::vector::borrow_mut>(&mut x, index).0.0; + *$t1 = *$t1 << 1u8 + }; + } + fun shr_coin_at(addr: address) + acquires Coin + { + let coin = borrow_global_mut(addr); + { + let $t1 = &mut coin.0; + *$t1 = *$t1 >> 1u8 + }; + } + fun sub1(x: &mut u256) { + { + let $t1 = x; + *$t1 = *$t1 - 1u256 + }; + } + fun xor_vec_wrapped_coin_new(x: vector>, index: u64) { + { + let $t1 = &mut 0x1::vector::borrow_mut>(&mut x, index).0.0; + *$t1 = *$t1 ^ 1u256 + }; + } +} + +============ initial bytecode ================ + +[variant baseline] +fun test::bitand_vec_coin_new($t0: vector<0x42::test::Coin>, $t1: u64) { + var $t2: &mut u256 + var $t3: &mut 0x42::test::Coin + var $t4: &mut vector<0x42::test::Coin> + var $t5: u256 + 
var $t6: u256 + var $t7: u256 + 0: $t4 := borrow_local($t0) + 1: $t3 := vector::borrow_mut<0x42::test::Coin>($t4, $t1) + 2: $t2 := borrow_field<0x42::test::Coin>.0($t3) + 3: $t6 := read_ref($t2) + 4: $t7 := 42 + 5: $t5 := &($t6, $t7) + 6: write_ref($t2, $t5) + 7: return () +} + + +[variant baseline] +fun test::bitor_vec_new($t0: &mut vector, $t1: u64) { + var $t2: &mut u256 + var $t3: u256 + var $t4: u256 + var $t5: u256 + 0: $t2 := vector::borrow_mut($t0, $t1) + 1: $t4 := read_ref($t2) + 2: $t5 := 42 + 3: $t3 := |($t4, $t5) + 4: write_ref($t2, $t3) + 5: return () +} + + +[variant baseline] +fun test::coin_double($t0: &mut 0x42::test::Coin) { + var $t1: &mut u256 + var $t2: u256 + var $t3: u256 + var $t4: u256 + 0: $t1 := borrow_field<0x42::test::Coin>.0($t0) + 1: $t3 := read_ref($t1) + 2: $t4 := 2 + 3: $t2 := *($t3, $t4) + 4: write_ref($t1, $t2) + 5: return () +} + + +[variant baseline] +fun test::coin_mod_2($t0: &mut 0x42::test::Coin) { + var $t1: &mut u256 + var $t2: u256 + var $t3: u256 + var $t4: u256 + 0: $t1 := borrow_field<0x42::test::Coin>.0($t0) + 1: $t3 := read_ref($t1) + 2: $t4 := 2 + 3: $t2 := %($t3, $t4) + 4: write_ref($t1, $t2) + 5: return () +} + + +[variant baseline] +fun test::half_wrapped_coin_new($t0: &mut 0x42::test::Wrapper<0x42::test::Coin>) { + var $t1: &mut u256 + var $t2: &mut 0x42::test::Coin + var $t3: u256 + var $t4: u256 + var $t5: u256 + 0: $t2 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t0) + 1: $t1 := borrow_field<0x42::test::Coin>.0($t2) + 2: $t4 := read_ref($t1) + 3: $t5 := 2 + 4: $t3 := /($t4, $t5) + 5: write_ref($t1, $t3) + 6: return () +} + + +[variant baseline] +fun test::shl_vec_wrapped_coin_old($t0: vector<0x42::test::Wrapper<0x42::test::Coin>>, $t1: u64) { + var $t2: &mut u256 + var $t3: &mut 0x42::test::Coin + var $t4: &mut 0x42::test::Wrapper<0x42::test::Coin> + var $t5: &mut vector<0x42::test::Wrapper<0x42::test::Coin>> + var $t6: u256 + var $t7: u256 + var $t8: u8 + 0: $t5 := borrow_local($t0) + 1: $t4 := 
vector::borrow_mut<0x42::test::Wrapper<0x42::test::Coin>>($t5, $t1) + 2: $t3 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t4) + 3: $t2 := borrow_field<0x42::test::Coin>.0($t3) + 4: $t7 := read_ref($t2) + 5: $t8 := 1 + 6: $t6 := <<($t7, $t8) + 7: write_ref($t2, $t6) + 8: return () +} + + +[variant baseline] +fun test::shr_coin_at($t0: address) { + var $t1: &mut 0x42::test::Coin + var $t2: &mut u256 + var $t3: u256 + var $t4: u256 + var $t5: u8 + 0: $t1 := borrow_global<0x42::test::Coin>($t0) + 1: $t2 := borrow_field<0x42::test::Coin>.0($t1) + 2: $t4 := read_ref($t2) + 3: $t5 := 1 + 4: $t3 := >>($t4, $t5) + 5: write_ref($t2, $t3) + 6: return () +} + + +[variant baseline] +fun test::sub1($t0: &mut u256) { + var $t1: &mut u256 + var $t2: u256 + var $t3: u256 + var $t4: u256 + 0: $t1 := infer($t0) + 1: $t3 := read_ref($t1) + 2: $t4 := 1 + 3: $t2 := -($t3, $t4) + 4: write_ref($t1, $t2) + 5: return () +} + + +[variant baseline] +fun test::xor_vec_wrapped_coin_new($t0: vector<0x42::test::Wrapper<0x42::test::Coin>>, $t1: u64) { + var $t2: &mut u256 + var $t3: &mut 0x42::test::Coin + var $t4: &mut 0x42::test::Wrapper<0x42::test::Coin> + var $t5: &mut vector<0x42::test::Wrapper<0x42::test::Coin>> + var $t6: u256 + var $t7: u256 + var $t8: u256 + 0: $t5 := borrow_local($t0) + 1: $t4 := vector::borrow_mut<0x42::test::Wrapper<0x42::test::Coin>>($t5, $t1) + 2: $t3 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t4) + 3: $t2 := borrow_field<0x42::test::Coin>.0($t3) + 4: $t7 := read_ref($t2) + 5: $t8 := 1 + 6: $t6 := ^($t7, $t8) + 7: write_ref($t2, $t6) + 8: return () +} + +============ after LiveVarAnalysisProcessor: ================ + +[variant baseline] +fun test::bitand_vec_coin_new($t0: vector<0x42::test::Coin>, $t1: u64) { + var $t2: &mut u256 + var $t3: &mut 0x42::test::Coin + var $t4: &mut vector<0x42::test::Coin> + var $t5: u256 [unused] + var $t6: u256 + var $t7: u256 + # live vars: $t0, $t1 + 0: $t4 := borrow_local($t0) + # live vars: $t1, $t4 + 1: 
$t3 := vector::borrow_mut<0x42::test::Coin>($t4, $t1) + # live vars: $t3 + 2: $t2 := borrow_field<0x42::test::Coin>.0($t3) + # live vars: $t2 + 3: $t6 := read_ref($t2) + # live vars: $t2, $t6 + 4: $t7 := 42 + # live vars: $t2, $t6, $t7 + 5: $t6 := &($t6, $t7) + # live vars: $t2, $t6 + 6: write_ref($t2, $t6) + # live vars: + 7: return () +} + + +[variant baseline] +fun test::bitor_vec_new($t0: &mut vector, $t1: u64) { + var $t2: &mut u256 + var $t3: u256 [unused] + var $t4: u256 + var $t5: u256 + # live vars: $t0, $t1 + 0: $t2 := vector::borrow_mut($t0, $t1) + # live vars: $t2 + 1: $t4 := read_ref($t2) + # live vars: $t2, $t4 + 2: $t5 := 42 + # live vars: $t2, $t4, $t5 + 3: $t4 := |($t4, $t5) + # live vars: $t2, $t4 + 4: write_ref($t2, $t4) + # live vars: + 5: return () +} + + +[variant baseline] +fun test::coin_double($t0: &mut 0x42::test::Coin) { + var $t1: &mut u256 + var $t2: u256 [unused] + var $t3: u256 + var $t4: u256 + # live vars: $t0 + 0: $t1 := borrow_field<0x42::test::Coin>.0($t0) + # live vars: $t1 + 1: $t3 := read_ref($t1) + # live vars: $t1, $t3 + 2: $t4 := 2 + # live vars: $t1, $t3, $t4 + 3: $t3 := *($t3, $t4) + # live vars: $t1, $t3 + 4: write_ref($t1, $t3) + # live vars: + 5: return () +} + + +[variant baseline] +fun test::coin_mod_2($t0: &mut 0x42::test::Coin) { + var $t1: &mut u256 + var $t2: u256 [unused] + var $t3: u256 + var $t4: u256 + # live vars: $t0 + 0: $t1 := borrow_field<0x42::test::Coin>.0($t0) + # live vars: $t1 + 1: $t3 := read_ref($t1) + # live vars: $t1, $t3 + 2: $t4 := 2 + # live vars: $t1, $t3, $t4 + 3: $t3 := %($t3, $t4) + # live vars: $t1, $t3 + 4: write_ref($t1, $t3) + # live vars: + 5: return () +} + + +[variant baseline] +fun test::half_wrapped_coin_new($t0: &mut 0x42::test::Wrapper<0x42::test::Coin>) { + var $t1: &mut u256 + var $t2: &mut 0x42::test::Coin + var $t3: u256 [unused] + var $t4: u256 + var $t5: u256 + # live vars: $t0 + 0: $t2 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t0) + # live vars: $t2 + 1: 
$t1 := borrow_field<0x42::test::Coin>.0($t2) + # live vars: $t1 + 2: $t4 := read_ref($t1) + # live vars: $t1, $t4 + 3: $t5 := 2 + # live vars: $t1, $t4, $t5 + 4: $t4 := /($t4, $t5) + # live vars: $t1, $t4 + 5: write_ref($t1, $t4) + # live vars: + 6: return () +} + + +[variant baseline] +fun test::shl_vec_wrapped_coin_old($t0: vector<0x42::test::Wrapper<0x42::test::Coin>>, $t1: u64) { + var $t2: &mut u256 + var $t3: &mut 0x42::test::Coin + var $t4: &mut 0x42::test::Wrapper<0x42::test::Coin> + var $t5: &mut vector<0x42::test::Wrapper<0x42::test::Coin>> + var $t6: u256 [unused] + var $t7: u256 + var $t8: u8 + # live vars: $t0, $t1 + 0: $t5 := borrow_local($t0) + # live vars: $t1, $t5 + 1: $t4 := vector::borrow_mut<0x42::test::Wrapper<0x42::test::Coin>>($t5, $t1) + # live vars: $t4 + 2: $t3 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t4) + # live vars: $t3 + 3: $t2 := borrow_field<0x42::test::Coin>.0($t3) + # live vars: $t2 + 4: $t7 := read_ref($t2) + # live vars: $t2, $t7 + 5: $t8 := 1 + # live vars: $t2, $t7, $t8 + 6: $t7 := <<($t7, $t8) + # live vars: $t2, $t7 + 7: write_ref($t2, $t7) + # live vars: + 8: return () +} + + +[variant baseline] +fun test::shr_coin_at($t0: address) { + var $t1: &mut 0x42::test::Coin + var $t2: &mut u256 + var $t3: u256 [unused] + var $t4: u256 + var $t5: u8 + # live vars: $t0 + 0: $t1 := borrow_global<0x42::test::Coin>($t0) + # live vars: $t1 + 1: $t2 := borrow_field<0x42::test::Coin>.0($t1) + # live vars: $t2 + 2: $t4 := read_ref($t2) + # live vars: $t2, $t4 + 3: $t5 := 1 + # live vars: $t2, $t4, $t5 + 4: $t4 := >>($t4, $t5) + # live vars: $t2, $t4 + 5: write_ref($t2, $t4) + # live vars: + 6: return () +} + + +[variant baseline] +fun test::sub1($t0: &mut u256) { + var $t1: &mut u256 [unused] + var $t2: u256 [unused] + var $t3: u256 + var $t4: u256 + # live vars: $t0 + 0: $t3 := read_ref($t0) + # live vars: $t0, $t3 + 1: $t4 := 1 + # live vars: $t0, $t3, $t4 + 2: $t3 := -($t3, $t4) + # live vars: $t0, $t3 + 3: 
write_ref($t0, $t3) + # live vars: + 4: return () +} + + +[variant baseline] +fun test::xor_vec_wrapped_coin_new($t0: vector<0x42::test::Wrapper<0x42::test::Coin>>, $t1: u64) { + var $t2: &mut u256 + var $t3: &mut 0x42::test::Coin + var $t4: &mut 0x42::test::Wrapper<0x42::test::Coin> + var $t5: &mut vector<0x42::test::Wrapper<0x42::test::Coin>> + var $t6: u256 [unused] + var $t7: u256 + var $t8: u256 + # live vars: $t0, $t1 + 0: $t5 := borrow_local($t0) + # live vars: $t1, $t5 + 1: $t4 := vector::borrow_mut<0x42::test::Wrapper<0x42::test::Coin>>($t5, $t1) + # live vars: $t4 + 2: $t3 := borrow_field<0x42::test::Wrapper<0x42::test::Coin>>.0($t4) + # live vars: $t3 + 3: $t2 := borrow_field<0x42::test::Coin>.0($t3) + # live vars: $t2 + 4: $t7 := read_ref($t2) + # live vars: $t2, $t7 + 5: $t8 := 1 + # live vars: $t2, $t7, $t8 + 6: $t7 := ^($t7, $t8) + # live vars: $t2, $t7 + 7: write_ref($t2, $t7) + # live vars: + 8: return () +} + + +============ disassembled file-format ================== +// Move bytecode v7 +module 42.test { +struct Coin has drop, key { + _0: u256 +} +struct Wrapper has drop, key { + _0: Ty0 +} + +bitand_vec_coin_new(Arg0: vector, Arg1: u64) /* def_idx: 0 */ { +L2: loc0: &mut u256 +B0: + 0: MutBorrowLoc[0](Arg0: vector) + 1: MoveLoc[1](Arg1: u64) + 2: VecMutBorrow(2) + 3: MutBorrowField[0](Coin._0: u256) + 4: StLoc[2](loc0: &mut u256) + 5: CopyLoc[2](loc0: &mut u256) + 6: ReadRef + 7: LdU256(42) + 8: BitAnd + 9: MoveLoc[2](loc0: &mut u256) + 10: WriteRef + 11: Ret +} +bitor_vec_new(Arg0: &mut vector, Arg1: u64) /* def_idx: 1 */ { +L2: loc0: &mut u256 +B0: + 0: MoveLoc[0](Arg0: &mut vector) + 1: MoveLoc[1](Arg1: u64) + 2: VecMutBorrow(5) + 3: StLoc[2](loc0: &mut u256) + 4: CopyLoc[2](loc0: &mut u256) + 5: ReadRef + 6: LdU256(42) + 7: BitOr + 8: MoveLoc[2](loc0: &mut u256) + 9: WriteRef + 10: Ret +} +coin_double(Arg0: &mut Coin) /* def_idx: 2 */ { +L1: loc0: &mut u256 +B0: + 0: MoveLoc[0](Arg0: &mut Coin) + 1: MutBorrowField[0](Coin._0: u256) + 2: 
StLoc[1](loc0: &mut u256) + 3: CopyLoc[1](loc0: &mut u256) + 4: ReadRef + 5: LdU256(2) + 6: Mul + 7: MoveLoc[1](loc0: &mut u256) + 8: WriteRef + 9: Ret +} +coin_mod_2(Arg0: &mut Coin) /* def_idx: 3 */ { +L1: loc0: &mut u256 +B0: + 0: MoveLoc[0](Arg0: &mut Coin) + 1: MutBorrowField[0](Coin._0: u256) + 2: StLoc[1](loc0: &mut u256) + 3: CopyLoc[1](loc0: &mut u256) + 4: ReadRef + 5: LdU256(2) + 6: Mod + 7: MoveLoc[1](loc0: &mut u256) + 8: WriteRef + 9: Ret +} +half_wrapped_coin_new(Arg0: &mut Wrapper) /* def_idx: 4 */ { +L1: loc0: &mut u256 +B0: + 0: MoveLoc[0](Arg0: &mut Wrapper) + 1: MutBorrowFieldGeneric[0](Wrapper._0: Ty0) + 2: MutBorrowField[0](Coin._0: u256) + 3: StLoc[1](loc0: &mut u256) + 4: CopyLoc[1](loc0: &mut u256) + 5: ReadRef + 6: LdU256(2) + 7: Div + 8: MoveLoc[1](loc0: &mut u256) + 9: WriteRef + 10: Ret +} +shl_vec_wrapped_coin_old(Arg0: vector>, Arg1: u64) /* def_idx: 5 */ { +L2: loc0: &mut u256 +B0: + 0: MutBorrowLoc[0](Arg0: vector>) + 1: MoveLoc[1](Arg1: u64) + 2: VecMutBorrow(9) + 3: MutBorrowFieldGeneric[0](Wrapper._0: Ty0) + 4: MutBorrowField[0](Coin._0: u256) + 5: StLoc[2](loc0: &mut u256) + 6: CopyLoc[2](loc0: &mut u256) + 7: ReadRef + 8: LdU8(1) + 9: Shl + 10: MoveLoc[2](loc0: &mut u256) + 11: WriteRef + 12: Ret +} +shr_coin_at(Arg0: address) /* def_idx: 6 */ { +L1: loc0: &mut u256 +B0: + 0: MoveLoc[0](Arg0: address) + 1: MutBorrowGlobal[0](Coin) + 2: MutBorrowField[0](Coin._0: u256) + 3: StLoc[1](loc0: &mut u256) + 4: CopyLoc[1](loc0: &mut u256) + 5: ReadRef + 6: LdU8(1) + 7: Shr + 8: MoveLoc[1](loc0: &mut u256) + 9: WriteRef + 10: Ret +} +sub1(Arg0: &mut u256) /* def_idx: 7 */ { +B0: + 0: CopyLoc[0](Arg0: &mut u256) + 1: ReadRef + 2: LdU256(1) + 3: Sub + 4: MoveLoc[0](Arg0: &mut u256) + 5: WriteRef + 6: Ret +} +xor_vec_wrapped_coin_new(Arg0: vector>, Arg1: u64) /* def_idx: 8 */ { +L2: loc0: &mut u256 +B0: + 0: MutBorrowLoc[0](Arg0: vector>) + 1: MoveLoc[1](Arg1: u64) + 2: VecMutBorrow(9) + 3: MutBorrowFieldGeneric[0](Wrapper._0: Ty0) + 4: 
MutBorrowField[0](Coin._0: u256) + 5: StLoc[2](loc0: &mut u256) + 6: CopyLoc[2](loc0: &mut u256) + 7: ReadRef + 8: LdU256(1) + 9: Xor + 10: MoveLoc[2](loc0: &mut u256) + 11: WriteRef + 12: Ret +} +} +============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/op-equal/valid1.move b/third_party/move/move-compiler-v2/tests/op-equal/valid1.move new file mode 100644 index 0000000000000..0ddf501781519 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/valid1.move @@ -0,0 +1,42 @@ +module 0x42::test { + struct Coin(u256) has drop, key; + + struct Wrapper(T) has drop, key; + + fun sub1(x: &mut u256) { + *x -= 1; + } + + fun coin_double(self: &mut Coin) { + self.0 *= 2; + } + + fun coin_mod_2(self: &mut Coin) { + self.0 %= 2; + } + + fun half_wrapped_coin_new(x: &mut Wrapper) { + x.0.0 /= 2; + } + + fun bitor_vec_new(x: &mut vector, index: u64) { + x[index] |= 42; + } + + fun bitand_vec_coin_new(x: vector, index: u64) { + x[index].0 &= 42; + } + + fun xor_vec_wrapped_coin_new(x: vector>, index: u64) { + x[index].0.0 ^= 1; + } + + fun shl_vec_wrapped_coin_old(x: vector>, index: u64) { + x[index].0.0 <<= 1; + } + + fun shr_coin_at(addr: address) acquires Coin { + let coin = &mut Coin[addr]; + coin.0 >>= 1; + } +} diff --git a/third_party/move/move-compiler-v2/tests/op-equal/valid2.exp b/third_party/move/move-compiler-v2/tests/op-equal/valid2.exp new file mode 100644 index 0000000000000..e8c5f393627ac --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/valid2.exp @@ -0,0 +1,214 @@ +// -- Model dump before bytecode pipeline +module 0xc0ffee::m { + struct S { + x: u64, + } + private fun foo(self: &mut S): u64 { + { + let $t1: &mut u64 = Borrow(Mutable)(select m::S.x<&mut S>(self)); + $t1 = Add(Deref($t1), 1) + }; + 1 + } + public fun test(): u64 { + { + let s: S = pack m::S(0); + { + let $t2: u64 = m::foo(Borrow(Mutable)(s)); + { + let $t1: &mut u64 = Borrow(Mutable)(select m::S.x(s)); + $t1 = 
Add(Deref($t1), $t2) + } + }; + select m::S.x(s) + } + } +} // end 0xc0ffee::m + +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + struct S has drop { + x: u64, + } + fun foo(self: &mut S): u64 { + { + let $t1 = &mut self.x; + *$t1 = *$t1 + 1 + }; + 1 + } + public fun test(): u64 { + let s = S{x: 0}; + { + let $t2 = foo(&mut s); + let $t1 = &mut s.x; + *$t1 = *$t1 + $t2 + }; + s.x + } +} + +============ initial bytecode ================ + +[variant baseline] +fun m::foo($t0: &mut 0xc0ffee::m::S): u64 { + var $t1: u64 + var $t2: &mut u64 + var $t3: u64 + var $t4: u64 + var $t5: u64 + 0: $t2 := borrow_field<0xc0ffee::m::S>.x($t0) + 1: $t4 := read_ref($t2) + 2: $t5 := 1 + 3: $t3 := +($t4, $t5) + 4: write_ref($t2, $t3) + 5: $t1 := 1 + 6: return $t1 +} + + +[variant baseline] +public fun m::test(): u64 { + var $t0: u64 + var $t1: 0xc0ffee::m::S + var $t2: u64 + var $t3: u64 + var $t4: &mut 0xc0ffee::m::S + var $t5: &mut u64 + var $t6: &mut 0xc0ffee::m::S + var $t7: u64 + var $t8: u64 + var $t9: &0xc0ffee::m::S + var $t10: &u64 + 0: $t2 := 0 + 1: $t1 := pack 0xc0ffee::m::S($t2) + 2: $t4 := borrow_local($t1) + 3: $t3 := m::foo($t4) + 4: $t6 := borrow_local($t1) + 5: $t5 := borrow_field<0xc0ffee::m::S>.x($t6) + 6: $t8 := read_ref($t5) + 7: $t7 := +($t8, $t3) + 8: write_ref($t5, $t7) + 9: $t9 := borrow_local($t1) + 10: $t10 := borrow_field<0xc0ffee::m::S>.x($t9) + 11: $t0 := read_ref($t10) + 12: return $t0 +} + +============ after LiveVarAnalysisProcessor: ================ + +[variant baseline] +fun m::foo($t0: &mut 0xc0ffee::m::S): u64 { + var $t1: u64 [unused] + var $t2: &mut u64 + var $t3: u64 [unused] + var $t4: u64 + var $t5: u64 + # live vars: $t0 + 0: $t2 := borrow_field<0xc0ffee::m::S>.x($t0) + # live vars: $t2 + 1: $t4 := read_ref($t2) + # live vars: $t2, $t4 + 2: $t5 := 1 + # live vars: $t2, $t4, $t5 + 3: $t4 := +($t4, $t5) + # live vars: $t2, $t4 + 4: write_ref($t2, $t4) + # live vars: + 5: $t4 := 1 + # live vars: $t4 + 6: return $t4 +} + + 
+[variant baseline] +public fun m::test(): u64 { + var $t0: u64 [unused] + var $t1: 0xc0ffee::m::S + var $t2: u64 + var $t3: u64 [unused] + var $t4: &mut 0xc0ffee::m::S + var $t5: &mut u64 + var $t6: &mut 0xc0ffee::m::S [unused] + var $t7: u64 [unused] + var $t8: u64 + var $t9: &0xc0ffee::m::S + var $t10: &u64 + # live vars: + 0: $t2 := 0 + # live vars: $t2 + 1: $t1 := pack 0xc0ffee::m::S($t2) + # live vars: $t1 + 2: $t4 := borrow_local($t1) + # live vars: $t1, $t4 + 3: $t2 := m::foo($t4) + # live vars: $t1, $t2 + 4: $t4 := borrow_local($t1) + # live vars: $t1, $t2, $t4 + 5: $t5 := borrow_field<0xc0ffee::m::S>.x($t4) + # live vars: $t1, $t2, $t5 + 6: $t8 := read_ref($t5) + # live vars: $t1, $t2, $t5, $t8 + 7: $t2 := +($t8, $t2) + # live vars: $t1, $t2, $t5 + 8: write_ref($t5, $t2) + # live vars: $t1 + 9: $t9 := borrow_local($t1) + # live vars: $t9 + 10: $t10 := borrow_field<0xc0ffee::m::S>.x($t9) + # live vars: $t10 + 11: $t2 := read_ref($t10) + # live vars: $t2 + 12: return $t2 +} + + +============ disassembled file-format ================== +// Move bytecode v7 +module c0ffee.m { +struct S has drop { + x: u64 +} + +foo(Arg0: &mut S): u64 /* def_idx: 0 */ { +L1: loc0: &mut u64 +B0: + 0: MoveLoc[0](Arg0: &mut S) + 1: MutBorrowField[0](S.x: u64) + 2: StLoc[1](loc0: &mut u64) + 3: CopyLoc[1](loc0: &mut u64) + 4: ReadRef + 5: LdU64(1) + 6: Add + 7: MoveLoc[1](loc0: &mut u64) + 8: WriteRef + 9: LdU64(1) + 10: Ret +} +public test(): u64 /* def_idx: 1 */ { +L0: loc0: S +L1: loc1: u64 +L2: loc2: &mut u64 +B0: + 0: LdU64(0) + 1: Pack[0](S) + 2: StLoc[0](loc0: S) + 3: MutBorrowLoc[0](loc0: S) + 4: Call foo(&mut S): u64 + 5: StLoc[1](loc1: u64) + 6: MutBorrowLoc[0](loc0: S) + 7: MutBorrowField[0](S.x: u64) + 8: StLoc[2](loc2: &mut u64) + 9: CopyLoc[2](loc2: &mut u64) + 10: ReadRef + 11: MoveLoc[1](loc1: u64) + 12: Add + 13: MoveLoc[2](loc2: &mut u64) + 14: WriteRef + 15: ImmBorrowLoc[0](loc0: S) + 16: ImmBorrowField[0](S.x: u64) + 17: ReadRef + 18: Ret +} +} +============ 
bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/op-equal/valid2.move b/third_party/move/move-compiler-v2/tests/op-equal/valid2.move new file mode 100644 index 0000000000000..2d6ca0632dd4f --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/op-equal/valid2.move @@ -0,0 +1,16 @@ +module 0xc0ffee::m { + struct S has drop { + x: u64, + } + + fun foo(self: &mut S): u64 { + self.x += 1; + 1 + } + + public fun test(): u64 { + let s = S { x: 0 }; + s.x += s.foo(); + s.x + } +} diff --git a/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.exp b/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.exp index 64d4f1e8ca18c..2845d31a0fb6f 100644 --- a/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.exp +++ b/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `s` of type `M::X` does not have the `drop` ability +error: local `s` of type `X` does not have the `drop` ability ┌─ tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.move:6:18 │ 6 │ let u = &s.u; diff --git a/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.no-opt.exp b/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.no-opt.exp index 64d4f1e8ca18c..2845d31a0fb6f 100644 --- a/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.no-opt.exp +++ b/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.no-opt.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `s` of type `M::X` does not have the 
`drop` ability +error: local `s` of type `X` does not have the `drop` ability ┌─ tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.move:6:18 │ 6 │ let u = &s.u; diff --git a/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.old.exp b/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.old.exp index 64d4f1e8ca18c..2845d31a0fb6f 100644 --- a/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.old.exp +++ b/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.old.exp @@ -1,6 +1,6 @@ Diagnostics: -error: local `s` of type `M::X` does not have the `drop` ability +error: local `s` of type `X` does not have the `drop` ability ┌─ tests/reference-safety/v1-borrow-tests/return_with_borrowed_loc_resource_invalid.move:6:18 │ 6 │ let u = &s.u; diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/always_false_branch.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/always_false_branch.exp index 1b0ab31f62f3e..51f43c5de7f9b 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/always_false_branch.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/always_false_branch.exp @@ -14,5 +14,17 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + fun test(): u64 { + if (false) { + let i = 0; + i = i + 1; + return i + }; + 0 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/assign_unpack_references.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/assign_unpack_references.exp index 338b267835624..b14d08d9ae98f 100644 --- 
a/third_party/move/move-compiler-v2/tests/simplifier-elimination/assign_unpack_references.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/assign_unpack_references.exp @@ -1,8 +1,8 @@ // -- Model dump before bytecode pipeline module 0x8675309::M { struct R { - s1: M::S, - s2: M::S, + s1: S, + s2: S, } struct S { f: u64, @@ -11,12 +11,12 @@ module 0x8675309::M { { let f: u64; { - let s2: M::S; + let s2: S; M::R{ s1: M::S{ f }, s2 } = pack M::R(pack M::S(0), pack M::S(1)); f; s2; f: u64 = 0; - s2: M::S = pack M::S(0); + s2: S = pack M::S(0); f; s2; Tuple() @@ -27,12 +27,12 @@ module 0x8675309::M { { let f: &u64; { - let s2: &M::S; + let s2: &S; M::R{ s1: M::S{ f }, s2 } = Borrow(Immutable)(pack M::R(pack M::S(0), pack M::S(1))); f; s2; f: &u64 = Borrow(Immutable)(0); - s2: &M::S = Borrow(Immutable)(pack M::S(0)); + s2: &S = Borrow(Immutable)(pack M::S(0)); f; s2; Tuple() @@ -43,12 +43,12 @@ module 0x8675309::M { { let f: &mut u64; { - let s2: &mut M::S; + let s2: &mut S; M::R{ s1: M::S{ f }, s2 } = Borrow(Mutable)(pack M::R(pack M::S(0), pack M::S(1))); f; s2; f: &mut u64 = Borrow(Mutable)(0); - s2: &mut M::S = Borrow(Mutable)(pack M::S(0)); + s2: &mut S = Borrow(Mutable)(pack M::S(0)); f; s2; Tuple() @@ -57,15 +57,59 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + s1: S, + s2: S, + } + struct S has drop { + f: u64, + } + fun t0() { + let f; + let s2; + R{s1: S{f: f},s2: s2} = R{s1: S{f: 0},s2: S{f: 1}}; + f; + s2; + f = 0; + s2 = S{f: 0}; + f; + s2; + } + fun t1() { + let f; + let s2; + R{s1: S{f: f},s2: s2} = &R{s1: S{f: 0},s2: S{f: 1}}; + f; + s2; + f = &0; + s2 = &S{f: 0}; + f; + s2; + } + fun t2() { + let f; + let s2; + R{s1: S{f: f},s2: s2} = &mut R{s1: S{f: 0},s2: S{f: 1}}; + f; + s2; + f = &mut 0; + s2 = &mut S{f: 0}; + f; + s2; + } +} + Diagnostics: -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not 
have the `drop` ability ┌─ tests/simplifier-elimination/assign_unpack_references.move:17:33 │ 17 │ R { s1: S { f }, s2 } = &R { s1: S{f: 0}, s2: S{f: 1} }; f; s2; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ still borrowed but will be implicitly dropped later since it is no longer used -error: value of type `M::R` does not have the `drop` ability +error: value of type `R` does not have the `drop` ability ┌─ tests/simplifier-elimination/assign_unpack_references.move:27:33 │ 27 │ R { s1: S { f }, s2 } = &mut R { s1: S{f: 0}, s2: S{f: 1} }; f; s2; diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/binary_add.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/binary_add.exp index 2cf7745cb0aa4..bd4b2b38ac8da 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/binary_add.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/binary_add.exp @@ -53,10 +53,10 @@ module 0x8675309::M { struct R { f: u64, } - private fun t0(x: u64,r: M::R) { + private fun t0(x: u64,r: R) { Add(Copy(x), Move(x)); - Add(select M::R.f(r), select M::R.f(r)); - Add(Add(Add(1, select M::R.f(r)), select M::R.f(r)), 0); + Add(select M::R.f(r), select M::R.f(r)); + Add(Add(Add(1, select M::R.f(r)), select M::R.f(r)), 0); { let M::R{ f: _ } = r; Tuple() @@ -64,5 +64,18 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0(x: u64, r: R) { + (copy x) + (move x); + r.f + r.f; + 1 + r.f + r.f + 0; + let R{f: _} = r; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/bind_with_type_annot.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/bind_with_type_annot.exp index 8568e22ecebc0..1f36cd2a23f36 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/bind_with_type_annot.exp +++ 
b/third_party/move/move-compiler-v2/tests/simplifier-elimination/bind_with_type_annot.exp @@ -31,12 +31,22 @@ module 0x8675309::M { } private fun t0() { { - let (x: u64, b: bool, M::R{ f }): (u64, bool, M::R) = Tuple(0, false, pack M::R(0)); + let (x: u64, b: bool, M::R{ f }): (u64, bool, R) = Tuple(0, false, pack M::R(0)); Tuple() } } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + f: u64, + } + fun t0() { + let (x,b,R{f: f}) = (0, false, R{f: 0}); + } +} + Diagnostics: warning: Unused assignment to `f`. Consider removing or prefixing with an underscore: `_f` diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/constant_all_valid_types.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/constant_all_valid_types.exp index a92a5c0f0eaca..ed3b1526f28c9 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/constant_all_valid_types.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/constant_all_valid_types.exp @@ -72,5 +72,34 @@ module _0 { } } // end _0 +// -- Sourcified model before bytecode pipeline +module 0x42::M { + fun t1(): u8 { + 0u8 + } + fun t2(): u64 { + 0 + } + fun t3(): u128 { + 0u128 + } + fun t4(): bool { + false + } + fun t5(): address { + 0x0 + } + fun t6(): vector { + vector[1u8, 35u8] + } + fun t7(): vector { + vector[97u8, 98u8, 99u8, 100u8] + } +} +script { + fun t() { + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/double_nesting.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/double_nesting.exp index 704e33333decc..013e348b7560a 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/double_nesting.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/double_nesting.exp @@ -22,5 +22,24 @@ module 0x42::test { } } // end 0x42::test +// -- Sourcified model 
before bytecode pipeline +module 0x42::mathtest2 { + public inline fun fun2(a: u64, b: u64, c: u64): u64 { + 7u128 * (a as u128) + 11u128 * (b as u128) + 13u128 * (c as u128) as u64 + } +} +module 0x42::mathtest { + public inline fun fun1(a: u64, b: u64, c: u64): u64 { + 2u128 * (a as u128) + 3u128 * (b as u128) + 5u128 * (c as u128) as u64 + } +} +module 0x42::test { + use 0x42::mathtest2; + use 0x42::mathtest; + fun test_nested_fun1() { + if (true) () else abort 0; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/else_assigns_if_doesnt.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/else_assigns_if_doesnt.exp index 5341e02a3dceb..dc7f6f24d283d 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/else_assigns_if_doesnt.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/else_assigns_if_doesnt.exp @@ -24,6 +24,21 @@ module _0 { } } // end _0 +// -- Sourcified model before bytecode pipeline +script { + fun main() { + let x; + let y; + if (true) { + y = 0; + } else { + x = 42; + x; + }; + if (y == 0) () else abort 42; + } +} + Diagnostics: error: use of possibly unassigned local `y` diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_assigns_else_doesnt.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_assigns_else_doesnt.exp index 60725545fcc86..5c7d031f7cbe2 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_assigns_else_doesnt.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_assigns_else_doesnt.exp @@ -24,6 +24,21 @@ module _0 { } } // end _0 +// -- Sourcified model before bytecode pipeline +script { + fun main() { + let x; + let y; + if (true) { + x = 42; + } else { + y = 0; + y; + }; + if (x == 42) () else abort 42; + } +} + Diagnostics: error: use of possibly unassigned local `x` diff --git 
a/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_assigns_no_else.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_assigns_no_else.exp index 73f3d514c0647..7d45092d77f72 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_assigns_no_else.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_assigns_no_else.exp @@ -25,5 +25,14 @@ module _0 { } } // end _0 +// -- Sourcified model before bytecode pipeline +script { + fun main() { + let x; + x = 42; + if (x == 42) () else abort 42; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_condition.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_condition.exp index d78ee58957b72..564bc31ccabb0 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_condition.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/if_condition.exp @@ -54,5 +54,13 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun t0() { + } + fun t1() { + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/moved_var_not_simplified.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/moved_var_not_simplified.exp index 6541388dec1c1..5993472addce0 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/moved_var_not_simplified.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/moved_var_not_simplified.exp @@ -11,6 +11,15 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test(): u8 { + let x = 40u8; + let y = move x; + x + y + } +} + Diagnostics: error: cannot move local `x` since it is still in use diff --git 
a/third_party/move/move-compiler-v2/tests/simplifier-elimination/moved_var_not_simplified2.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/moved_var_not_simplified2.exp index e464e08c45a5e..d68aa02f51c4c 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/moved_var_not_simplified2.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/moved_var_not_simplified2.exp @@ -14,6 +14,16 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test(): u8 { + let x = 40u8; + let y = move x; + let _z = x; + y + } +} + Diagnostics: error: cannot move local `x` since it is still in use diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/recursive_nesting.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/recursive_nesting.exp index a7c00dc82c2a1..be3551312c2b4 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/recursive_nesting.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/recursive_nesting.exp @@ -39,5 +39,35 @@ module 0x42::test { } } // end 0x42::test +// -- Sourcified model before bytecode pipeline +module 0x42::mathtest { + public inline fun mul_div(a: u64, b: u64, c: u64): u64 { + (a as u128) * (b as u128) / (c as u128) as u64 + } +} +module 0x42::mathtest2 { + use 0x42::mathtest; + public inline fun mul_div2(a: u64, b: u64, c: u64): u64 { + let (a,b,c) = (b, a, c); + (a as u128) * (b as u128) / (c as u128) as u64 + } +} +module 0x42::mathtest3 { + use 0x42::mathtest2; + public inline fun mul_div3(a: u64, b: u64, c: u64): u64 { + let (a,b,c) = (b, a, c); + let (a,b,c) = (b, a, c); + (a as u128) * (b as u128) / (c as u128) as u64 + } +} +module 0x42::test { + use 0x42::mathtest; + use 0x42::mathtest2; + use 0x42::mathtest3; + fun test_nested_mul_div() { + if (true) () else abort 0; + } +} + ============ bytecode verification succeeded ======== 
diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign.exp index ac63e9d86f699..2cd3b3cdb5ccb 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign.exp @@ -19,6 +19,14 @@ module _0 { } } // end _0 +// -- Sourcified model before bytecode pipeline +script { + fun main() { + let x; + let y = x; + } +} + Diagnostics: error: use of unassigned local `x` diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign_loop.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign_loop.exp index 56337567699df..549cc95c615c2 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign_loop.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign_loop.exp @@ -64,6 +64,45 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun tborrow1() { + let x; + loop { + let y = &x; + _ = move y; + x = 0 + } + } + fun tborrow2(cond: bool) { + let x; + loop { + let y = &x; + _ = move y; + if (cond) x = 0; + break + }; + x; + } + fun tcopy(cond: bool) { + let x; + loop { + let y = x + 1; + if (cond) continue; + x = 0; + y; + } + } + fun tmove() { + let x; + loop { + let y = (move x) + 1; + x = 0; + y; + } + } +} + Diagnostics: error: use of possibly unassigned local `x` diff --git a/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign_while.exp b/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign_while.exp index 2152707901fdc..ee8a0531edccb 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign_while.exp +++ 
b/third_party/move/move-compiler-v2/tests/simplifier-elimination/use_before_assign_while.exp @@ -78,6 +78,44 @@ module 0x8675309::M { } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + fun tborrow1(cond: bool) { + let x; + while (cond) { + let y = &x; + _ = move y; + x = 0 + } + } + fun tborrow2(cond: bool) { + let x; + while (cond) { + let y = &x; + _ = move y; + if (cond) x = 0; + break + } + } + fun tcopy(cond: bool) { + let x; + while (cond) { + let y = (move x) + 1; + if (cond) continue; + x = 0; + y; + } + } + fun tmove(cond: bool) { + let x; + while (cond) { + let y = (move x) + 1; + x = 0; + y; + } + } +} + Diagnostics: error: use of possibly unassigned local `x` diff --git a/third_party/move/move-compiler-v2/tests/simplifier/bug_11112.exp b/third_party/move/move-compiler-v2/tests/simplifier/bug_11112.exp index 2d26e677fd493..fb79d81744afa 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/bug_11112.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/bug_11112.exp @@ -35,5 +35,26 @@ module 0xcafe::vectors { } } // end 0xcafe::vectors +// -- Sourcified model before bytecode pipeline +module 0xcafe::vectors { + fun test_for_each_mut() { + let v = vector[1, 2, 3]; + let s = 2; + { + let (v) = (&mut v); + let i = 0; + while (i < 0x1::vector::length(/*freeze*/v)) { + { + let (e) = (0x1::vector::borrow_mut(v, i)); + *e = s; + s = s + 1 + }; + i = i + 1 + } + }; + if (v == vector[2, 3, 4]) () else abort 0; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier/conditional_borrow.exp b/third_party/move/move-compiler-v2/tests/simplifier/conditional_borrow.exp index fe3356ff35068..eb1d17caa267e 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/conditional_borrow.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/conditional_borrow.exp @@ -47,39 +47,39 @@ module 0x8675::M { } } } - private fun test1b(r: 
M::S): u64 { + private fun test1b(r: S): u64 { { - let x: M::S = pack M::S(3); + let x: S = pack M::S(3); { - let tref: &mut M::S = Borrow(Mutable)(if Lt(select M::S.f(r), 4) { + let tref: &mut S = Borrow(Mutable)(if Lt(select M::S.f(r), 4) { r } else { x }); - select M::S.f(Deref(tref)) = 10; + select M::S.f(Deref(tref)) = 10; { - let y: M::S = r; + let y: S = r; { - let tref2: &mut M::S = Borrow(Mutable)(y); - select M::S.f(Deref(tref2)) = Add(select M::S.f(Deref(tref2)), 1); + let tref2: &mut S = Borrow(Mutable)(y); + select M::S.f(Deref(tref2)) = Add(select M::S.f(Deref(tref2)), 1); { - let z: M::S = y; + let z: S = y; { - let tref3: &mut u64 = Borrow(Mutable)(select M::S.f(z)); + let tref3: &mut u64 = Borrow(Mutable)(select M::S.f(z)); tref3 = Add(Deref(tref3), 1); { - let a: M::S = z; + let a: S = z; { - let tref4: &mut u64 = Borrow(Mutable)(select M::S.f(a)); + let tref4: &mut u64 = Borrow(Mutable)(select M::S.f(a)); tref4 = Add(Deref(tref4), 1); { - let tref5: &mut u64 = Borrow(Mutable)(select M::S.f(a)); + let tref5: &mut u64 = Borrow(Mutable)(select M::S.f(a)); tref5 = Add(Deref(tref5), 8); { let tref6: &mut u64 = Borrow(Mutable)(3; - select M::S.f(a)); + select M::S.f(a)); tref6 = Add(Deref(tref6), 16); - select M::S.f(a) + select M::S.f(a) } } } @@ -96,5 +96,61 @@ module 0x8675::M { } } // end 0x8675::M +// -- Sourcified model before bytecode pipeline +module 0x8675::M { + struct S has copy, drop { + f: u64, + } + public fun test(): u64 { + test1(7) + test1(2) + } + fun test1(r: u64): u64 { + let tref = &mut (if (r < 4) r else 3); + *tref = 10; + let y = r; + let tref2 = &mut y; + *tref2 = *tref2 + 1; + let z = y; + let tref3 = &mut (z + 0); + *tref3 = *tref3 + 2; + let a = z; + let tref4 = &mut a; + *tref4 = *tref4 + 4; + let tref5 = &mut a; + *tref5 = *tref5 + 8; + let tref6 = &mut { + 3; + a + }; + *tref6 = *tref6 + 16; + a + } + fun test1b(r: S): u64 { + let x = S{f: 3}; + let tref = &mut (if (r.f < 4) r else x); + (*tref).f = 10; + let y = r; + let 
tref2 = &mut y; + (*tref2).f = (*tref2).f + 1; + let z = y; + let tref3 = &mut z.f; + *tref3 = *tref3 + 1; + let a = z; + let tref4 = &mut a.f; + *tref4 = *tref4 + 1; + let tref5 = &mut a.f; + *tref5 = *tref5 + 8; + let tref6 = &mut { + 3; + a.f + }; + *tref6 = *tref6 + 16; + a.f + } + public fun testb(): u64 { + test1b(S{f: 7}) + test1b(S{f: 2}) + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier/constant_folding_addresses.exp b/third_party/move/move-compiler-v2/tests/simplifier/constant_folding_addresses.exp index 4d021dd51d00d..e8ce9561931f2 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/constant_folding_addresses.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/constant_folding_addresses.exp @@ -11,5 +11,12 @@ module 0xcafe::Addresses { } } // end 0xcafe::Addresses +// -- Sourcified model before bytecode pipeline +module 0xcafe::Addresses { + public fun test() { + if (0x1::vector::length
(&vector[0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 
0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 
0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 
0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 
0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 
0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 
0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 
0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234, 0x1234]) == 1845) () else abort 1; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier/constant_folding_ristretto.exp b/third_party/move/move-compiler-v2/tests/simplifier/constant_folding_ristretto.exp index bf7ddb3eda7a4..dfe310b087e21 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/constant_folding_ristretto.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/constant_folding_ristretto.exp @@ -10,5 +10,12 @@ module 0xcafe::Ristretto { } } // end 0xcafe::Ristretto +// -- Sourcified model before bytecode pipeline +module 0xcafe::Ristretto { + public fun test() { + if (true) () else abort 1; + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier/deep_exp.exp b/third_party/move/move-compiler-v2/tests/simplifier/deep_exp.exp index 5651a5e32c294..9d4e0ede67812 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/deep_exp.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/deep_exp.exp @@ -2930,5 +2930,2549 @@ module 0x42::Test { } } // end 0x42::Test +// -- Sourcified model before bytecode pipeline +module 0x42::Test { + inline 
fun f1(x: u64): u64 { + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x 
+ 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = 
({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + 
let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let 
(x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) 
= (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = 
(x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); 
+ x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x 
+ 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 
+ }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + 
let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + } + inline fun f2(x: u64): u64 { + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + 
let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + 
}); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + } + inline fun f3(x: u64): u64 { + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + } + inline fun f4(x: u64): u64 { + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = ({ + let (x) = (x); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + }); + x + 1 + } + inline fun f5(x: 
u64): u64 { + x + 1 + } + public fun test(): u64 { + 625 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier/moved_var_not_simplified.exp b/third_party/move/move-compiler-v2/tests/simplifier/moved_var_not_simplified.exp index 4e64252a12e5a..bba70e2914393 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/moved_var_not_simplified.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/moved_var_not_simplified.exp @@ -11,6 +11,15 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test(): u8 { + let x = 40u8; + let y = move x; + x + y + } +} + Diagnostics: error: cannot move local `x` since it is still in use diff --git a/third_party/move/move-compiler-v2/tests/simplifier/moved_var_not_simplified2.exp b/third_party/move/move-compiler-v2/tests/simplifier/moved_var_not_simplified2.exp index 25ab7c17592e2..5e4a6305ef61b 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/moved_var_not_simplified2.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/moved_var_not_simplified2.exp @@ -14,6 +14,16 @@ module 0xc0ffee::m { } } // end 0xc0ffee::m +// -- Sourcified model before bytecode pipeline +module 0xc0ffee::m { + public fun test(): u8 { + let x = 40u8; + let y = move x; + let _z = x; + y + } +} + Diagnostics: error: cannot move local `x` since it is still in use diff --git a/third_party/move/move-compiler-v2/tests/simplifier/random.exp b/third_party/move/move-compiler-v2/tests/simplifier/random.exp index 89901686f6d2a..a5c62ea1ca5a5 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/random.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/random.exp @@ -12,10 +12,10 @@ module 0x8675::M { f: u64, g: u64, } - private fun id(r: �): � { + private fun id(r: &T): &T { r } - private fun id_mut(r: &mut #0): &mut #0 { + private fun id_mut(r: &mut T): &mut T { r } 
private inline fun iterloop(x: u64,y: &u64): u64 { @@ -143,6 +143,83 @@ module 0x8675::M { } } // end 0x8675::M +// -- Sourcified model before bytecode pipeline +module 0x8675::M { + struct S { + f: u64, + g: u64, + } + fun id(r: &T): &T { + r + } + fun id_mut(r: &mut T): &mut T { + r + } + inline fun iterloop(x: u64, y: &u64): u64 { + let r = x + 3; + while (x > 0) { + x = x - *y; + }; + r + x + } + fun t0() { + let v = 0; + let x = &mut v; + let y = &mut v; + *x; + *y; + if (v == 0) { + v = 3; + } else { + v = 2; + }; + let q = v; + let x = id_mut(&mut v); + let y = &mut v; + *x; + *y; + let x = &v; + let y = &mut v; + *y; + *x; + *y; + let x = &v; + let y = &v; + *x; + *y; + *x; + let x = id(&v); + let y = &v; + *x; + *y; + *x; + } + fun test1(r: u64): u64 { + let t = r; + let t2 = 0; + while (r > 0) { + let x = r; + r = r - 1; + t2 = t2 + x; + }; + let t3 = r + t + t2; + t3 + } + fun test1a(x: u64, r: &u64): u64 { + let t = *r; + let t3 = { + let (x,y) = (x, r); + let r = x + 3; + while (x > 0) { + x = x - *y; + }; + r + x + }; + let t2 = *r + t; + t2 + t3 + t + } +} + Diagnostics: warning: Unused assignment to `q`. 
Consider removing or prefixing with an underscore: `_q` diff --git a/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test1.exp b/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test1.exp index 17267d722a3a2..1b31a2726a83f 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test1.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test1.exp @@ -29,5 +29,23 @@ module 0x8675::M { } } // end 0x8675::M +// -- Sourcified model before bytecode pipeline +module 0x8675::M { + public fun test(): u64 { + test1(10) + } + fun test1(r: u64): u64 { + let t = r; + let t2 = 0; + while (r > 0) { + let x = r; + r = r - 1; + t2 = t2 + x; + }; + let t3 = r + t + t2; + t3 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test2.exp b/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test2.exp index 761a7351a5fde..78ce707a0ac1e 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test2.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test2.exp @@ -53,5 +53,32 @@ module 0x8675::M { } } // end 0x8675::M +// -- Sourcified model before bytecode pipeline +module 0x8675::M { + public fun test(): u64 { + let (r) = (10); + let t = r; + let t2 = 0; + while (r > 0) { + let x = r; + r = r - 1; + t2 = t2 + x; + }; + let t3 = r + t + t2; + t3 + } + inline fun test1(r: u64): u64 { + let t = r; + let t2 = 0; + while (r > 0) { + let x = r; + r = r - 1; + t2 = t2 + x; + }; + let t3 = r + t + t2; + t3 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test3.exp b/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test3.exp index be956d2fdd85f..8e06a6591f039 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test3.exp +++ 
b/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test3.exp @@ -29,5 +29,24 @@ module 0x8675::M { } } // end 0x8675::M +// -- Sourcified model before bytecode pipeline +module 0x8675::M { + public fun test(): u64 { + test1(10) + } + fun test1(r: u64): u64 { + let t = r; + let t2 = 0; + while ({ + let x = r; + r = r - 1; + t2 = t2 + x; + r > 0 + }) (); + let t3 = r + t + t2; + t3 + } +} + ============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test4.exp b/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test4.exp index 66b1864dd5f27..8c31feee56126 100644 --- a/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test4.exp +++ b/third_party/move/move-compiler-v2/tests/simplifier/simplifier_test4.exp @@ -6,20 +6,20 @@ module 0x8675309::M { private fun bar(_s: &mut u64,_u: u64) { Tuple() } - private fun foo(_s: &M::S,_u: u64) { + private fun foo(_s: &S,_u: u64) { Tuple() } - private fun t0(s: &mut M::S) { + private fun t0(s: &mut S) { { let x: u64 = 0; { let f: &mut u64 = x: u64 = Add(x, 1); - Borrow(Mutable)(select M::S.f<&mut M::S>(x: u64 = Add(x, 1); + Borrow(Mutable)(select M::S.f<&mut S>(x: u64 = Add(x, 1); 1; s)); if true { x: u64 = Add(x, 1); - M::foo(Freeze(true)(s), x: u64 = Add(x, 1); + M::foo(Freeze(true)(s), x: u64 = Add(x, 1); f = 0; 1) } else { @@ -34,12 +34,51 @@ module 0x8675309::M { } } } - private fun t1(s: &mut M::S) { - M::bar(Borrow(Mutable)(select M::S.f<&mut M::S>(s)), select M::S.f<&mut M::S>(s) = 0; + private fun t1(s: &mut S) { + M::bar(Borrow(Mutable)(select M::S.f<&mut S>(s)), select M::S.f<&mut S>(s) = 0; 1) } } // end 0x8675309::M +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct S { + f: u64, + } + fun bar(_s: &mut u64, _u: u64) { + } + fun foo(_s: &S, _u: u64) { + } + fun t0(s: &mut S) { + let x = 0; + let f = { + x = x + 1; + &mut { + x = x + 1; + 1; + s + }.f + }; + if (true) { + x = x + 1; + 
foo(/*freeze*/s, { + *{ + x = x + 1; + f + } = 0; + 1 + }) + } else x = x + 1; + if (x == 4) () else abort 0; + } + fun t1(s: &mut S) { + bar(&mut s.f, { + s.f = 0; + 1 + }) + } +} + Diagnostics: error: cannot freeze value which is still mutably borrowed diff --git a/third_party/move/move-compiler-v2/tests/testsuite.rs b/third_party/move/move-compiler-v2/tests/testsuite.rs index 850dd563cc379..132c5d11d30c1 100644 --- a/third_party/move/move-compiler-v2/tests/testsuite.rs +++ b/third_party/move/move-compiler-v2/tests/testsuite.rs @@ -11,7 +11,7 @@ use move_compiler_v2::{ logging, pipeline, plan_builder, run_bytecode_verifier, run_file_format_gen, Experiment, Options, }; -use move_model::{metadata::LanguageVersion, model::GlobalEnv}; +use move_model::{metadata::LanguageVersion, model::GlobalEnv, sourcifier::Sourcifier}; use move_prover_test_utils::{baseline_test, extract_test_directives}; use move_stackless_bytecode::function_target_pipeline::FunctionTargetPipeline; use once_cell::unsync::Lazy; @@ -106,7 +106,7 @@ const TEST_CONFIGS: Lazy> = Lazy::new(|| { // Turn optimization on by default. Some configs below may turn it off. 
.set_experiment(Experiment::OPTIMIZE, true) .set_experiment(Experiment::OPTIMIZE_WAITING_FOR_COMPARE_TESTS, true) - .set_language_version(LanguageVersion::V2_0); + .set_language_version(LanguageVersion::V2_1); opts.testing = true; let configs = vec![ // --- Tests for checking and ast processing @@ -127,9 +127,9 @@ const TEST_CONFIGS: Lazy> = Lazy::new(|| { options: opts .clone() .set_experiment(Experiment::ACQUIRES_CHECK, false), - stop_after: StopAfter::BytecodeGen, + stop_after: StopAfter::BytecodeGen, // FileFormat, dump_ast: DumpLevel::EndStage, - dump_bytecode: DumpLevel::None, + dump_bytecode: DumpLevel::None, // EndStage, dump_bytecode_filter: None, }, // Tests for checking v2 language features only supported if v2 @@ -169,15 +169,10 @@ const TEST_CONFIGS: Lazy> = Lazy::new(|| { exp_suffix: None, options: opts .clone() - // Need to turn off usage checks because they complain about - // lambda parameters outside of inline functions. Other checks - // also turned off for now since they mess up baseline. 
- .set_experiment(Experiment::CHECKS, false) - .set_experiment(Experiment::OPTIMIZE, false) - .set_experiment(Experiment::OPTIMIZE_WAITING_FOR_COMPARE_TESTS, false) - .set_experiment(Experiment::INLINING, false) - .set_experiment(Experiment::RECURSIVE_TYPE_CHECK, false) - .set_experiment(Experiment::SPEC_REWRITE, false) + .set_experiment(Experiment::LAMBDA_FIELDS, true) + .set_experiment(Experiment::LAMBDA_IN_PARAMS, true) + .set_experiment(Experiment::LAMBDA_IN_RETURNS, true) + .set_experiment(Experiment::LAMBDA_VALUES, true) .set_experiment(Experiment::LAMBDA_LIFTING, true), stop_after: StopAfter::AstPipeline, dump_ast: DumpLevel::AllStages, @@ -717,6 +712,19 @@ const TEST_CONFIGS: Lazy> = Lazy::new(|| { dump_bytecode: DumpLevel::EndStage, dump_bytecode_filter: Some(vec![FILE_FORMAT_STAGE]), }, + TestConfig { + name: "op-equal", + runner: |p| run_test(p, get_config_by_name("op-equal")), + include: vec!["/op-equal/"], + exclude: vec![], + exp_suffix: None, + options: opts.clone(), + // Run the entire compiler pipeline to double-check the result + stop_after: StopAfter::FileFormat, + dump_ast: DumpLevel::EndStage, + dump_bytecode: DumpLevel::EndStage, + dump_bytecode_filter: None, + }, ]; configs.into_iter().map(|c| (c.name, c)).collect() }); @@ -786,6 +794,16 @@ fn run_test(path: &Path, config: TestConfig) -> datatest_stable::Result<()> { "// -- Model dump before bytecode pipeline\n{}\n", env.dump_env() )); + let sourcifier = Sourcifier::new(&env); + for module in env.get_modules() { + if module.is_primary_target() { + sourcifier.print_module(module.get_id()) + } + } + test_output.borrow_mut().push_str(&format!( + "// -- Sourcified model before bytecode pipeline\n{}\n", + sourcifier.result() + )); } } } diff --git a/third_party/move/move-compiler-v2/tests/uninit-use-checker/struct_use_before_assign.exp b/third_party/move/move-compiler-v2/tests/uninit-use-checker/struct_use_before_assign.exp index f343d5441ac03..a133044ef3459 100644 --- 
a/third_party/move/move-compiler-v2/tests/uninit-use-checker/struct_use_before_assign.exp +++ b/third_party/move/move-compiler-v2/tests/uninit-use-checker/struct_use_before_assign.exp @@ -53,15 +53,15 @@ warning: Unused local variable `q`. Consider removing or prefixing with an under [variant baseline] fun M::main() { var $t0: u64 - var $t1: M::R + var $t1: 0x876543::M::R var $t2: u64 var $t3: u64 var $t4: u64 var $t5: u64 var $t6: u64 0: $t2 := 3 - 1: $t1 := pack M::R($t2, $t0) - 2: ($t3, $t4) := unpack M::R($t1) + 1: $t1 := pack 0x876543::M::R($t2, $t0) + 2: ($t3, $t4) := unpack 0x876543::M::R($t1) 3: $t5 := infer($t4) 4: $t6 := infer($t3) 5: return () @@ -71,15 +71,15 @@ fun M::main() { [variant baseline] fun M::main2() { var $t0: u64 - var $t1: M::R + var $t1: 0x876543::M::R var $t2: u64 var $t3: u64 var $t4: u64 var $t5: u64 var $t6: u64 0: $t2 := 0 - 1: $t1 := pack M::R($t2, $t0) - 2: ($t3, $t4) := unpack M::R($t1) + 1: $t1 := pack 0x876543::M::R($t2, $t0) + 2: ($t3, $t4) := unpack 0x876543::M::R($t1) 3: $t5 := infer($t4) 4: $t6 := infer($t3) 5: return () @@ -88,12 +88,12 @@ fun M::main2() { [variant baseline] fun M::main3() { - var $t0: M::R + var $t0: 0x876543::M::R var $t1: u64 var $t2: u64 var $t3: u64 var $t4: u64 - 0: ($t1, $t2) := unpack M::R($t0) + 0: ($t1, $t2) := unpack 0x876543::M::R($t0) 1: $t3 := infer($t2) 2: $t4 := infer($t1) 3: return () @@ -148,7 +148,7 @@ error: use of unassigned local `x` [variant baseline] fun M::main() { var $t0: u64 - var $t1: M::R + var $t1: 0x876543::M::R var $t2: u64 var $t3: u64 var $t4: u64 @@ -157,9 +157,9 @@ fun M::main() { # before: { no: $t0, $t1, $t2, $t3, $t4, $t5, $t6 }, after: { no: $t0, $t1, $t3, $t4, $t5, $t6 } 0: $t2 := 3 # before: { no: $t0, $t1, $t3, $t4, $t5, $t6 }, after: { no: $t0, $t3, $t4, $t5, $t6 } - 1: $t1 := pack M::R($t2, $t0) + 1: $t1 := pack 0x876543::M::R($t2, $t0) # before: { no: $t0, $t3, $t4, $t5, $t6 }, after: { no: $t0, $t5, $t6 } - 2: ($t3, $t4) := unpack M::R($t1) + 2: ($t3, $t4) := 
unpack 0x876543::M::R($t1) # before: { no: $t0, $t5, $t6 }, after: { no: $t0, $t6 } 3: $t5 := infer($t4) # before: { no: $t0, $t6 }, after: { no: $t0 } @@ -172,7 +172,7 @@ fun M::main() { [variant baseline] fun M::main2() { var $t0: u64 - var $t1: M::R + var $t1: 0x876543::M::R var $t2: u64 var $t3: u64 var $t4: u64 @@ -181,9 +181,9 @@ fun M::main2() { # before: { no: $t0, $t1, $t2, $t3, $t4, $t5, $t6 }, after: { no: $t0, $t1, $t3, $t4, $t5, $t6 } 0: $t2 := 0 # before: { no: $t0, $t1, $t3, $t4, $t5, $t6 }, after: { no: $t0, $t3, $t4, $t5, $t6 } - 1: $t1 := pack M::R($t2, $t0) + 1: $t1 := pack 0x876543::M::R($t2, $t0) # before: { no: $t0, $t3, $t4, $t5, $t6 }, after: { no: $t0, $t5, $t6 } - 2: ($t3, $t4) := unpack M::R($t1) + 2: ($t3, $t4) := unpack 0x876543::M::R($t1) # before: { no: $t0, $t5, $t6 }, after: { no: $t0, $t6 } 3: $t5 := infer($t4) # before: { no: $t0, $t6 }, after: { no: $t0 } @@ -195,13 +195,13 @@ fun M::main2() { [variant baseline] fun M::main3() { - var $t0: M::R + var $t0: 0x876543::M::R var $t1: u64 var $t2: u64 var $t3: u64 var $t4: u64 # before: { no: $t0, $t1, $t2, $t3, $t4 }, after: { no: $t0, $t3, $t4 } - 0: ($t1, $t2) := unpack M::R($t0) + 0: ($t1, $t2) := unpack 0x876543::M::R($t0) # before: { no: $t0, $t3, $t4 }, after: { no: $t0, $t4 } 1: $t3 := infer($t2) # before: { no: $t0, $t4 }, after: { no: $t0 } diff --git a/third_party/move/move-compiler-v2/tests/uninit-use-checker/unused_reference.exp b/third_party/move/move-compiler-v2/tests/uninit-use-checker/unused_reference.exp index 1749474d122c6..0157525a3a232 100644 --- a/third_party/move/move-compiler-v2/tests/uninit-use-checker/unused_reference.exp +++ b/third_party/move/move-compiler-v2/tests/uninit-use-checker/unused_reference.exp @@ -2,8 +2,8 @@ [variant baseline] public fun Module0::function0() { - var $t0: &Module0::S - var $t1: Module0::S + var $t0: &0xcafe::Module0::S + var $t1: 0xcafe::Module0::S 0: $t1 := read_ref($t0) 1: return () } @@ -20,8 +20,8 @@ error: use of unassigned 
local `y` [variant baseline] public fun Module0::function0() { - var $t0: &Module0::S - var $t1: Module0::S + var $t0: &0xcafe::Module0::S + var $t1: 0xcafe::Module0::S # before: { no: $t0, $t1 }, after: { no: $t0 } 0: $t1 := read_ref($t0) # before: { no: $t0 }, after: { no: $t0 } diff --git a/third_party/move/move-compiler-v2/tests/uninit-use-checker/v1-locals/use_before_assign_simple.exp b/third_party/move/move-compiler-v2/tests/uninit-use-checker/v1-locals/use_before_assign_simple.exp index 89b580d92ae0c..46ecf613deaa7 100644 --- a/third_party/move/move-compiler-v2/tests/uninit-use-checker/v1-locals/use_before_assign_simple.exp +++ b/third_party/move/move-compiler-v2/tests/uninit-use-checker/v1-locals/use_before_assign_simple.exp @@ -5,8 +5,8 @@ fun M::tborrow() { var $t0: u64 var $t1: &u64 var $t2: &u64 - var $t3: M::S - var $t4: &M::S + var $t3: 0x8675309::M::S + var $t4: &0x8675309::M::S 0: $t1 := borrow_local($t0) 1: $t2 := infer($t1) 2: $t4 := borrow_local($t3) @@ -20,8 +20,8 @@ fun M::tcopy() { var $t1: u64 var $t2: u64 var $t3: u64 - var $t4: M::S - var $t5: M::S + var $t4: 0x8675309::M::S + var $t5: 0x8675309::M::S 0: $t2 := 1 1: $t1 := +($t0, $t2) 2: $t3 := infer($t1) @@ -37,8 +37,8 @@ fun M::tmove() { var $t2: u64 var $t3: u64 var $t4: u64 - var $t5: M::S - var $t6: M::S + var $t5: 0x8675309::M::S + var $t6: 0x8675309::M::S 0: $t2 := move($t0) 1: $t3 := 1 2: $t1 := +($t2, $t3) @@ -92,8 +92,8 @@ fun M::tborrow() { var $t0: u64 var $t1: &u64 var $t2: &u64 - var $t3: M::S - var $t4: &M::S + var $t3: 0x8675309::M::S + var $t4: &0x8675309::M::S # before: { no: $t0, $t1, $t2, $t3, $t4 }, after: { no: $t0, $t2, $t3, $t4 } 0: $t1 := borrow_local($t0) # before: { no: $t0, $t2, $t3, $t4 }, after: { no: $t0, $t3, $t4 } @@ -111,8 +111,8 @@ fun M::tcopy() { var $t1: u64 var $t2: u64 var $t3: u64 - var $t4: M::S - var $t5: M::S + var $t4: 0x8675309::M::S + var $t5: 0x8675309::M::S # before: { no: $t0, $t1, $t2, $t3, $t4, $t5 }, after: { no: $t0, $t1, $t3, $t4, $t5 } 
0: $t2 := 1 # before: { no: $t0, $t1, $t3, $t4, $t5 }, after: { no: $t0, $t3, $t4, $t5 } @@ -133,8 +133,8 @@ fun M::tmove() { var $t2: u64 var $t3: u64 var $t4: u64 - var $t5: M::S - var $t6: M::S + var $t5: 0x8675309::M::S + var $t6: 0x8675309::M::S # before: { no: $t0, $t1, $t2, $t3, $t4, $t5, $t6 }, after: { no: $t0, $t1, $t3, $t4, $t5, $t6 } 0: $t2 := move($t0) # before: { no: $t0, $t1, $t3, $t4, $t5, $t6 }, after: { no: $t0, $t1, $t4, $t5, $t6 } diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_1.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_1.exp index 2f685d7e7352d..74ef544575ec3 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_1.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_1.exp @@ -7,7 +7,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -33,8 +33,8 @@ public fun m::test2($t0: u64) { [variant baseline] -public fun m::test3($t0: m::W) { - var $t1: m::W +public fun m::test3($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := infer($t0) 1: m::consume_($t0) 2: m::consume_($t1) @@ -43,8 +43,8 @@ public fun m::test3($t0: m::W) { [variant baseline] -public fun m::test4($t0: m::W) { - var $t1: m::W +public fun m::test4($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := infer($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -60,7 +60,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -86,8 +86,8 @@ public fun m::test2($t0: u64) { [variant baseline] -public fun m::test3($t0: m::W) { - var $t1: m::W +public fun m::test3($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t0) 2: m::consume_($t1) @@ -96,8 +96,8 @@ public fun m::test3($t0: m::W) { [variant baseline] -public fun m::test4($t0: m::W) { - var $t1: m::W +public fun 
m::test4($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -115,7 +115,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { # live vars: $t0 # events: b:$t0, e:$t0 0: return () @@ -157,8 +157,8 @@ public fun m::test2($t0: u64) { [variant baseline] -public fun m::test3($t0: m::W) { - var $t1: m::W +public fun m::test3($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W # live vars: $t0 # events: b:$t0, b:$t1 0: $t1 := copy($t0) @@ -174,8 +174,8 @@ public fun m::test3($t0: m::W) { [variant baseline] -public fun m::test4($t0: m::W) { - var $t1: m::W +public fun m::test4($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W # live vars: $t0 # events: b:$t0, b:$t1 0: $t1 := copy($t0) @@ -198,7 +198,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -224,8 +224,8 @@ public fun m::test2($t0: u64) { [variant baseline] -public fun m::test3($t0: m::W) { - var $t1: m::W +public fun m::test3($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t0) 2: m::consume_($t1) @@ -234,8 +234,8 @@ public fun m::test3($t0: m::W) { [variant baseline] -public fun m::test4($t0: m::W) { - var $t1: m::W +public fun m::test4($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -251,7 +251,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -277,8 +277,8 @@ public fun m::test2($t0: u64) { [variant baseline] -public fun m::test3($t0: m::W) { - var $t1: m::W +public fun m::test3($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t0) 2: m::consume_($t1) @@ -287,8 +287,8 @@ public fun m::test3($t0: m::W) { [variant baseline] -public fun m::test4($t0: m::W) { - var $t1: m::W +public fun 
m::test4($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_1.opt.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_1.opt.exp index 2f685d7e7352d..74ef544575ec3 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_1.opt.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_1.opt.exp @@ -7,7 +7,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -33,8 +33,8 @@ public fun m::test2($t0: u64) { [variant baseline] -public fun m::test3($t0: m::W) { - var $t1: m::W +public fun m::test3($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := infer($t0) 1: m::consume_($t0) 2: m::consume_($t1) @@ -43,8 +43,8 @@ public fun m::test3($t0: m::W) { [variant baseline] -public fun m::test4($t0: m::W) { - var $t1: m::W +public fun m::test4($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := infer($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -60,7 +60,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -86,8 +86,8 @@ public fun m::test2($t0: u64) { [variant baseline] -public fun m::test3($t0: m::W) { - var $t1: m::W +public fun m::test3($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t0) 2: m::consume_($t1) @@ -96,8 +96,8 @@ public fun m::test3($t0: m::W) { [variant baseline] -public fun m::test4($t0: m::W) { - var $t1: m::W +public fun m::test4($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -115,7 +115,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { # live vars: $t0 # events: b:$t0, e:$t0 0: return () @@ -157,8 
+157,8 @@ public fun m::test2($t0: u64) { [variant baseline] -public fun m::test3($t0: m::W) { - var $t1: m::W +public fun m::test3($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W # live vars: $t0 # events: b:$t0, b:$t1 0: $t1 := copy($t0) @@ -174,8 +174,8 @@ public fun m::test3($t0: m::W) { [variant baseline] -public fun m::test4($t0: m::W) { - var $t1: m::W +public fun m::test4($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W # live vars: $t0 # events: b:$t0, b:$t1 0: $t1 := copy($t0) @@ -198,7 +198,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -224,8 +224,8 @@ public fun m::test2($t0: u64) { [variant baseline] -public fun m::test3($t0: m::W) { - var $t1: m::W +public fun m::test3($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t0) 2: m::consume_($t1) @@ -234,8 +234,8 @@ public fun m::test3($t0: m::W) { [variant baseline] -public fun m::test4($t0: m::W) { - var $t1: m::W +public fun m::test4($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -251,7 +251,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -277,8 +277,8 @@ public fun m::test2($t0: u64) { [variant baseline] -public fun m::test3($t0: m::W) { - var $t1: m::W +public fun m::test3($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t0) 2: m::consume_($t1) @@ -287,8 +287,8 @@ public fun m::test3($t0: m::W) { [variant baseline] -public fun m::test4($t0: m::W) { - var $t1: m::W +public fun m::test4($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_2.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_2.exp index d80d47d848cef..9467564dc9e2c 
100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_2.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_2.exp @@ -7,7 +7,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -23,8 +23,8 @@ public fun m::test1($t0: u64) { [variant baseline] -public fun m::test2($t0: m::W) { - var $t1: m::W +public fun m::test2($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := move($t0) 1: m::consume_($t1) 2: m::consume_($t1) @@ -40,7 +40,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -56,8 +56,8 @@ public fun m::test1($t0: u64) { [variant baseline] -public fun m::test2($t0: m::W) { - var $t1: m::W +public fun m::test2($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := move($t0) 1: m::consume_($t1) 2: m::consume_($t1) @@ -75,7 +75,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { # live vars: $t0 # events: b:$t0, e:$t0 0: return () @@ -99,8 +99,8 @@ public fun m::test1($t0: u64) { [variant baseline] -public fun m::test2($t0: m::W) { - var $t1: m::W +public fun m::test2($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W # live vars: $t0 # events: b:$t0, e:$t0, b:$t1 0: $t1 := move($t0) @@ -122,7 +122,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -138,8 +138,8 @@ public fun m::test1($t0: u64) { [variant baseline] -public fun m::test2($t0: m::W) { - var $t1: m::W [unused] +public fun m::test2($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W [unused] 0: $t0 := move($t0) 1: m::consume_($t0) 2: m::consume_($t0) @@ -155,7 +155,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -170,8 +170,8 @@ 
public fun m::test1($t0: u64) { [variant baseline] -public fun m::test2($t0: m::W) { - var $t1: m::W [unused] +public fun m::test2($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W [unused] 0: m::consume_($t0) 1: m::consume_($t0) 2: return () diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_2.opt.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_2.opt.exp index d80d47d848cef..9467564dc9e2c 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_2.opt.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_2.opt.exp @@ -7,7 +7,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -23,8 +23,8 @@ public fun m::test1($t0: u64) { [variant baseline] -public fun m::test2($t0: m::W) { - var $t1: m::W +public fun m::test2($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := move($t0) 1: m::consume_($t1) 2: m::consume_($t1) @@ -40,7 +40,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -56,8 +56,8 @@ public fun m::test1($t0: u64) { [variant baseline] -public fun m::test2($t0: m::W) { - var $t1: m::W +public fun m::test2($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := move($t0) 1: m::consume_($t1) 2: m::consume_($t1) @@ -75,7 +75,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { # live vars: $t0 # events: b:$t0, e:$t0 0: return () @@ -99,8 +99,8 @@ public fun m::test1($t0: u64) { [variant baseline] -public fun m::test2($t0: m::W) { - var $t1: m::W +public fun m::test2($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W # live vars: $t0 # events: b:$t0, e:$t0, b:$t1 0: $t1 := move($t0) @@ -122,7 +122,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: 
return () } @@ -138,8 +138,8 @@ public fun m::test1($t0: u64) { [variant baseline] -public fun m::test2($t0: m::W) { - var $t1: m::W [unused] +public fun m::test2($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W [unused] 0: $t0 := move($t0) 1: m::consume_($t0) 2: m::consume_($t0) @@ -155,7 +155,7 @@ fun m::consume($t0: u64) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -170,8 +170,8 @@ public fun m::test1($t0: u64) { [variant baseline] -public fun m::test2($t0: m::W) { - var $t1: m::W [unused] +public fun m::test2($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W [unused] 0: m::consume_($t0) 1: m::consume_($t0) 2: return () diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_3.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_3.exp index 606dd277acd0c..7ecc0e6a884b1 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_3.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_3.exp @@ -7,7 +7,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -23,8 +23,8 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_($t0: m::W) { - var $t1: m::W +public fun m::test_($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -40,7 +40,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -56,8 +56,8 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_($t0: m::W) { - var $t1: m::W +public fun m::test_($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -75,7 +75,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { # live vars: 
$t0 # events: b:$t0, e:$t0 0: return () @@ -100,8 +100,8 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_($t0: m::W) { - var $t1: m::W +public fun m::test_($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W # live vars: $t0 # events: b:$t0, b:$t1 0: $t1 := copy($t0) @@ -124,7 +124,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -140,8 +140,8 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_($t0: m::W) { - var $t1: m::W +public fun m::test_($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -157,7 +157,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -173,8 +173,8 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_($t0: m::W) { - var $t1: m::W +public fun m::test_($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_3.opt.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_3.opt.exp index 606dd277acd0c..7ecc0e6a884b1 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_3.opt.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_3.opt.exp @@ -7,7 +7,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -23,8 +23,8 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_($t0: m::W) { - var $t1: m::W +public fun m::test_($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -40,7 +40,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) 
{ 0: return () } @@ -56,8 +56,8 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_($t0: m::W) { - var $t1: m::W +public fun m::test_($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -75,7 +75,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { # live vars: $t0 # events: b:$t0, e:$t0 0: return () @@ -100,8 +100,8 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_($t0: m::W) { - var $t1: m::W +public fun m::test_($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W # live vars: $t0 # events: b:$t0, b:$t1 0: $t1 := copy($t0) @@ -124,7 +124,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -140,8 +140,8 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_($t0: m::W) { - var $t1: m::W +public fun m::test_($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) @@ -157,7 +157,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -173,8 +173,8 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_($t0: m::W) { - var $t1: m::W +public fun m::test_($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_4.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_4.exp index 0c6d1f781f678..f2b1a0c6caaf7 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_4.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_4.exp @@ -7,7 +7,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) 
{ 0: return () } @@ -25,9 +25,9 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_struct($t0: m::W) { - var $t1: m::W - var $t2: m::W +public fun m::test_struct($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W + var $t2: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: $t2 := move($t1) 2: m::consume_($t2) @@ -44,7 +44,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -62,9 +62,9 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_struct($t0: m::W) { - var $t1: m::W - var $t2: m::W +public fun m::test_struct($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W + var $t2: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: $t2 := move($t1) 2: m::consume_($t2) @@ -83,7 +83,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { # live vars: $t0 # events: b:$t0, e:$t0 0: return () @@ -112,9 +112,9 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_struct($t0: m::W) { - var $t1: m::W - var $t2: m::W +public fun m::test_struct($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W + var $t2: 0xc0ffee::m::W # live vars: $t0 # events: b:$t0, b:$t1 0: $t1 := copy($t0) @@ -140,7 +140,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -158,9 +158,9 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_struct($t0: m::W) { - var $t1: m::W - var $t2: m::W [unused] +public fun m::test_struct($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W + var $t2: 0xc0ffee::m::W [unused] 0: $t1 := copy($t0) 1: $t1 := move($t1) 2: m::consume_($t1) @@ -177,7 +177,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -194,9 +194,9 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_struct($t0: m::W) { - 
var $t1: m::W - var $t2: m::W [unused] +public fun m::test_struct($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W + var $t2: 0xc0ffee::m::W [unused] 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_4.opt.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_4.opt.exp index 0c6d1f781f678..f2b1a0c6caaf7 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_4.opt.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_4.opt.exp @@ -7,7 +7,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -25,9 +25,9 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_struct($t0: m::W) { - var $t1: m::W - var $t2: m::W +public fun m::test_struct($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W + var $t2: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: $t2 := move($t1) 2: m::consume_($t2) @@ -44,7 +44,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -62,9 +62,9 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_struct($t0: m::W) { - var $t1: m::W - var $t2: m::W +public fun m::test_struct($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W + var $t2: 0xc0ffee::m::W 0: $t1 := copy($t0) 1: $t2 := move($t1) 2: m::consume_($t2) @@ -83,7 +83,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { # live vars: $t0 # events: b:$t0, e:$t0 0: return () @@ -112,9 +112,9 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_struct($t0: m::W) { - var $t1: m::W - var $t2: m::W +public fun m::test_struct($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W + var $t2: 0xc0ffee::m::W # live vars: $t0 # events: b:$t0, b:$t1 0: $t1 := copy($t0) @@ -140,7 +140,7 
@@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -158,9 +158,9 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_struct($t0: m::W) { - var $t1: m::W - var $t2: m::W [unused] +public fun m::test_struct($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W + var $t2: 0xc0ffee::m::W [unused] 0: $t1 := copy($t0) 1: $t1 := move($t1) 2: m::consume_($t1) @@ -177,7 +177,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -194,9 +194,9 @@ public fun m::test($t0: u32) { [variant baseline] -public fun m::test_struct($t0: m::W) { - var $t1: m::W - var $t2: m::W [unused] +public fun m::test_struct($t0: 0xc0ffee::m::W) { + var $t1: 0xc0ffee::m::W + var $t2: 0xc0ffee::m::W [unused] 0: $t1 := copy($t0) 1: m::consume_($t1) 2: m::consume_($t0) diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_5.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_5.exp index 77bfd28f9ff4c..1c5c097d9e7c9 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_5.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_5.exp @@ -7,7 +7,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -37,8 +37,8 @@ public fun m::test($t0: bool, $t1: u32) { [variant baseline] -public fun m::test_struct($t0: bool, $t1: m::W) { - var $t2: m::W +public fun m::test_struct($t0: bool, $t1: 0xc0ffee::m::W) { + var $t2: 0xc0ffee::m::W var $t3: bool 0: $t2 := copy($t1) 1: if ($t0) goto 2 else goto 5 @@ -68,7 +68,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -98,8 +98,8 @@ public fun m::test($t0: bool, $t1: u32) { [variant baseline] -public fun m::test_struct($t0: 
bool, $t1: m::W) { - var $t2: m::W +public fun m::test_struct($t0: bool, $t1: 0xc0ffee::m::W) { + var $t2: 0xc0ffee::m::W var $t3: bool 0: $t2 := copy($t1) 1: if ($t0) goto 2 else goto 14 @@ -131,7 +131,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { # live vars: $t0 # events: b:$t0, e:$t0 0: return () @@ -184,8 +184,8 @@ public fun m::test($t0: bool, $t1: u32) { [variant baseline] -public fun m::test_struct($t0: bool, $t1: m::W) { - var $t2: m::W +public fun m::test_struct($t0: bool, $t1: 0xc0ffee::m::W) { + var $t2: 0xc0ffee::m::W var $t3: bool # live vars: $t0, $t1 # events: b:$t0, b:$t1, b:$t2 @@ -236,7 +236,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -266,8 +266,8 @@ public fun m::test($t0: bool, $t1: u32) { [variant baseline] -public fun m::test_struct($t0: bool, $t1: m::W) { - var $t2: m::W +public fun m::test_struct($t0: bool, $t1: 0xc0ffee::m::W) { + var $t2: 0xc0ffee::m::W var $t3: bool 0: $t2 := copy($t1) 1: if ($t0) goto 2 else goto 14 @@ -297,7 +297,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -327,8 +327,8 @@ public fun m::test($t0: bool, $t1: u32) { [variant baseline] -public fun m::test_struct($t0: bool, $t1: m::W) { - var $t2: m::W +public fun m::test_struct($t0: bool, $t1: 0xc0ffee::m::W) { + var $t2: 0xc0ffee::m::W var $t3: bool 0: $t2 := copy($t1) 1: if ($t0) goto 2 else goto 14 diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_5.opt.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_5.opt.exp index 77bfd28f9ff4c..1c5c097d9e7c9 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_5.opt.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/consume_5.opt.exp @@ -7,7 +7,7 @@ fun m::consume($t0: 
u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -37,8 +37,8 @@ public fun m::test($t0: bool, $t1: u32) { [variant baseline] -public fun m::test_struct($t0: bool, $t1: m::W) { - var $t2: m::W +public fun m::test_struct($t0: bool, $t1: 0xc0ffee::m::W) { + var $t2: 0xc0ffee::m::W var $t3: bool 0: $t2 := copy($t1) 1: if ($t0) goto 2 else goto 5 @@ -68,7 +68,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -98,8 +98,8 @@ public fun m::test($t0: bool, $t1: u32) { [variant baseline] -public fun m::test_struct($t0: bool, $t1: m::W) { - var $t2: m::W +public fun m::test_struct($t0: bool, $t1: 0xc0ffee::m::W) { + var $t2: 0xc0ffee::m::W var $t3: bool 0: $t2 := copy($t1) 1: if ($t0) goto 2 else goto 14 @@ -131,7 +131,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { # live vars: $t0 # events: b:$t0, e:$t0 0: return () @@ -184,8 +184,8 @@ public fun m::test($t0: bool, $t1: u32) { [variant baseline] -public fun m::test_struct($t0: bool, $t1: m::W) { - var $t2: m::W +public fun m::test_struct($t0: bool, $t1: 0xc0ffee::m::W) { + var $t2: 0xc0ffee::m::W var $t3: bool # live vars: $t0, $t1 # events: b:$t0, b:$t1, b:$t2 @@ -236,7 +236,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -266,8 +266,8 @@ public fun m::test($t0: bool, $t1: u32) { [variant baseline] -public fun m::test_struct($t0: bool, $t1: m::W) { - var $t2: m::W +public fun m::test_struct($t0: bool, $t1: 0xc0ffee::m::W) { + var $t2: 0xc0ffee::m::W var $t3: bool 0: $t2 := copy($t1) 1: if ($t0) goto 2 else goto 14 @@ -297,7 +297,7 @@ fun m::consume($t0: u32) { [variant baseline] -fun m::consume_($t0: m::W) { +fun m::consume_($t0: 0xc0ffee::m::W) { 0: return () } @@ -327,8 +327,8 @@ public fun m::test($t0: bool, 
$t1: u32) { [variant baseline] -public fun m::test_struct($t0: bool, $t1: m::W) { - var $t2: m::W +public fun m::test_struct($t0: bool, $t1: 0xc0ffee::m::W) { + var $t2: 0xc0ffee::m::W var $t3: bool 0: $t2 := copy($t1) 1: if ($t0) goto 2 else goto 14 diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/mut_refs_2.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/mut_refs_2.exp index 1a59b3c7f2a73..1b05ff1d6aaec 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/mut_refs_2.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/mut_refs_2.exp @@ -1,23 +1,23 @@ ============ initial bytecode ================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { var $t1: u64 - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: &0xc0ffee::m::S var $t8: &u64 0: $t2 := infer($t0) 1: $t3 := infer($t2) 2: $t5 := borrow_local($t2) - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) 4: $t6 := 0 5: write_ref($t4, $t6) 6: $t7 := borrow_local($t3) - 7: $t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) 8: $t1 := read_ref($t8) 9: return $t1 } @@ -25,23 +25,23 @@ fun m::test($t0: m::S): u64 { ============ after DeadStoreElimination: ================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { var $t1: u64 - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: &0xc0ffee::m::S var $t8: &u64 0: $t2 := move($t0) 1: $t3 := copy($t2) 2: $t5 := borrow_local($t2) - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) 4: $t6 := 0 5: write_ref($t4, $t6) 6: $t7 := borrow_local($t3) - 7: 
$t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) 8: $t1 := read_ref($t8) 9: return $t1 } @@ -49,14 +49,14 @@ fun m::test($t0: m::S): u64 { ============ after VariableCoalescingAnnotator: ================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { var $t1: u64 - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: &0xc0ffee::m::S var $t8: &u64 # live vars: $t0 # events: b:$t0, e:$t0 @@ -68,7 +68,7 @@ fun m::test($t0: m::S): u64 { 2: $t5 := borrow_local($t2) # live vars: $t3, $t5 # events: e:$t5, b:$t4 - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) # live vars: $t3, $t4 # events: b:$t6 4: $t6 := 0 @@ -80,7 +80,7 @@ fun m::test($t0: m::S): u64 { 6: $t7 := borrow_local($t3) # live vars: $t7 # events: e:$t7, b:$t8 - 7: $t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) # live vars: $t8 # events: e:$t8, b:$t1 8: $t1 := read_ref($t8) @@ -92,23 +92,23 @@ fun m::test($t0: m::S): u64 { ============ after VariableCoalescingTransformer: ================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { var $t1: u64 [unused] - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: &0xc0ffee::m::S var $t8: &u64 0: $t2 := move($t0) 1: $t3 := copy($t2) 2: $t5 := borrow_local($t2) - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) 4: $t6 := 0 5: write_ref($t4, $t6) 6: $t7 := borrow_local($t3) - 7: $t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) 8: $t6 := read_ref($t8) 9: return $t6 } @@ -116,23 +116,23 @@ fun m::test($t0: m::S): u64 { ============ after DeadStoreElimination: 
================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { var $t1: u64 [unused] - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: &0xc0ffee::m::S var $t8: &u64 0: $t2 := move($t0) 1: $t3 := copy($t2) 2: $t5 := borrow_local($t2) - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) 4: $t6 := 0 5: write_ref($t4, $t6) 6: $t7 := borrow_local($t3) - 7: $t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) 8: $t6 := read_ref($t8) 9: return $t6 } diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/mut_refs_2.opt.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/mut_refs_2.opt.exp index 1a59b3c7f2a73..1b05ff1d6aaec 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/mut_refs_2.opt.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/mut_refs_2.opt.exp @@ -1,23 +1,23 @@ ============ initial bytecode ================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { var $t1: u64 - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: &0xc0ffee::m::S var $t8: &u64 0: $t2 := infer($t0) 1: $t3 := infer($t2) 2: $t5 := borrow_local($t2) - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) 4: $t6 := 0 5: write_ref($t4, $t6) 6: $t7 := borrow_local($t3) - 7: $t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) 8: $t1 := read_ref($t8) 9: return $t1 } @@ -25,23 +25,23 @@ fun m::test($t0: m::S): u64 { ============ after DeadStoreElimination: ================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { 
var $t1: u64 - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: &0xc0ffee::m::S var $t8: &u64 0: $t2 := move($t0) 1: $t3 := copy($t2) 2: $t5 := borrow_local($t2) - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) 4: $t6 := 0 5: write_ref($t4, $t6) 6: $t7 := borrow_local($t3) - 7: $t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) 8: $t1 := read_ref($t8) 9: return $t1 } @@ -49,14 +49,14 @@ fun m::test($t0: m::S): u64 { ============ after VariableCoalescingAnnotator: ================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { var $t1: u64 - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: &0xc0ffee::m::S var $t8: &u64 # live vars: $t0 # events: b:$t0, e:$t0 @@ -68,7 +68,7 @@ fun m::test($t0: m::S): u64 { 2: $t5 := borrow_local($t2) # live vars: $t3, $t5 # events: e:$t5, b:$t4 - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) # live vars: $t3, $t4 # events: b:$t6 4: $t6 := 0 @@ -80,7 +80,7 @@ fun m::test($t0: m::S): u64 { 6: $t7 := borrow_local($t3) # live vars: $t7 # events: e:$t7, b:$t8 - 7: $t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) # live vars: $t8 # events: e:$t8, b:$t1 8: $t1 := read_ref($t8) @@ -92,23 +92,23 @@ fun m::test($t0: m::S): u64 { ============ after VariableCoalescingTransformer: ================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { var $t1: u64 [unused] - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: 
&0xc0ffee::m::S var $t8: &u64 0: $t2 := move($t0) 1: $t3 := copy($t2) 2: $t5 := borrow_local($t2) - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) 4: $t6 := 0 5: write_ref($t4, $t6) 6: $t7 := borrow_local($t3) - 7: $t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) 8: $t6 := read_ref($t8) 9: return $t6 } @@ -116,23 +116,23 @@ fun m::test($t0: m::S): u64 { ============ after DeadStoreElimination: ================ [variant baseline] -fun m::test($t0: m::S): u64 { +fun m::test($t0: 0xc0ffee::m::S): u64 { var $t1: u64 [unused] - var $t2: m::S - var $t3: m::S + var $t2: 0xc0ffee::m::S + var $t3: 0xc0ffee::m::S var $t4: &mut u64 - var $t5: &mut m::S + var $t5: &mut 0xc0ffee::m::S var $t6: u64 - var $t7: &m::S + var $t7: &0xc0ffee::m::S var $t8: &u64 0: $t2 := move($t0) 1: $t3 := copy($t2) 2: $t5 := borrow_local($t2) - 3: $t4 := borrow_field.a($t5) + 3: $t4 := borrow_field<0xc0ffee::m::S>.a($t5) 4: $t6 := 0 5: write_ref($t4, $t6) 6: $t7 := borrow_local($t3) - 7: $t8 := borrow_field.a($t7) + 7: $t8 := borrow_field<0xc0ffee::m::S>.a($t7) 8: $t6 := read_ref($t8) 9: return $t6 } diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/sequential_assign_struct.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/sequential_assign_struct.exp index ef5799020a8ac..d07e8b35bdcd3 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/sequential_assign_struct.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/sequential_assign_struct.exp @@ -1,13 +1,13 @@ ============ initial bytecode ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo - var $t2: m::Foo - var $t3: m::Foo - var $t4: m::Foo - var $t5: m::Foo - var $t6: m::Foo +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo + var $t2: 0xc0ffee::m::Foo + var $t3: 0xc0ffee::m::Foo + var $t4: 0xc0ffee::m::Foo + var $t5: 
0xc0ffee::m::Foo + var $t6: 0xc0ffee::m::Foo 0: $t2 := infer($t0) 1: $t3 := infer($t2) 2: $t4 := infer($t3) @@ -20,13 +20,13 @@ fun m::sequential($t0: m::Foo): m::Foo { ============ after DeadStoreElimination: ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo - var $t2: m::Foo - var $t3: m::Foo - var $t4: m::Foo - var $t5: m::Foo - var $t6: m::Foo +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo + var $t2: 0xc0ffee::m::Foo + var $t3: 0xc0ffee::m::Foo + var $t4: 0xc0ffee::m::Foo + var $t5: 0xc0ffee::m::Foo + var $t6: 0xc0ffee::m::Foo 0: $t2 := move($t0) 1: $t3 := move($t2) 2: $t4 := move($t3) @@ -39,13 +39,13 @@ fun m::sequential($t0: m::Foo): m::Foo { ============ after VariableCoalescingAnnotator: ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo - var $t2: m::Foo - var $t3: m::Foo - var $t4: m::Foo - var $t5: m::Foo - var $t6: m::Foo +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo + var $t2: 0xc0ffee::m::Foo + var $t3: 0xc0ffee::m::Foo + var $t4: 0xc0ffee::m::Foo + var $t5: 0xc0ffee::m::Foo + var $t6: 0xc0ffee::m::Foo # live vars: $t0 # events: b:$t0, e:$t0, b:$t2 0: $t2 := move($t0) @@ -72,13 +72,13 @@ fun m::sequential($t0: m::Foo): m::Foo { ============ after VariableCoalescingTransformer: ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo [unused] - var $t2: m::Foo [unused] - var $t3: m::Foo [unused] - var $t4: m::Foo [unused] - var $t5: m::Foo [unused] - var $t6: m::Foo [unused] +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo [unused] + var $t2: 0xc0ffee::m::Foo [unused] + var $t3: 0xc0ffee::m::Foo [unused] + var $t4: 0xc0ffee::m::Foo [unused] + var $t5: 0xc0ffee::m::Foo [unused] + var $t6: 0xc0ffee::m::Foo [unused] 0: $t0 := move($t0) 1: $t0 := move($t0) 2: $t0 := move($t0) @@ -91,13 +91,13 @@ fun 
m::sequential($t0: m::Foo): m::Foo { ============ after DeadStoreElimination: ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo [unused] - var $t2: m::Foo [unused] - var $t3: m::Foo [unused] - var $t4: m::Foo [unused] - var $t5: m::Foo [unused] - var $t6: m::Foo [unused] +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo [unused] + var $t2: 0xc0ffee::m::Foo [unused] + var $t3: 0xc0ffee::m::Foo [unused] + var $t4: 0xc0ffee::m::Foo [unused] + var $t5: 0xc0ffee::m::Foo [unused] + var $t6: 0xc0ffee::m::Foo [unused] 0: return $t0 } diff --git a/third_party/move/move-compiler-v2/tests/variable-coalescing/sequential_assign_struct.opt.exp b/third_party/move/move-compiler-v2/tests/variable-coalescing/sequential_assign_struct.opt.exp index ef5799020a8ac..d07e8b35bdcd3 100644 --- a/third_party/move/move-compiler-v2/tests/variable-coalescing/sequential_assign_struct.opt.exp +++ b/third_party/move/move-compiler-v2/tests/variable-coalescing/sequential_assign_struct.opt.exp @@ -1,13 +1,13 @@ ============ initial bytecode ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo - var $t2: m::Foo - var $t3: m::Foo - var $t4: m::Foo - var $t5: m::Foo - var $t6: m::Foo +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo + var $t2: 0xc0ffee::m::Foo + var $t3: 0xc0ffee::m::Foo + var $t4: 0xc0ffee::m::Foo + var $t5: 0xc0ffee::m::Foo + var $t6: 0xc0ffee::m::Foo 0: $t2 := infer($t0) 1: $t3 := infer($t2) 2: $t4 := infer($t3) @@ -20,13 +20,13 @@ fun m::sequential($t0: m::Foo): m::Foo { ============ after DeadStoreElimination: ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo - var $t2: m::Foo - var $t3: m::Foo - var $t4: m::Foo - var $t5: m::Foo - var $t6: m::Foo +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo + var $t2: 0xc0ffee::m::Foo + var 
$t3: 0xc0ffee::m::Foo + var $t4: 0xc0ffee::m::Foo + var $t5: 0xc0ffee::m::Foo + var $t6: 0xc0ffee::m::Foo 0: $t2 := move($t0) 1: $t3 := move($t2) 2: $t4 := move($t3) @@ -39,13 +39,13 @@ fun m::sequential($t0: m::Foo): m::Foo { ============ after VariableCoalescingAnnotator: ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo - var $t2: m::Foo - var $t3: m::Foo - var $t4: m::Foo - var $t5: m::Foo - var $t6: m::Foo +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo + var $t2: 0xc0ffee::m::Foo + var $t3: 0xc0ffee::m::Foo + var $t4: 0xc0ffee::m::Foo + var $t5: 0xc0ffee::m::Foo + var $t6: 0xc0ffee::m::Foo # live vars: $t0 # events: b:$t0, e:$t0, b:$t2 0: $t2 := move($t0) @@ -72,13 +72,13 @@ fun m::sequential($t0: m::Foo): m::Foo { ============ after VariableCoalescingTransformer: ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo [unused] - var $t2: m::Foo [unused] - var $t3: m::Foo [unused] - var $t4: m::Foo [unused] - var $t5: m::Foo [unused] - var $t6: m::Foo [unused] +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo [unused] + var $t2: 0xc0ffee::m::Foo [unused] + var $t3: 0xc0ffee::m::Foo [unused] + var $t4: 0xc0ffee::m::Foo [unused] + var $t5: 0xc0ffee::m::Foo [unused] + var $t6: 0xc0ffee::m::Foo [unused] 0: $t0 := move($t0) 1: $t0 := move($t0) 2: $t0 := move($t0) @@ -91,13 +91,13 @@ fun m::sequential($t0: m::Foo): m::Foo { ============ after DeadStoreElimination: ================ [variant baseline] -fun m::sequential($t0: m::Foo): m::Foo { - var $t1: m::Foo [unused] - var $t2: m::Foo [unused] - var $t3: m::Foo [unused] - var $t4: m::Foo [unused] - var $t5: m::Foo [unused] - var $t6: m::Foo [unused] +fun m::sequential($t0: 0xc0ffee::m::Foo): 0xc0ffee::m::Foo { + var $t1: 0xc0ffee::m::Foo [unused] + var $t2: 0xc0ffee::m::Foo [unused] + var $t3: 0xc0ffee::m::Foo [unused] + var $t4: 0xc0ffee::m::Foo 
[unused] + var $t5: 0xc0ffee::m::Foo [unused] + var $t6: 0xc0ffee::m::Foo [unused] 0: return $t0 } diff --git a/third_party/move/move-compiler-v2/tests/visibility-checker/mix_friend_package_visibility_valid.exp b/third_party/move/move-compiler-v2/tests/visibility-checker/mix_friend_package_visibility_valid.exp index cd4161e3b9462..45287e306b344 100644 --- a/third_party/move/move-compiler-v2/tests/visibility-checker/mix_friend_package_visibility_valid.exp +++ b/third_party/move/move-compiler-v2/tests/visibility-checker/mix_friend_package_visibility_valid.exp @@ -10,3 +10,15 @@ module 0x42::A { Tuple() } } // end 0x42::A + +// -- Sourcified model before bytecode pipeline +module 0x42::B { + friend 0x42::A; + friend fun foo() { + } +} +module 0x42::A { + friend fun foo() { + 0x42::B::foo(); + } +} diff --git a/third_party/move/move-compiler-v2/tests/visibility-checker/package_visibility.exp b/third_party/move/move-compiler-v2/tests/visibility-checker/package_visibility.exp index 1423e7fb8a51c..f06c111924612 100644 --- a/third_party/move/move-compiler-v2/tests/visibility-checker/package_visibility.exp +++ b/third_party/move/move-compiler-v2/tests/visibility-checker/package_visibility.exp @@ -31,3 +31,37 @@ module 0x42::C { B::foo() } } // end 0x42::C + +// -- Sourcified model before bytecode pipeline +module 0x42::A { + friend 0x42::B; + friend fun bar() { + } + fun foo() { + } +} +module 0x42::B { + use 0x42::A; + friend 0x42::C; + public fun bar() { + A::bar() + } + friend fun foo() { + A::bar() + } + fun baz() { + A::bar() + } +} +module 0x42::C { + use 0x42::B; + public fun bar() { + B::foo() + } + friend fun foo() { + B::foo() + } + fun baz() { + B::foo() + } +} diff --git a/third_party/move/move-compiler-v2/tests/visibility-checker/v1-typing/module_call_visibility_friend.exp b/third_party/move/move-compiler-v2/tests/visibility-checker/v1-typing/module_call_visibility_friend.exp index f3424d16dd504..46adaacca8b01 100644 --- 
a/third_party/move/move-compiler-v2/tests/visibility-checker/v1-typing/module_call_visibility_friend.exp +++ b/third_party/move/move-compiler-v2/tests/visibility-checker/v1-typing/module_call_visibility_friend.exp @@ -49,3 +49,51 @@ module 0x2::M { M::f_friend() } } // end 0x2::M + +// -- Sourcified model before bytecode pipeline +module 0x2::Y { + friend 0x2::M; + friend fun f_friend() { + } +} +module 0x2::X { + public fun f_public() { + } +} +module 0x2::M { + use 0x2::Y; + use 0x2::X; + friend fun f_friend() { + } + public fun f_public() { + } + friend fun f_friend_call_friend() { + Y::f_friend() + } + friend fun f_friend_call_public() { + X::f_public() + } + friend fun f_friend_call_self_friend() { + f_friend() + } + friend fun f_friend_call_self_private() { + f_private() + } + friend fun f_friend_call_self_public() { + f_public() + } + fun f_private() { + } + fun f_private_call_friend() { + Y::f_friend() + } + fun f_private_call_self_friend() { + f_friend() + } + public fun f_public_call_friend() { + Y::f_friend() + } + public fun f_public_call_self_friend() { + f_friend() + } +} diff --git a/third_party/move/move-compiler-v2/tests/visibility-checker/visibility_complex.exp b/third_party/move/move-compiler-v2/tests/visibility-checker/visibility_complex.exp index 6cf8b10e577f7..1196b8a46fbea 100644 --- a/third_party/move/move-compiler-v2/tests/visibility-checker/visibility_complex.exp +++ b/third_party/move/move-compiler-v2/tests/visibility-checker/visibility_complex.exp @@ -23,3 +23,29 @@ module 0x42::D { Tuple() } } // end 0x42::D + +// -- Sourcified model before bytecode pipeline +module 0x42::B { + friend 0x42::C; + friend 0x42::D; + friend fun foo() { + } +} +module 0x42::A { + friend 0x42::C; + friend fun foo() { + } +} +module 0x42::C { + friend 0x42::D; + friend fun foo() { + 0x42::A::foo(); + 0x42::B::foo(); + } +} +module 0x42::D { + friend fun bar() { + 0x42::B::foo(); + 0x42::C::foo(); + } +} diff --git 
a/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.no-optimize.exp b/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.no-optimize.exp index 626523b1b0547..478e6e10b07a0 100644 --- a/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.no-optimize.exp +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.no-optimize.exp @@ -2,8 +2,7 @@ comparison between v1 and v2 failed: = processed 1 task = = task 0 'print-bytecode'. lines 2-14: -- // Move bytecode v6 -+ // Move bytecode v7 += // Move bytecode v7 = module c0ffee.m { = = diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.optimize-no-simplify.exp b/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.optimize-no-simplify.exp index c892ee61a15af..5644195ea665d 100644 --- a/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.optimize-no-simplify.exp +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.optimize-no-simplify.exp @@ -1,33 +1,32 @@ -comparison between v1 and v2 failed: -= processed 1 task -= -= task 0 'print-bytecode'. lines 2-14: -- // Move bytecode v6 -+ // Move bytecode v7 -= module c0ffee.m { -= -= -= id_mut(Arg0: &mut Ty0): &mut Ty0 /* def_idx: 0 */ { -= B0: -= 0: MoveLoc[0](Arg0: &mut Ty0) -= 1: Ret -= } -= t0() /* def_idx: 1 */ { -= L0: loc0: u64 -= L1: loc1: &mut u64 -= B0: -= 0: LdU64(0) -= 1: StLoc[0](loc0: u64) -= 2: MutBorrowLoc[0](loc0: u64) -= 3: StLoc[1](loc1: &mut u64) -= 4: CopyLoc[1](loc1: &mut u64) -= 5: Call id_mut(&mut u64): &mut u64 -= 6: ReadRef -= 7: Pop -= 8: MoveLoc[1](loc1: &mut u64) -= 9: ReadRef -= 10: Pop -= 11: Ret -= } -= } -= +processed 1 task + +task 0 'print-bytecode'. 
lines 2-14: +// Move bytecode v7 +module c0ffee.m { + + +id_mut(Arg0: &mut Ty0): &mut Ty0 /* def_idx: 0 */ { +B0: + 0: MoveLoc[0](Arg0: &mut Ty0) + 1: Ret +} +t0() /* def_idx: 1 */ { +L0: loc0: u64 +L1: loc1: &mut u64 +B0: + 0: LdU64(0) + 1: StLoc[0](loc0: u64) + 2: MutBorrowLoc[0](loc0: u64) + 3: StLoc[1](loc1: &mut u64) + 4: CopyLoc[1](loc1: &mut u64) + 5: Call id_mut(&mut u64): &mut u64 + 6: ReadRef + 7: Pop + 8: MoveLoc[1](loc1: &mut u64) + 9: ReadRef + 10: Pop + 11: Ret +} +} + +==> Compiler v2 delivered same results! diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.optimize.exp b/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.optimize.exp index c892ee61a15af..5644195ea665d 100644 --- a/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.optimize.exp +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/misc/bug_14243_stack_size.optimize.exp @@ -1,33 +1,32 @@ -comparison between v1 and v2 failed: -= processed 1 task -= -= task 0 'print-bytecode'. lines 2-14: -- // Move bytecode v6 -+ // Move bytecode v7 -= module c0ffee.m { -= -= -= id_mut(Arg0: &mut Ty0): &mut Ty0 /* def_idx: 0 */ { -= B0: -= 0: MoveLoc[0](Arg0: &mut Ty0) -= 1: Ret -= } -= t0() /* def_idx: 1 */ { -= L0: loc0: u64 -= L1: loc1: &mut u64 -= B0: -= 0: LdU64(0) -= 1: StLoc[0](loc0: u64) -= 2: MutBorrowLoc[0](loc0: u64) -= 3: StLoc[1](loc1: &mut u64) -= 4: CopyLoc[1](loc1: &mut u64) -= 5: Call id_mut(&mut u64): &mut u64 -= 6: ReadRef -= 7: Pop -= 8: MoveLoc[1](loc1: &mut u64) -= 9: ReadRef -= 10: Pop -= 11: Ret -= } -= } -= +processed 1 task + +task 0 'print-bytecode'. 
lines 2-14: +// Move bytecode v7 +module c0ffee.m { + + +id_mut(Arg0: &mut Ty0): &mut Ty0 /* def_idx: 0 */ { +B0: + 0: MoveLoc[0](Arg0: &mut Ty0) + 1: Ret +} +t0() /* def_idx: 1 */ { +L0: loc0: u64 +L1: loc1: &mut u64 +B0: + 0: LdU64(0) + 1: StLoc[0](loc0: u64) + 2: MutBorrowLoc[0](loc0: u64) + 3: StLoc[1](loc1: &mut u64) + 4: CopyLoc[1](loc1: &mut u64) + 5: Call id_mut(&mut u64): &mut u64 + 6: ReadRef + 7: Pop + 8: MoveLoc[1](loc1: &mut u64) + 9: ReadRef + 10: Pop + 11: Ret +} +} + +==> Compiler v2 delivered same results! diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/loop_labels.exp b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/loop_labels.exp new file mode 100644 index 0000000000000..6cd67db3f6472 --- /dev/null +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/loop_labels.exp @@ -0,0 +1 @@ +processed 1 task diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/loop_labels.move b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/loop_labels.move new file mode 100644 index 0000000000000..b1c0cf1f69c32 --- /dev/null +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/loop_labels.move @@ -0,0 +1,18 @@ +//# run +script { + fun main() { + let result = 0; + 'outer: while (result < 100) { + while (result < 50) { + 'inner: while (result < 30) { + result += 1; + continue 'outer + }; + result += 10; + continue 'outer + }; + result += 20 + }; + assert!(result == 110); + } +} diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/eval_order.exp b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/eval_order.exp new file mode 100644 index 0000000000000..2301000e50ae4 --- /dev/null +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/eval_order.exp @@ -0,0 +1 @@ 
+processed 6 tasks diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/eval_order.move b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/eval_order.move new file mode 100644 index 0000000000000..3629bf1b1ae26 --- /dev/null +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/eval_order.move @@ -0,0 +1,52 @@ +//# publish +module 0xc0ffee::m { + public fun test0() { + let v = 1; + v += {v += {v += 2; v}; v}; + assert!(v == 12); + } + + public fun test1() { + let v = 1; + v += {v += 2; v}; + assert!(v == 6); + } + + fun mod1(r: &mut u64) { + *r += 2; + } + + public fun test2() { + let v = 1; + v += {mod1(&mut v); v}; + assert!(v == 6); + } + + fun mod2(r: &mut u64): u64 { + *r += 2; + *r + } + + public fun test3() { + let v = 1; + v += mod2(&mut v); + assert!(v == 6); + } + + public fun test4() { + let i = 0; + let xs = vector[1, 2, 3]; + xs[{ i += 1; i }] += xs[{ i += 1; i }]; + assert!(xs == vector[1, 2, 5]); + } +} + +//# run --verbose -- 0xc0ffee::m::test0 + +//# run --verbose -- 0xc0ffee::m::test1 + +//# run --verbose -- 0xc0ffee::m::test2 + +//# run --verbose -- 0xc0ffee::m::test3 + +//# run --verbose -- 0xc0ffee::m::test4 diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/no_double_eval.exp b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/no_double_eval.exp new file mode 100644 index 0000000000000..457ace9c4acb6 --- /dev/null +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/no_double_eval.exp @@ -0,0 +1 @@ +processed 4 tasks diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/no_double_eval.move b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/no_double_eval.move new file mode 100644 index 0000000000000..1ca982b34a547 --- 
/dev/null +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/no_double_eval.move @@ -0,0 +1,30 @@ +//# publish +module 0xc0ffee::m1 { + fun foo(r: &mut u64): &mut u64 { + *r += 1; + r + } + + public fun test() { + let x = 1; + *{foo(&mut x)} += 1; + assert!(x == 3); + } +} + +//# publish +module 0xc0ffee::m2 { + fun foo(r: &mut u64) { + *r += 2; + } + + public fun test() { + let x = 1; + *{foo(&mut x); foo(&mut x); &mut x} += 1; + assert!(x == 6); + } +} + +//# run --verbose -- 0xc0ffee::m1::test + +//# run --verbose -- 0xc0ffee::m2::test diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/valid.exp b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/valid.exp new file mode 100644 index 0000000000000..8c37110a41a8c --- /dev/null +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/valid.exp @@ -0,0 +1 @@ +processed 9 tasks diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/valid.move b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/valid.move new file mode 100644 index 0000000000000..d1827deb71830 --- /dev/null +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/no-v1-comparison/op_equal/valid.move @@ -0,0 +1,166 @@ +//# publish +module 0x42::test { + struct Coin(u256) has drop; + + struct Wrapper(T) has drop; + + fun add1_old(x: u256): u256 { + x = x + 1; + x + } + + fun add1_new(x: u256): u256 { + x += 1; + x + } + + fun test1() { + assert!(add1_old(42) == add1_new(42)); + } + + fun inc_new(x: &mut u256) { + *x += 1; + } + + fun inc_old(x: &mut u256) { + *x = *x + 1; + } + + fun test2() { + let x = 42; + let y = x; + inc_new(&mut x); + inc_old(&mut y); + assert!(x == y); + } + + fun coin_inc_new_1(self: &mut Coin) { + self.0 += 1; + } + + fun coin_inc_new_2(self: &mut Coin) { + let p = &mut self.0; + *p 
= *p + 1; + } + + fun coin_inc_old_1(self: &mut Coin) { + self.0 = self.0 + 1; + } + + fun coin_inc_old_2(self: &mut Coin) { + let p = &mut self.0; + *p = *p + 1; + } + + fun test3() { + let x = Coin(42); + let y = Coin(42); + let z = Coin(42); + let w = Coin(42); + coin_inc_new_1(&mut x); + coin_inc_new_2(&mut y); + coin_inc_old_1(&mut z); + coin_inc_old_2(&mut w); + assert!(&x == &y); + assert!(&x == &z); + assert!(&x == &w); + } + + fun inc_wrapped_coin_new(x: &mut Wrapper) { + x.0.0 += 1; + } + + fun inc_wrapped_coin_old(x: &mut Wrapper) { + x.0.0 = x.0.0 + 1; + } + + fun test4() { + let x = Wrapper(Coin(42)); + let y = Wrapper(Coin(42)); + inc_wrapped_coin_new(&mut x); + inc_wrapped_coin_old(&mut y); + assert!(x == y); + } + + fun inc_vec_new(x: &mut vector, index: u64) { + x[index] += 1; + } + + fun inc_vec_old(x: &mut vector, index: u64) { + x[index] = x[index] + 1; + } + + fun test5() { + let x = vector[42]; + let y = vector[42]; + inc_vec_new(&mut x, 0); + inc_vec_old(&mut y, 0); + assert!(x == y); + } + + fun inc_vec_coin_new(x: vector, index: u64): vector { + x[index].0 += 1; + x + } + + fun inc_vec_coin_old(x: vector, index: u64): vector { + x[index].0 = x[index].0 + 1; + x + } + + fun test6() { + let x = vector[Coin(42)]; + let y = vector[Coin(42)]; + let x = inc_vec_coin_new(x, 0); + let y = inc_vec_coin_old(y, 0); + assert!(x == y); + } + + fun inc_vec_wrapped_coin_new(x: vector>, index: u64): vector> { + x[index].0.0 += 1; + x + } + + fun inc_vec_wrapped_coin_old(x: vector>, index: u64): vector> { + x[index].0.0 = x[index].0.0 + 1; + x + } + + fun test7() { + let x = vector>[Wrapper(Coin(42))]; + let y = vector>[Wrapper(Coin(42))]; + let x = inc_vec_wrapped_coin_new(x, 0); + let y = inc_vec_wrapped_coin_old(y, 0); + assert!(x == y); + } + + fun x_plusplus(x: &mut u64): u64 { + let res = *x; + *x += 1; + res + } + + fun test8() { + let x = 0; + let y = vector[0, 1]; + y[x_plusplus(&mut x)] += 1; + assert!(y == vector[1, 1]); + } + +} + +//# run 
--verbose -- 0x42::test::test1 + +//# run --verbose -- 0x42::test::test2 + +//# run --verbose -- 0x42::test::test3 + +//# run --verbose -- 0x42::test::test4 + +//# run --verbose -- 0x42::test::test5 + +//# run --verbose -- 0x42::test::test6 + +//# run --verbose -- 0x42::test::test7 + +//# run --verbose -- 0x42::test::test8 diff --git a/third_party/move/move-compiler-v2/transactional-tests/tests/tests.rs b/third_party/move/move-compiler-v2/transactional-tests/tests/tests.rs index 7f92f87fa4522..15ac83d8d0dae 100644 --- a/third_party/move/move-compiler-v2/transactional-tests/tests/tests.rs +++ b/third_party/move/move-compiler-v2/transactional-tests/tests/tests.rs @@ -44,7 +44,7 @@ const TEST_CONFIGS: &[TestConfig] = &[ (Experiment::OPTIMIZE_WAITING_FOR_COMPARE_TESTS, true), (Experiment::ACQUIRES_CHECK, false), ], - language_version: LanguageVersion::V2_0, + language_version: LanguageVersion::V2_1, include: &[], // all tests except those excluded below exclude: &["/operator_eval/"], }, @@ -55,7 +55,7 @@ const TEST_CONFIGS: &[TestConfig] = &[ (Experiment::OPTIMIZE, false), (Experiment::ACQUIRES_CHECK, false), ], - language_version: LanguageVersion::V2_0, + language_version: LanguageVersion::V2_1, include: &[], // all tests except those excluded below exclude: &["/operator_eval/"], }, @@ -68,7 +68,7 @@ const TEST_CONFIGS: &[TestConfig] = &[ (Experiment::AST_SIMPLIFY, false), (Experiment::ACQUIRES_CHECK, false), ], - language_version: LanguageVersion::V2_0, + language_version: LanguageVersion::V2_1, include: &[], // all tests except those excluded below exclude: &["/operator_eval/"], }, @@ -84,7 +84,7 @@ const TEST_CONFIGS: &[TestConfig] = &[ name: "operator-eval-lang-2", runner: |p| run(p, get_config_by_name("operator-eval-lang-2")), experiments: &[(Experiment::OPTIMIZE, true)], - language_version: LanguageVersion::V2_0, + language_version: LanguageVersion::V2_1, include: &["/operator_eval/"], exclude: &[], }, diff --git 
a/third_party/move/move-compiler/src/command_line/mod.rs b/third_party/move/move-compiler/src/command_line/mod.rs index 4439856d1ab52..c829b905d5f36 100644 --- a/third_party/move/move-compiler/src/command_line/mod.rs +++ b/third_party/move/move-compiler/src/command_line/mod.rs @@ -34,6 +34,8 @@ pub const FLAVOR: &str = "flavor"; pub const BYTECODE_VERSION: &str = "bytecode-version"; +pub const LANGUAGE_VERSION: &str = "language-version"; + /// Color flag interpreted locally in diagnostics/mod.rs. /// (Is translated to codespan_reporting::term::termcolor::ColorChoice). /// Choices here are `NONE`, `ANSI`, `ALWAYS`, with default to Auto. diff --git a/third_party/move/move-compiler/src/expansion/ast.rs b/third_party/move/move-compiler/src/expansion/ast.rs index 880234d6fc715..47db86f2654ba 100644 --- a/third_party/move/move-compiler/src/expansion/ast.rs +++ b/third_party/move/move-compiler/src/expansion/ast.rs @@ -5,7 +5,7 @@ use crate::{ expansion::translate::is_valid_struct_constant_or_schema_name, parser::ast::{ - self as P, Ability, Ability_, BinOp, CallKind, ConstantName, Field, FunctionName, + self as P, Ability, Ability_, BinOp, CallKind, ConstantName, Field, FunctionName, Label, ModuleName, QuantKind, SpecApplyPattern, StructName, UnaryOp, UseDecl, Var, VariantName, ENTRY_MODIFIER, }, @@ -25,7 +25,6 @@ use std::{ fmt, hash::Hash, }; - //************************************************************************************************** // Program //************************************************************************************************** @@ -504,8 +503,8 @@ pub enum Exp_ { IfElse(Box, Box, Box), Match(Box, Vec, Exp)>>), - While(Box, Box), - Loop(Box), + While(Option