Skip to content

Commit

Permalink
Merge branch 'main' into teng/fix-14762
Browse files Browse the repository at this point in the history
  • Loading branch information
rahxephon89 authored Oct 15, 2024
2 parents cb59b44 + 184ece9 commit 651f2de
Show file tree
Hide file tree
Showing 235 changed files with 7,985 additions and 1,783 deletions.
15 changes: 13 additions & 2 deletions .github/workflows/copy-images-to-dockerhub.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@ on:
required: false
type: string
description: the git sha to use for the image tag. If not provided, the git sha of the triggering branch will be used
dry_run:
required: false
type: boolean
default: false
description: If true, run the workflow without actually pushing images
workflow_dispatch:
inputs:
image_tag_prefix:
Expand All @@ -21,6 +26,11 @@ on:
required: false
type: string
description: the git sha to use for the image tag. If not provided, the git sha of the triggering branch will be used
dry_run:
required: false
type: boolean
default: false
description: If true, run the workflow without actually pushing images

permissions:
contents: read
Expand All @@ -29,7 +39,7 @@ permissions:
jobs:
copy-images:
# Run on a machine with more local storage for large docker images
runs-on: medium-perf-docker-with-local-ssd
runs-on: runs-on,cpu=16,family=m6id,hdd=500,image=aptos-ubuntu-x64,run-id=${{ github.run_id }}
steps:
- uses: actions/checkout@v4

Expand Down Expand Up @@ -61,4 +71,5 @@ jobs:
AWS_ACCOUNT_ID: ${{ secrets.AWS_ECR_ACCOUNT_NUM }}
GCP_DOCKER_ARTIFACT_REPO: ${{ vars.GCP_DOCKER_ARTIFACT_REPO }}
IMAGE_TAG_PREFIX: ${{ inputs.image_tag_prefix }}
run: ./docker/release-images.mjs --wait-for-image-seconds=3600
DRY_RUN: ${{ inputs.dry_run }}
run: ./docker/release-images.mjs --wait-for-image-seconds=3600 ${{ inputs.dry_run && '--dry-run' || '' }}
4 changes: 3 additions & 1 deletion .github/workflows/indexer-processor-testing.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ on:
pull_request: # Trigger on PR-level events
branches:
- main
paths:
- 'ecosystem/indexer-grpc/indexer-test-transactions/**' # Only trigger if files under this path change

# the required permissions to request the ID token
permissions:
Expand All @@ -14,7 +16,7 @@ permissions:

jobs:
dispatch_event:
runs-on: runs-on,cpu=64,family=c7,hdd=500,image=aptos-ubuntu-x64,run-id=${{ github.run_id }}
runs-on: runs-on,cpu=16,family=c7,hdd=500,image=aptos-ubuntu-x64,run-id=${{ github.run_id }}

steps:
- name: Checkout the repository
Expand Down
24 changes: 23 additions & 1 deletion .github/workflows/workflow-run-execution-performance.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,19 @@ on:
default: false
type: boolean
description: Ignore target determination and run the tests
SKIP_MOVE_E2E:
required: false
default: false
type: boolean
description: Whether to run or skip move-only e2e tests at the beginning.
SOURCE:
required: false
default: CI
type: string
NUMBER_OF_EXECUTION_THREADS:
required: false
default: "32"
type: string
# This allows the workflow to be triggered manually from the Github UI or CLI
# NOTE: because the "number" type is not supported, we default to 720 minute timeout
workflow_dispatch:
Expand All @@ -43,9 +52,17 @@ on:
- benchmark-t2d-32
- benchmark-t2d-60
- benchmark-c3d-30
- benchmark-c3d-60
- benchmark-c3d-180
- benchmark-n4-32
- benchmark-c4-32
- benchmark-c4-48
- benchmark-c4-96
description: The name of the runner to use for the test. (which decides machine specs)
NUMBER_OF_EXECUTION_THREADS:
required: false
default: "32"
type: string
FLOW:
required: false
default: LAND_BLOCKING
Expand All @@ -56,6 +73,11 @@ on:
- MAINNET_LARGE_DB
type: choice
description: Which set of tests to run. MAINNET/MAINNET_LARGE_DB are for performance validation of mainnet nodes.
SKIP_MOVE_E2E:
required: false
default: false
type: boolean
description: Whether to skip move-only e2e tests at the beginning.
IGNORE_TARGET_DETERMINATION:
required: false
default: true
Expand Down Expand Up @@ -98,7 +120,7 @@ jobs:

- name: Run single node execution benchmark in performance build mode
shell: bash
run: TABULATE_INSTALL=lib-only pip install tabulate && FLOW=${{ inputs.FLOW }} SOURCE=${{ inputs.SOURCE }} testsuite/single_node_performance.py
run: TABULATE_INSTALL=lib-only pip install tabulate && FLOW="${{ inputs.FLOW }}" SOURCE="${{ inputs.SOURCE }}" RUNNER_NAME="${{ inputs.RUNNER_NAME }}" SKIP_MOVE_E2E="${{ inputs.SKIP_MOVE_E2E && '1' || '' }}" NUMBER_OF_EXECUTION_THREADS="${{ inputs.NUMBER_OF_EXECUTION_THREADS }}" testsuite/single_node_performance.py
if: ${{ (inputs.IGNORE_TARGET_DETERMINATION || needs.test-target-determinator.outputs.run_execution_performance_test == 'true') }}

- run: echo "Skipping single node execution performance! Unrelated changes detected."
Expand Down
39 changes: 32 additions & 7 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,7 @@ members = [
"dkg",
"ecosystem/indexer-grpc/indexer-grpc-cache-worker",
"ecosystem/indexer-grpc/indexer-grpc-data-service",
"ecosystem/indexer-grpc/indexer-grpc-file-checker",
"ecosystem/indexer-grpc/indexer-grpc-file-store",
"ecosystem/indexer-grpc/indexer-grpc-file-store-backfiller",
"ecosystem/indexer-grpc/indexer-grpc-fullnode",
Expand Down Expand Up @@ -360,6 +361,7 @@ aptos-indexer = { path = "crates/indexer" }
aptos-indexer-grpc-cache-worker = { path = "ecosystem/indexer-grpc/indexer-grpc-cache-worker" }
aptos-indexer-grpc-data-service = { path = "ecosystem/indexer-grpc/indexer-grpc-data-service" }
aptos-indexer-grpc-file-store = { path = "ecosystem/indexer-grpc/indexer-grpc-file-store" }
aptos-indexer-grpc-file-checker = { path = "ecosystem/indexer-grpc/indexer-grpc-file-checker" }
aptos-indexer-grpc-file-store-backfiller = { path = "ecosystem/indexer-grpc/indexer-grpc-file-store-backfiller" }
aptos-indexer-grpc-fullnode = { path = "ecosystem/indexer-grpc/indexer-grpc-fullnode" }
aptos-indexer-grpc-in-memory-cache-benchmark = { path = "ecosystem/indexer-grpc/indexer-grpc-in-memory-cache-benchmark" }
Expand Down
2 changes: 1 addition & 1 deletion RUST_SECURE_CODING.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ These Rust Secure Coding Guidelines are essential for anyone contributing to Apt

### Rustup

Utilize Rustup for managing Rust toolchains. However, keep in mind that, from a security perspective, Rustup performs all downloads over HTTPS, but it does not yet validate signatures of downloads. Security is shifted to [create.io](http://create.io) and GitHub repository hosting the code [[rustup]](https://www.rust-lang.org/tools/install).
Utilize Rustup for managing Rust toolchains. However, keep in mind that, from a security perspective, Rustup performs all downloads over HTTPS, but it does not yet validate signatures of downloads. Security is shifted to [crates.io](http://crates.io) and GitHub repository hosting the code [[rustup]](https://www.rust-lang.org/tools/install).

### Stable Toolchain

Expand Down
22 changes: 22 additions & 0 deletions api/doc/spec.json
Original file line number Diff line number Diff line change
Expand Up @@ -1900,6 +1900,28 @@
"operationId": "spec"
}
},
"/info": {
"get": {
"tags": [
"General"
],
"summary": "Show some basic info of the node.",
"responses": {
"200": {
"description": "",
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": {}
}
}
}
}
},
"operationId": "info"
}
},
"/-/healthy": {
"get": {
"tags": [
Expand Down
14 changes: 14 additions & 0 deletions api/doc/spec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -1416,6 +1416,20 @@ paths:
schema:
type: string
operationId: spec
/info:
get:
tags:
- General
summary: Show some basic info of the node.
responses:
'200':
description: ''
content:
application/json:
schema:
type: object
additionalProperties: {}
operationId: info
/-/healthy:
get:
tags:
Expand Down
57 changes: 56 additions & 1 deletion api/src/basic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,14 @@ use crate::{
};
use anyhow::Context as AnyhowContext;
use aptos_api_types::AptosErrorCode;
use poem_openapi::{param::Query, payload::Html, Object, OpenApi};
use poem_openapi::{
param::Query,
payload::{Html, Json},
Object, OpenApi,
};
use serde::{Deserialize, Serialize};
use std::{
collections::HashMap,
ops::Sub,
sync::Arc,
time::{Duration, SystemTime, UNIX_EPOCH},
Expand Down Expand Up @@ -60,6 +65,56 @@ impl BasicApi {
Html(OPEN_API_HTML.to_string())
}

/// Show some basic info of the node.
#[oai(
    path = "/info",
    method = "get",
    operation_id = "info",
    tag = "ApiTags::General"
)]
async fn info(&self) -> Json<HashMap<String, serde_json::Value>> {
    // Hoist the deeply-nested config paths once instead of repeating
    // the full `self.context.node_config.state_sync.state_sync_driver`
    // chain for every entry.
    let node_config = &self.context.node_config;
    let driver = &node_config.state_sync.state_sync_driver;

    // Every value below is a plain `Serialize`-derived config
    // enum/struct/bool, so `serde_json::to_value` cannot fail here;
    // `expect` documents that invariant instead of a bare `unwrap`.
    let mut info = HashMap::new();
    info.insert(
        "bootstrapping_mode".to_string(),
        serde_json::to_value(driver.bootstrapping_mode)
            .expect("bootstrapping_mode must be serializable"),
    );
    info.insert(
        "continuous_syncing_mode".to_string(),
        serde_json::to_value(driver.continuous_syncing_mode)
            .expect("continuous_syncing_mode must be serializable"),
    );
    info.insert(
        "new_storage_format".to_string(),
        serde_json::to_value(node_config.storage.rocksdb_configs.enable_storage_sharding)
            .expect("enable_storage_sharding must be serializable"),
    );
    info.insert(
        "internal_indexer_config".to_string(),
        serde_json::to_value(&node_config.indexer_db_config)
            .expect("indexer_db_config must be serializable"),
    );

    Json(info)
}

/// Check basic node health
///
/// By default this endpoint just checks that it can get the latest ledger
Expand Down
4 changes: 2 additions & 2 deletions api/src/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -858,7 +858,7 @@ impl Context {
} else {
self.indexer_reader
.as_ref()
.ok_or(anyhow!("Indexer reader is None"))
.ok_or_else(|| anyhow!("Indexer reader is None"))
.map_err(|err| {
E::internal_with_code(err, AptosErrorCode::InternalError, ledger_info)
})?
Expand Down Expand Up @@ -957,7 +957,7 @@ impl Context {
} else {
self.indexer_reader
.as_ref()
.ok_or(anyhow!("Internal indexer reader doesn't exist"))?
.ok_or_else(|| anyhow!("Internal indexer reader doesn't exist"))?
.get_events(event_key, start, order, limit as u64, ledger_version)?
};
if order == Order::Descending {
Expand Down
Loading

0 comments on commit 651f2de

Please sign in to comment.