diff --git a/.github/workflows/deny.yml b/.github/workflows/deny.yml
deleted file mode 100644
index 758c8e589..000000000
--- a/.github/workflows/deny.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-name: deny
-
-on:
-  push:
-    branches: [main]
-    paths: [Cargo.lock, deny.toml]
-  pull_request:
-    branches: [main]
-    paths: [Cargo.lock, deny.toml]
-
-env:
-  CARGO_TERM_COLOR: always
-
-jobs:
-  cargo-deny:
-    name: cargo deny check
-    runs-on: ubuntu-latest
-    timeout-minutes: 30
-    steps:
-      - uses: actions/checkout@v4
-      - uses: EmbarkStudios/cargo-deny-action@v1
-        with:
-          command: check all
-          # Clear out arguments to not pass `--all-features` to `cargo deny`.
-          # many crates have an `openssl` feature which enables banned dependencies
-          arguments: ""
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 4a2f8caa7..4b99917c9 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -106,7 +106,6 @@ jobs:
 
   zk-cargo-test:
     runs-on: ubuntu-22.04-github-hosted-16core
-
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
@@ -145,6 +144,14 @@
         run: |
           ZK_DEBUG_HISTORICAL_BLOCK_HASHES=5 cargo nextest run --package '*' --lib --test '*' --filter-expr 'test(~zk) and not test(~test_zk_aave_di)'
 
+  deny:
+    uses: ithacaxyz/ci/.github/workflows/deny.yml@main
+    with:
+      # Clear out arguments to not pass `--all-features` to `cargo deny`.
+      # Many crates have an `openssl` feature which enables banned dependencies.
+      deny-flags: ""
+
+
   check-ci-install:
     name: CI install
     runs-on: ubuntu-22.04
diff --git a/Cargo.lock b/Cargo.lock
index 872764abb..fd2091c4b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3
+version = 4
 
 [[package]]
 name = "Inflector"
@@ -90,9 +90,9 @@ dependencies = [
 
 [[package]]
 name = "allocator-api2"
-version = "0.2.20"
+version = "0.2.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9"
+checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
 
 [[package]]
 name = "alloy"
@@ -100,30 +100,30 @@ version = "0.6.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b5b524b8c28a7145d1fe4950f84360b5de3e307601679ff0558ddc20ea229399"
 dependencies = [
- "alloy-consensus",
- "alloy-contract",
+ "alloy-consensus 0.6.4",
+ "alloy-contract 0.6.4",
  "alloy-core",
- "alloy-eips",
- "alloy-genesis",
- "alloy-network",
- "alloy-provider",
- "alloy-pubsub",
- "alloy-rpc-client",
- "alloy-rpc-types",
- "alloy-serde",
- "alloy-signer",
- "alloy-signer-local",
- "alloy-transport",
- "alloy-transport-http",
- "alloy-transport-ipc",
- "alloy-transport-ws",
+ "alloy-eips 0.6.4",
+ "alloy-genesis 0.6.4",
+ "alloy-network 0.6.4",
+ "alloy-provider 0.6.4",
+ "alloy-pubsub 0.6.4",
+ "alloy-rpc-client 0.6.4",
+ "alloy-rpc-types 0.6.4",
+ "alloy-serde 0.6.4",
+ "alloy-signer 0.6.4",
+ "alloy-signer-local 0.6.4",
+ "alloy-transport 0.6.4",
+ "alloy-transport-http 0.6.4",
+ "alloy-transport-ipc 0.6.4",
+ "alloy-transport-ws 0.6.4",
 ]
 
 [[package]]
 name = "alloy-chains"
-version = "0.1.47"
+version = "0.1.48"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18c5c520273946ecf715c0010b4e3503d7eba9893cd9ce6b7fff5654c4a3c470"
+checksum = "a0161082e0edd9013d23083465cc04b20e44b7a15646d36ba7b0cdb7cd6fe18f"
 dependencies = [
  "alloy-primitives",
  "num_enum 0.7.3",
@@ -137,10 +137,28 @@ version = "0.6.4"
 source =
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ae09ffd7c29062431dd86061deefe4e3c6f07fa0d674930095f8dcedb0baf02c" dependencies = [ - "alloy-eips", + "alloy-eips 0.6.4", + "alloy-primitives", + "alloy-rlp", + "alloy-serde 0.6.4", + "auto_impl", + "c-kzg", + "derive_more 1.0.0", + "k256 0.13.4", + "serde", +] + +[[package]] +name = "alloy-consensus" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ba14856660f31807ebb26ce8f667e814c72694e1077e97ef102e326ad580f3f" +dependencies = [ + "alloy-eips 0.8.0", "alloy-primitives", "alloy-rlp", - "alloy-serde", + "alloy-serde 0.8.0", + "alloy-trie", "auto_impl", "c-kzg", "derive_more 1.0.0", @@ -148,6 +166,20 @@ dependencies = [ "serde", ] +[[package]] +name = "alloy-consensus-any" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28666307e76441e7af37a2b90cde7391c28112121bea59f4e0d804df8b20057e" +dependencies = [ + "alloy-consensus 0.8.0", + "alloy-eips 0.8.0", + "alloy-primitives", + "alloy-rlp", + "alloy-serde 0.8.0", + "serde", +] + [[package]] name = "alloy-contract" version = "0.6.4" @@ -156,24 +188,45 @@ checksum = "66430a72d5bf5edead101c8c2f0a24bada5ec9f3cf9909b3e08b6d6899b4803e" dependencies = [ "alloy-dyn-abi", "alloy-json-abi", - "alloy-network", - "alloy-network-primitives", + "alloy-network 0.6.4", + "alloy-network-primitives 0.6.4", "alloy-primitives", - "alloy-provider", - "alloy-pubsub", - "alloy-rpc-types-eth", + "alloy-provider 0.6.4", + "alloy-pubsub 0.6.4", + "alloy-rpc-types-eth 0.6.4", "alloy-sol-types", - "alloy-transport", + "alloy-transport 0.6.4", "futures 0.3.31", "futures-util", "thiserror 1.0.69", ] +[[package]] +name = "alloy-contract" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3510769905590b8991a8e63a5e0ab4aa72cf07a13ab5fbe23f12f4454d161da" +dependencies = [ + "alloy-dyn-abi", + "alloy-json-abi", + "alloy-network 0.8.0", + "alloy-network-primitives 0.8.0", + "alloy-primitives", + "alloy-provider 0.8.0", + "alloy-pubsub 0.8.0", + "alloy-rpc-types-eth 0.8.0", + "alloy-sol-types", + "alloy-transport 0.8.0", + "futures 0.3.31", + "futures-util", + "thiserror 2.0.6", +] + [[package]] name = "alloy-core" -version = "0.8.13" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8d22df68fa7d9744be0b1a9be3260e9aa089fbf41903ab182328333061ed186" +checksum = "c618bd382f0bc2ac26a7e4bfae01c9b015ca8f21b37ca40059ae35a7e62b3dc6" dependencies = [ "alloy-dyn-abi", "alloy-json-abi", @@ -184,9 +237,9 @@ dependencies = [ [[package]] name = "alloy-dyn-abi" -version = "0.8.13" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cf633ae9a1f0c82fdb9e559ed2be1c8e415c3e48fc47e1feaf32c6078ec0cdd" +checksum = "41056bde53ae10ffbbf11618efbe1e0290859e5eab0fe9ef82ebdb62f12a866f" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -241,7 +294,25 @@ dependencies = [ "alloy-eip7702", "alloy-primitives", "alloy-rlp", - "alloy-serde", + "alloy-serde 0.6.4", + "c-kzg", + "derive_more 1.0.0", + "once_cell", + "serde", + "sha2 0.10.8", +] + +[[package]] +name = "alloy-eips" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e922d558006ba371681d484d12aa73fe673d84884f83747730af7433c0e86d" +dependencies = [ + "alloy-eip2930", + "alloy-eip7702", + "alloy-primitives", + "alloy-rlp", + "alloy-serde 0.8.0", "c-kzg", "derive_more 1.0.0", "once_cell", @@ 
-256,15 +327,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e53f7877ded3921d18a0a9556d55bedf84535567198c9edab2aa23106da91855" dependencies = [ "alloy-primitives", - "alloy-serde", + "alloy-serde 0.6.4", + "serde", +] + +[[package]] +name = "alloy-genesis" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dca170827a7ca156b43588faebf9e9d27c27d0fb07cab82cfd830345e2b24f5" +dependencies = [ + "alloy-primitives", + "alloy-serde 0.8.0", + "alloy-trie", "serde", ] [[package]] name = "alloy-json-abi" -version = "0.8.13" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a500037938085feed8a20dbfc8fce58c599db68c948cfae711147175dee392c" +checksum = "c357da577dfb56998d01f574d81ad7a1958d248740a7981b205d69d65a7da404" dependencies = [ "alloy-primitives", "alloy-sol-type-parser", @@ -286,20 +369,34 @@ dependencies = [ "tracing", ] +[[package]] +name = "alloy-json-rpc" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9335278f50b0273e0a187680ee742bb6b154a948adf036f448575bacc5ccb315" +dependencies = [ + "alloy-primitives", + "alloy-sol-types", + "serde", + "serde_json", + "thiserror 2.0.6", + "tracing", +] + [[package]] name = "alloy-network" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea94b8ceb5c75d7df0a93ba0acc53b55a22b47b532b600a800a87ef04eb5b0b4" dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-json-rpc", - "alloy-network-primitives", + "alloy-consensus 0.6.4", + "alloy-eips 0.6.4", + "alloy-json-rpc 0.6.4", + "alloy-network-primitives 0.6.4", "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", - "alloy-signer", + "alloy-rpc-types-eth 0.6.4", + "alloy-serde 0.6.4", + "alloy-signer 0.6.4", "alloy-sol-types", "async-trait", "auto_impl", @@ -309,24 +406,79 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "alloy-network" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad4e6ad4230df8c4a254c20f8d6a84ab9df151bfca13f463177dbc96571cc1f8" +dependencies = [ + "alloy-consensus 0.8.0", + "alloy-consensus-any", + "alloy-eips 0.8.0", + "alloy-json-rpc 0.8.0", + "alloy-network-primitives 0.8.0", + "alloy-primitives", + "alloy-rpc-types-any", + "alloy-rpc-types-eth 0.8.0", + "alloy-serde 0.8.0", + "alloy-signer 0.8.0", + "alloy-sol-types", + "async-trait", + "auto_impl", + "futures-utils-wasm", + "serde", + "serde_json", + "thiserror 2.0.6", +] + [[package]] name = "alloy-network-primitives" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df9f3e281005943944d15ee8491534a1c7b3cbf7a7de26f8c433b842b93eb5f9" dependencies = [ - "alloy-consensus", - "alloy-eips", + "alloy-consensus 0.6.4", + "alloy-eips 0.6.4", + "alloy-primitives", + "alloy-serde 0.6.4", + "serde", +] + +[[package]] +name = "alloy-network-primitives" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4df88a2f8020801e0fefce79471d3946d39ca3311802dbbd0ecfdeee5e972e3" +dependencies = [ + "alloy-consensus 0.8.0", + "alloy-eips 0.8.0", "alloy-primitives", - "alloy-serde", + "alloy-serde 0.8.0", "serde", ] +[[package]] +name = "alloy-node-bindings" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2db5cefbc736b2b26a960dcf82279c70a03695dd11a0032a6dc27601eeb29182" +dependencies = [ + "alloy-genesis 0.8.0", + 
"alloy-primitives", + "k256 0.13.4", + "rand 0.8.5", + "serde_json", + "tempfile", + "thiserror 2.0.6", + "tracing", + "url", +] + [[package]] name = "alloy-primitives" -version = "0.8.13" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3aeeb5825c2fc8c2662167058347cd0cafc3cb15bcb5cdb1758a63c2dca0409e" +checksum = "6259a506ab13e1d658796c31e6e39d2e2ee89243bcc505ddc613b35732e0a430" dependencies = [ "alloy-rlp", "arbitrary", @@ -339,7 +491,7 @@ dependencies = [ "getrandom 0.2.15", "hashbrown 0.15.2", "hex-literal", - "indexmap 2.6.0", + "indexmap 2.7.0", "itoa", "k256 0.13.4", "keccak-asm", @@ -348,7 +500,7 @@ dependencies = [ "proptest-derive", "rand 0.8.5", "ruint", - "rustc-hash 2.0.0", + "rustc-hash 2.1.0", "serde", "sha3 0.10.8", "tiny-keccak 2.0.2", @@ -361,21 +513,62 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40c1f9eede27bf4c13c099e8e64d54efd7ce80ef6ea47478aa75d5d74e2dba3b" dependencies = [ "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-json-rpc", - "alloy-network", - "alloy-network-primitives", + "alloy-consensus 0.6.4", + "alloy-eips 0.6.4", + "alloy-json-rpc 0.6.4", + "alloy-network 0.6.4", + "alloy-network-primitives 0.6.4", "alloy-primitives", - "alloy-pubsub", - "alloy-rpc-client", - "alloy-rpc-types-eth", + "alloy-pubsub 0.6.4", + "alloy-rpc-client 0.6.4", + "alloy-rpc-types-eth 0.6.4", + "alloy-transport 0.6.4", + "alloy-transport-http 0.6.4", + "alloy-transport-ipc 0.6.4", + "alloy-transport-ws 0.6.4", + "async-stream", + "async-trait", + "auto_impl", + "dashmap 6.1.0", + "futures 0.3.31", + "futures-utils-wasm", + "lru", + "parking_lot 0.12.3", + "pin-project 1.1.7", + "reqwest 0.12.9", + "schnellru", + "serde", + "serde_json", + "thiserror 1.0.69", + "tokio", + "tracing", + "url", + "wasmtimer", +] + +[[package]] +name = "alloy-provider" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5115c74c037714e1b02a86f742289113afa5d494b5ea58308ba8aa378e739101" +dependencies = [ + "alloy-chains", + "alloy-consensus 0.8.0", + "alloy-eips 0.8.0", + "alloy-json-rpc 0.8.0", + "alloy-network 0.8.0", + "alloy-network-primitives 0.8.0", + "alloy-primitives", + "alloy-pubsub 0.8.0", + "alloy-rpc-client 0.8.0", + "alloy-rpc-types-debug", + "alloy-rpc-types-eth 0.8.0", "alloy-rpc-types-trace", "alloy-rpc-types-txpool", - "alloy-transport", - "alloy-transport-http", - "alloy-transport-ipc", - "alloy-transport-ws", + "alloy-transport 0.8.0", + "alloy-transport-http 0.8.0", + "alloy-transport-ipc 0.8.0", + "alloy-transport-ws 0.8.0", "async-stream", "async-trait", "auto_impl", @@ -389,7 +582,7 @@ dependencies = [ "schnellru", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.6", "tokio", "tracing", "url", @@ -402,9 +595,28 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90f1f34232f77341076541c405482e4ae12f0ee7153d8f9969fc1691201b2247" dependencies = [ - "alloy-json-rpc", + "alloy-json-rpc 0.6.4", + "alloy-primitives", + "alloy-transport 0.6.4", + "bimap", + "futures 0.3.31", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower 0.5.1", + "tracing", +] + +[[package]] +name = "alloy-pubsub" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b073afa409698d1b9a30522565815f3bf7010e5b47b997cf399209e6110df097" +dependencies = [ + "alloy-json-rpc 0.8.0", "alloy-primitives", - "alloy-transport", + "alloy-transport 0.8.0", "bimap", "futures 
0.3.31", "serde", @@ -417,9 +629,9 @@ dependencies = [ [[package]] name = "alloy-rlp" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0822426598f95e45dd1ea32a738dac057529a709ee645fcc516ffa4cbde08f" +checksum = "f542548a609dca89fcd72b3b9f355928cf844d4363c5eed9c5273a3dd225e097" dependencies = [ "alloy-rlp-derive", "arrayvec 0.7.6", @@ -428,13 +640,13 @@ dependencies = [ [[package]] name = "alloy-rlp-derive" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f" +checksum = "5a833d97bf8a5f0f878daf2c8451fff7de7f9de38baa5a45d936ec718d81255a" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -443,13 +655,39 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "374dbe0dc3abdc2c964f36b3d3edf9cdb3db29d16bda34aa123f03d810bec1dd" dependencies = [ - "alloy-json-rpc", + "alloy-json-rpc 0.6.4", "alloy-primitives", - "alloy-pubsub", - "alloy-transport", - "alloy-transport-http", - "alloy-transport-ipc", - "alloy-transport-ws", + "alloy-pubsub 0.6.4", + "alloy-transport 0.6.4", + "alloy-transport-http 0.6.4", + "alloy-transport-ipc 0.6.4", + "alloy-transport-ws 0.6.4", + "futures 0.3.31", + "pin-project 1.1.7", + "reqwest 0.12.9", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower 0.5.1", + "tracing", + "url", + "wasmtimer", +] + +[[package]] +name = "alloy-rpc-client" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c6a0bd0ce5660ac48e4f3bb0c7c5c3a94db287a0be94971599d83928476cbcd" +dependencies = [ + "alloy-json-rpc 0.8.0", + "alloy-primitives", + "alloy-pubsub 0.8.0", + "alloy-transport 0.8.0", + "alloy-transport-http 0.8.0", + "alloy-transport-ipc 0.8.0", + "alloy-transport-ws 0.8.0", "futures 0.3.31", "pin-project 1.1.7", "reqwest 0.12.9", @@ -468,26 +706,60 @@ name = "alloy-rpc-types" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c74832aa474b670309c20fffc2a869fa141edab7c79ff7963fad0a08de60bae1" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-engine 0.6.4", + "alloy-rpc-types-eth 0.6.4", + "alloy-serde 0.6.4", + "serde", +] + +[[package]] +name = "alloy-rpc-types" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "374ac12e35bb90ebccd86e7c943ddba9590149a6e35cc4d9cd860d6635fd1018" dependencies = [ "alloy-primitives", "alloy-rpc-types-anvil", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", + "alloy-rpc-types-engine 0.8.0", + "alloy-rpc-types-eth 0.8.0", "alloy-rpc-types-trace", "alloy-rpc-types-txpool", - "alloy-serde", + "alloy-serde 0.8.0", "serde", ] [[package]] name = "alloy-rpc-types-anvil" -version = "0.6.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca97963132f78ddfc60e43a017348e6d52eea983925c23652f5b330e8e02291" +checksum = "f0b85a5f5f5d99047544f4ec31330ee15121dcb8ef5af3e791a5207e6b92b05b" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-eth 0.8.0", + "alloy-serde 0.8.0", + "serde", +] + +[[package]] +name = "alloy-rpc-types-any" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea98f81bcd759dbfa3601565f9d7a02220d8ef1d294ec955948b90aaafbfd857" +dependencies = [ + "alloy-consensus-any", + "alloy-rpc-types-eth 0.8.0", + 
"alloy-serde 0.8.0", +] + +[[package]] +name = "alloy-rpc-types-debug" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fd14f68a482e67dfba52d404dfff1d3b0d9fc3b4775bd0923f3175d7661c3bd" dependencies = [ "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", "serde", ] @@ -497,11 +769,27 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f56294dce86af23ad6ee8df46cf8b0d292eb5d1ff67dc88a0886051e32b1faf" dependencies = [ - "alloy-consensus", - "alloy-eips", + "alloy-consensus 0.6.4", + "alloy-eips 0.6.4", "alloy-primitives", "alloy-rlp", - "alloy-serde", + "alloy-serde 0.6.4", + "derive_more 1.0.0", + "serde", + "strum", +] + +[[package]] +name = "alloy-rpc-types-engine" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ca5898f753ff0d15a0dc955c169523d8fee57e05bb5a38a398b3451b0b988be" +dependencies = [ + "alloy-consensus 0.8.0", + "alloy-eips 0.8.0", + "alloy-primitives", + "alloy-rlp", + "alloy-serde 0.8.0", "derive_more 1.0.0", "jsonwebtoken", "rand 0.8.5", @@ -515,12 +803,32 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8a477281940d82d29315846c7216db45b15e90bcd52309da9f54bcf7ad94a11" dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network-primitives", + "alloy-consensus 0.6.4", + "alloy-eips 0.6.4", + "alloy-network-primitives 0.6.4", "alloy-primitives", "alloy-rlp", - "alloy-serde", + "alloy-serde 0.6.4", + "alloy-sol-types", + "derive_more 1.0.0", + "itertools 0.13.0", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-rpc-types-eth" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e518b0a7771e00728f18be0708f828b18a1cfc542a7153bef630966a26388e0" +dependencies = [ + "alloy-consensus 0.8.0", + "alloy-consensus-any", + "alloy-eips 0.8.0", + "alloy-network-primitives 0.8.0", + "alloy-primitives", + "alloy-rlp", + "alloy-serde 0.8.0", "alloy-sol-types", "derive_more 1.0.0", "itertools 0.13.0", @@ -530,27 +838,27 @@ dependencies = [ [[package]] name = "alloy-rpc-types-trace" -version = "0.6.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecd8b4877ef520c138af702097477cdd19504a8e1e4675ba37e92ba40f2d3c6f" +checksum = "cdff93fa38be6982f8613a060e18fa0a37ce440d69ed3b7f37c6c69036ce1c53" dependencies = [ "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", + "alloy-rpc-types-eth 0.8.0", + "alloy-serde 0.8.0", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.6", ] [[package]] name = "alloy-rpc-types-txpool" -version = "0.6.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d4ab49acf90a71f7fb894dc5fd485f1f07a1e348966c714c4d1e0b7478850a8" +checksum = "2d9dc647985db41fd164e807577134da1179b9f5ba0959f8698d6587eaa568f5" dependencies = [ "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", + "alloy-rpc-types-eth 0.8.0", + "alloy-serde 0.8.0", "serde", ] @@ -565,11 +873,36 @@ dependencies = [ "serde_json", ] +[[package]] +name = "alloy-serde" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed3dc8d4a08ffc90c1381d39a4afa2227668259a42c97ab6eecf51cbd82a8761" +dependencies = [ + "alloy-primitives", + "serde", + "serde_json", +] + [[package]] name = "alloy-signer" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"2e10aec39d60dc27edcac447302c7803d2371946fb737245320a05b78eb2fafd" +dependencies = [ + "alloy-primitives", + "async-trait", + "auto_impl", + "elliptic-curve 0.13.8", + "k256 0.13.4", + "thiserror 1.0.69", +] + +[[package]] +name = "alloy-signer" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16188684100f6e0f2a2b949968fe3007749c5be431549064a1bce4e7b3a196a9" dependencies = [ "alloy-dyn-abi", "alloy-primitives", @@ -578,62 +911,62 @@ dependencies = [ "auto_impl", "elliptic-curve 0.13.8", "k256 0.13.4", - "thiserror 1.0.69", + "thiserror 2.0.6", ] [[package]] name = "alloy-signer-aws" -version = "0.6.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0109e5b18079aec2a022e4bc9db1d74bcc046f8b66274ffa8b0e4322b44b2b44" +checksum = "fe06d524ac84fefce1184f2d1273704e62faade7ff1f29c17ac9d493d3ffbdbf" dependencies = [ - "alloy-consensus", - "alloy-network", + "alloy-consensus 0.8.0", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-signer", + "alloy-signer 0.8.0", "async-trait", "aws-sdk-kms", "k256 0.13.4", "spki 0.7.3", - "thiserror 1.0.69", + "thiserror 2.0.6", "tracing", ] [[package]] name = "alloy-signer-gcp" -version = "0.6.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "558651eb0d76bcf2224de694481e421112fa2cbc6fe6a413cc76fd67e14cf0d7" +checksum = "492cedcb4819a588aaef8d59edd5d65291f485d25f64b2aa0806dd86feeafd18" dependencies = [ - "alloy-consensus", - "alloy-network", + "alloy-consensus 0.8.0", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-signer", + "alloy-signer 0.8.0", "async-trait", "gcloud-sdk", "k256 0.13.4", "spki 0.7.3", - "thiserror 1.0.69", + "thiserror 2.0.6", "tracing", ] [[package]] name = "alloy-signer-ledger" -version = "0.6.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29781b6a064b6235de4ec3cc0810f59fe227b8d31258f23a077570fc9525d7a6" +checksum = "426409a02587b98e118d2fd32dda3f423805e264a32f9e247a65164163bc0e9b" dependencies = [ - "alloy-consensus", + "alloy-consensus 0.8.0", "alloy-dyn-abi", - "alloy-network", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-signer", + "alloy-signer 0.8.0", "alloy-sol-types", "async-trait", "coins-ledger", "futures-util", "semver 1.0.23", - "thiserror 1.0.69", + "thiserror 2.0.6", "tracing", ] @@ -643,74 +976,90 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8396f6dff60700bc1d215ee03d86ff56de268af96e2bf833a14d0bafcab9882" dependencies = [ - "alloy-consensus", - "alloy-network", + "alloy-consensus 0.6.4", + "alloy-network 0.6.4", + "alloy-primitives", + "alloy-signer 0.6.4", + "async-trait", + "k256 0.13.4", + "rand 0.8.5", + "thiserror 1.0.69", +] + +[[package]] +name = "alloy-signer-local" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2184dab8c9493ab3e1c9f6bd3bdb563ed322b79023d81531935e84a4fdf7cf1" +dependencies = [ + "alloy-consensus 0.8.0", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-signer", + "alloy-signer 0.8.0", "async-trait", "coins-bip32", "coins-bip39", "eth-keystore", "k256 0.13.4", "rand 0.8.5", - "thiserror 1.0.69", + "thiserror 2.0.6", ] [[package]] name = "alloy-signer-trezor" -version = "0.6.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21267541177607141a5db6fd1abed5a46553b7a6d9363cf3d047721634705905" +checksum = 
"290ead62e020b751761de95f60056340faba341b20493ae929013d1357b9ba5b" dependencies = [ - "alloy-consensus", - "alloy-network", + "alloy-consensus 0.8.0", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-signer", + "alloy-signer 0.8.0", "async-trait", "semver 1.0.23", - "thiserror 1.0.69", + "thiserror 2.0.6", "tracing", "trezor-client", ] [[package]] name = "alloy-sol-macro" -version = "0.8.13" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c0279d09463a4695788a3622fd95443625f7be307422deba4b55dd491a9c7a1" +checksum = "d9d64f851d95619233f74b310f12bcf16e0cbc27ee3762b6115c14a84809280a" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", "proc-macro-error2", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "alloy-sol-macro-expander" -version = "0.8.13" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4feea540fc8233df2ad1156efd744b2075372f43a8f942a68b3b19c8a00e2c12" +checksum = "6bf7ed1574b699f48bf17caab4e6e54c6d12bc3c006ab33d58b1e227c1c3559f" dependencies = [ "alloy-json-abi", "alloy-sol-macro-input", "const-hex", "heck", - "indexmap 2.6.0", + "indexmap 2.7.0", "proc-macro-error2", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", "syn-solidity", "tiny-keccak 2.0.2", ] [[package]] name = "alloy-sol-macro-input" -version = "0.8.13" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0ad281f3d1b613af814b66977ee698e443d4644a1510962d0241f26e0e53ae" +checksum = "8c02997ccef5f34f9c099277d4145f183b422938ed5322dc57a089fe9b9ad9ee" dependencies = [ "alloy-json-abi", "const-hex", @@ -719,15 +1068,15 @@ dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", "serde_json", - "syn 2.0.89", + "syn 2.0.90", "syn-solidity", ] [[package]] name = "alloy-sol-type-parser" -version = "0.8.13" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96eff16c797438add6c37bb335839d015b186c5421ee5626f5559a7bfeb38ef5" +checksum = "ce13ff37285b0870d0a0746992a4ae48efaf34b766ae4c2640fa15e5305f8e73" dependencies = [ "serde", "winnow 0.6.20", @@ -735,9 +1084,9 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.8.13" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff34e0682d6665da243a3e81da96f07a2dd50f7e64073e382b1a141f5a2a2f6" +checksum = "1174cafd6c6d810711b4e00383037bdb458efc4fe3dbafafa16567e0320c54d8" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -752,7 +1101,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f99acddb34000d104961897dbb0240298e8b775a7efffb9fda2a1a3efedd65b3" dependencies = [ - "alloy-json-rpc", + "alloy-json-rpc 0.6.4", "base64 0.22.1", "futures-util", "futures-utils-wasm", @@ -766,14 +1115,49 @@ dependencies = [ "wasmtimer", ] +[[package]] +name = "alloy-transport" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "628be5b9b75e4f4c4f2a71d985bbaca4f23de356dc83f1625454c505f5eef4df" +dependencies = [ + "alloy-json-rpc 0.8.0", + "base64 0.22.1", + "futures-util", + "futures-utils-wasm", + "serde", + "serde_json", + "thiserror 2.0.6", + "tokio", + "tower 0.5.1", + "tracing", + "url", + "wasmtimer", +] + [[package]] name = "alloy-transport-http" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5dc013132e34eeadaa0add7e74164c1503988bfba8bae885b32e0918ba85a8a6" dependencies = [ - "alloy-json-rpc", - "alloy-transport", + "alloy-json-rpc 0.6.4", + "alloy-transport 0.6.4", + "reqwest 0.12.9", + "serde_json", + "tower 0.5.1", + "tracing", + "url", +] + +[[package]] +name = "alloy-transport-http" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e24412cf72f79c95cd9b1d9482e3a31f9d94c24b43c4b3b710cc8d4341eaab0" +dependencies = [ + "alloy-json-rpc 0.8.0", + "alloy-transport 0.8.0", "reqwest 0.12.9", "serde_json", "tower 0.5.1", @@ -787,9 +1171,28 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063edc0660e81260653cc6a95777c29d54c2543a668aa5da2359fb450d25a1ba" dependencies = [ - "alloy-json-rpc", - "alloy-pubsub", - "alloy-transport", + "alloy-json-rpc 0.6.4", + "alloy-pubsub 0.6.4", + "alloy-transport 0.6.4", + "bytes", + "futures 0.3.31", + "interprocess", + "pin-project 1.1.7", + "serde_json", + "tokio", + "tokio-util 0.7.13", + "tracing", +] + +[[package]] +name = "alloy-transport-ipc" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0577a1f67ce70ece3f2b27cf1011da7222ef0a5701f7dcb558e5356278eeb531" +dependencies = [ + "alloy-json-rpc 0.8.0", + "alloy-pubsub 0.8.0", + "alloy-transport 0.8.0", "bytes", "futures 0.3.31", "interprocess", @@ -798,21 +1201,39 @@ dependencies = [ "serde_json", "tempfile", "tokio", - "tokio-util 0.7.12", + "tokio-util 0.7.13", + "tracing", +] + +[[package]] +name = "alloy-transport-ws" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abd170e600801116d5efe64f74a4fc073dbbb35c807013a7d0a388742aeebba0" +dependencies = [ + "alloy-pubsub 0.6.4", + "alloy-transport 0.6.4", + "futures 0.3.31", + "http 1.2.0", + "rustls 0.23.19", + "serde_json", + "tokio", + "tokio-tungstenite", "tracing", + "ws_stream_wasm", ] [[package]] name = "alloy-transport-ws" -version = "0.6.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abd170e600801116d5efe64f74a4fc073dbbb35c807013a7d0a388742aeebba0" +checksum = "1ca46272d17f9647fdb56080ed26c72b3ea5078416831130f5ed46f3b4be0ed6" dependencies = [ - "alloy-pubsub", - "alloy-transport", + "alloy-pubsub 0.8.0", + "alloy-transport 0.8.0", "futures 0.3.31", - "http 1.1.0", - "rustls 0.23.18", + "http 1.2.0", + "rustls 0.23.19", "serde_json", "tokio", "tokio-tungstenite", @@ -822,12 +1243,13 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.6.0" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9703ce68b97f8faae6f7739d1e003fc97621b856953cbcdbb2b515743f23288" +checksum = "3a5fd8fea044cc9a8c8a50bb6f28e31f0385d820f116c5b98f6f4e55d6e5590b" dependencies = [ "alloy-primitives", "alloy-rlp", + "arrayvec 0.7.6", "derive_more 1.0.0", "nybbles", "serde", @@ -955,27 +1377,27 @@ name = "anvil" version = "0.0.2" dependencies = [ "alloy-chains", - "alloy-consensus", - "alloy-contract", + "alloy-consensus 0.8.0", + "alloy-contract 0.8.0", "alloy-dyn-abi", - "alloy-eips", - "alloy-genesis", + "alloy-eips 0.8.0", + "alloy-genesis 0.8.0", "alloy-json-abi", - "alloy-json-rpc", - "alloy-network", + "alloy-json-rpc 0.8.0", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-provider", - "alloy-pubsub", + "alloy-provider 0.8.0", + "alloy-pubsub 0.8.0", "alloy-rlp", - "alloy-rpc-client", - "alloy-rpc-types", - "alloy-serde", - "alloy-signer", - 
"alloy-signer-local", + "alloy-rpc-client 0.8.0", + "alloy-rpc-types 0.8.0", + "alloy-serde 0.8.0", + "alloy-signer 0.8.0", + "alloy-signer-local 0.8.0", "alloy-sol-types", - "alloy-transport", - "alloy-transport-ipc", - "alloy-transport-ws", + "alloy-transport 0.8.0", + "alloy-transport-ipc 0.8.0", + "alloy-transport-ws 0.8.0", "alloy-trie", "anvil-core", "anvil-rpc", @@ -1010,7 +1432,7 @@ dependencies = [ "serde_repr", "similar-asserts", "tempfile", - "thiserror 1.0.69", + "thiserror 2.0.6", "tikv-jemallocator", "tokio", "tower 0.4.13", @@ -1024,14 +1446,14 @@ dependencies = [ name = "anvil-core" version = "0.0.2" dependencies = [ - "alloy-consensus", + "alloy-consensus 0.8.0", "alloy-dyn-abi", - "alloy-eips", - "alloy-network", + "alloy-eips 0.8.0", + "alloy-network 0.8.0", "alloy-primitives", "alloy-rlp", - "alloy-rpc-types", - "alloy-serde", + "alloy-rpc-types 0.8.0", + "alloy-serde 0.8.0", "alloy-trie", "bytes", "foundry-common", @@ -1041,7 +1463,7 @@ dependencies = [ "revm", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.6", ] [[package]] @@ -1067,17 +1489,17 @@ dependencies = [ "pin-project 1.1.7", "serde", "serde_json", - "thiserror 1.0.69", - "tokio-util 0.7.12", + "thiserror 2.0.6", + "tokio-util 0.7.13", "tower-http", "tracing", ] [[package]] name = "anyhow" -version = "1.0.93" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" +checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" [[package]] name = "arbitrary" @@ -1090,9 +1512,9 @@ dependencies = [ [[package]] name = "ariadne" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44055e597c674aef7cb903b2b9f6e4cba1277ed0d2d61dae7cd52d7ffa81f8e2" +checksum = "31beedec3ce83ae6da3a79592b3d8d7afd146a5b15bb9bb940279aced60faa89" dependencies = [ "unicode-width 0.1.14", "yansi", @@ -1269,6 +1691,9 @@ name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +dependencies = [ + "serde", +] [[package]] name = "ascii-canvas" @@ -1309,7 +1734,7 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1331,7 +1756,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1342,7 +1767,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1404,7 +1829,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1426,7 +1851,7 @@ dependencies = [ "aws-sdk-sts", "aws-smithy-async", "aws-smithy-http", - "aws-smithy-json", + "aws-smithy-json 0.60.7", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -1457,20 +1882,21 @@ dependencies = [ [[package]] name = "aws-lc-rs" -version = "1.11.1" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f47bb8cc16b669d267eeccf585aea077d0882f4777b1c1f740217885d6e6e5a3" +checksum = 
"cdd82dba44d209fddb11c190e0a94b78651f95299598e472215667417a03ff1d" dependencies = [ "aws-lc-sys", + "mirai-annotations", "paste", "zeroize", ] [[package]] name = "aws-lc-sys" -version = "0.23.1" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2101df3813227bbaaaa0b04cd61c534c7954b22bd68d399b440be937dc63ff7" +checksum = "df7a4168111d7eb622a31b214057b8509c0a7e1794f44c546d742330dc793972" dependencies = [ "bindgen 0.69.5", "cc", @@ -1483,9 +1909,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.4.3" +version = "1.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a10d5c055aa540164d9561a0e2e74ad30f0dcf7393c3a92f6733ddf9c5762468" +checksum = "b5ac934720fbb46206292d2c75b57e67acfc56fe7dfd34fb9a02334af08409ea" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -1508,15 +1934,15 @@ dependencies = [ [[package]] name = "aws-sdk-kms" -version = "1.50.0" +version = "1.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfd059dacda4dfd5b57f2bd453fc6555f9acb496cb77508d517da24cf5d73167" +checksum = "3c30f6fd5646b99d9b45ec3a0c22e67112c175b2383100c960d7ee39d96c8d96" dependencies = [ "aws-credential-types", "aws-runtime", "aws-smithy-async", "aws-smithy-http", - "aws-smithy-json", + "aws-smithy-json 0.61.1", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -1530,15 +1956,15 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.49.0" +version = "1.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09677244a9da92172c8dc60109b4a9658597d4d298b188dd0018b6a66b410ca4" +checksum = "05ca43a4ef210894f93096039ef1d6fa4ad3edfabb3be92b80908b9f2e4b4eab" dependencies = [ "aws-credential-types", "aws-runtime", "aws-smithy-async", "aws-smithy-http", - "aws-smithy-json", + "aws-smithy-json 0.61.1", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -1552,15 +1978,15 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.50.0" +version = "1.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fea2f3a8bb3bd10932ae7ad59cc59f65f270fc9183a7e91f501dc5efbef7ee" +checksum = "abaf490c2e48eed0bb8e2da2fb08405647bd7f253996e0f93b981958ea0f73b0" dependencies = [ "aws-credential-types", "aws-runtime", "aws-smithy-async", "aws-smithy-http", - "aws-smithy-json", + "aws-smithy-json 0.61.1", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -1574,15 +2000,15 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.50.0" +version = "1.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ada54e5f26ac246dc79727def52f7f8ed38915cb47781e2a72213957dc3a7d5" +checksum = "b68fde0d69c8bfdc1060ea7da21df3e39f6014da316783336deff0a9ec28f4bf" dependencies = [ "aws-credential-types", "aws-runtime", "aws-smithy-async", "aws-smithy-http", - "aws-smithy-json", + "aws-smithy-json 0.61.1", "aws-smithy-query", "aws-smithy-runtime", "aws-smithy-runtime-api", @@ -1597,9 +2023,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.2.5" +version = "1.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5619742a0d8f253be760bfbb8e8e8368c69e3587e4637af5754e488a611499b1" +checksum = "7d3820e0c08d0737872ff3c7c1f21ebbb6693d832312d6152bf18ef50a5471c2" dependencies = [ "aws-credential-types", "aws-smithy-http", @@ -1610,7 +2036,7 @@ dependencies = [ "hex", "hmac", "http 0.2.12", - "http 1.1.0", + "http 
1.2.0", "once_cell", "percent-encoding", "sha2 0.10.8", @@ -1658,6 +2084,15 @@ dependencies = [ "aws-smithy-types", ] +[[package]] +name = "aws-smithy-json" +version = "0.61.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee4e69cc50921eb913c6b662f8d909131bb3e6ad6cb6090d3a39b66fc5c52095" +dependencies = [ + "aws-smithy-types", +] + [[package]] name = "aws-smithy-query" version = "0.60.7" @@ -1670,9 +2105,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.7.3" +version = "1.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be28bd063fa91fd871d131fc8b68d7cd4c5fa0869bea68daca50dcb1cbd76be2" +checksum = "9f20685047ca9d6f17b994a07f629c813f08b5bce65523e47124879e60103d45" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -1705,7 +2140,7 @@ dependencies = [ "aws-smithy-types", "bytes", "http 0.2.12", - "http 1.1.0", + "http 1.2.0", "pin-project-lite", "tokio", "tracing", @@ -1722,7 +2157,7 @@ dependencies = [ "bytes", "bytes-utils", "http 0.2.12", - "http 1.1.0", + "http 1.2.0", "http-body 0.4.6", "http-body 1.0.1", "http-body-util", @@ -1769,7 +2204,7 @@ dependencies = [ "base64 0.22.1", "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "hyper 1.5.1", @@ -1804,7 +2239,7 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "mime", @@ -1918,9 +2353,9 @@ dependencies = [ [[package]] name = "bigdecimal" -version = "0.4.6" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f850665a0385e070b64c38d2354e6c104c8479c59868d1e48a0c13ee2c7a1c1" +checksum = "51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee" dependencies = [ "autocfg", "libm", @@ -1962,7 +2397,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -1974,7 +2409,7 @@ dependencies = [ "bitflags 2.6.0", "cexpr", "clang-sys", - "itertools 0.12.1", + "itertools 0.11.0", "lazy_static", "lazycell", "log", @@ -1984,7 +2419,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.89", + "syn 2.0.90", "which 4.4.2", ] @@ -2135,39 +2570,16 @@ dependencies = [ "zeroize", ] -[[package]] -name = "bon" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97493a391b4b18ee918675fb8663e53646fd09321c58b46afa04e8ce2499c869" -dependencies = [ - "bon-macros 2.3.0", - "rustversion", -] - [[package]] name = "bon" version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f265cdb2e8501f1c952749e78babe8f1937be92c98120e5f78fc72d634682bad" dependencies = [ - "bon-macros 3.3.0", + "bon-macros", "rustversion", ] -[[package]] -name = "bon-macros" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2af3eac944c12cdf4423eab70d310da0a8e5851a18ffb192c0a5e3f7ae1663" -dependencies = [ - "darling 0.20.10", - "ident_case", - "proc-macro2 1.0.92", - "quote 1.0.37", - "syn 2.0.89", -] - [[package]] name = "bon-macros" version = "3.3.0" @@ -2180,7 +2592,7 @@ dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", "rustversion", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -2216,9 +2628,9 @@ dependencies = [ [[package]] name = "borsh" -version = "1.5.3" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03" 
+checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed" dependencies = [ "borsh-derive", "cfg_aliases 0.2.1", @@ -2226,15 +2638,16 @@ dependencies = [ [[package]] name = "borsh-derive" -version = "1.5.3" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2593a3b8b938bd68373196c9832f516be11fa487ef4ae745eb282e6a56a7244" +checksum = "c3ef8005764f53cd4dca619f5bf64cafd4664dada50ece25e4d81de54c80cc0b" dependencies = [ "once_cell", "proc-macro-crate 3.2.0", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", + "syn_derive", ] [[package]] @@ -2318,9 +2731,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" dependencies = [ "serde", ] @@ -2382,9 +2795,9 @@ dependencies = [ [[package]] name = "cargo-platform" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" dependencies = [ "serde", ] @@ -2427,21 +2840,22 @@ name = "cast" version = "0.0.2" dependencies = [ "alloy-chains", - "alloy-consensus", - "alloy-contract", + "alloy-consensus 0.8.0", + "alloy-contract 0.8.0", "alloy-dyn-abi", "alloy-json-abi", - "alloy-json-rpc", - "alloy-network", + "alloy-json-rpc 0.8.0", + "alloy-network 0.8.0", + "alloy-node-bindings", "alloy-primitives", - "alloy-provider", + "alloy-provider 0.8.0", "alloy-rlp", - "alloy-rpc-types", - "alloy-serde", - "alloy-signer", - "alloy-signer-local", + "alloy-rpc-types 0.8.0", + "alloy-serde 0.8.0", + "alloy-signer 0.8.0", + "alloy-signer-local 0.8.0", "alloy-sol-types", - "alloy-transport", + "alloy-transport 0.8.0", "alloy-zksync", "anvil", "async-trait", @@ -2471,7 +2885,7 @@ dependencies = [ "rayon", "regex", "rpassword", - "rustls 0.23.18", + "rustls 0.23.19", "semver 1.0.23", "serde", "serde_json", @@ -2495,9 +2909,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.1" +version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47" +checksum = "27f657647bcff5394bf56c7317665bbf790a137a50eaaa5c6bfbb9e27a518f2d" dependencies = [ "jobserver", "libc", @@ -2550,7 +2964,7 @@ dependencies = [ "alloy-dyn-abi", "alloy-json-abi", "alloy-primitives", - "alloy-rpc-types", + "alloy-rpc-types 0.8.0", "clap", "dirs 5.0.1", "eyre", @@ -2570,6 +2984,7 @@ dependencies = [ "serde_json", "serial_test", "solang-parser", + "solar-parse", "strum", "tikv-jemallocator", "time", @@ -2582,9 +2997,9 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", @@ -2761,9 +3176,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.21" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" +checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" dependencies = [ "clap_builder", "clap_derive", @@ -2771,9 +3186,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.21" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" +checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" dependencies = [ "anstream", "anstyle", @@ -2812,14 +3227,14 @@ dependencies = [ "heck", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "clap_lex" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "clearscreen" @@ -2859,9 +3274,9 @@ dependencies = [ [[package]] name = "cmake" -version = "0.1.52" +version = "0.1.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c682c223677e0e5b6b7f63a64b9351844c3f1b1678a68b7ee617e30fb082620e" +checksum = "fb1e43aa7fd152b1f968787f7dbcdeb306d1867ff373c69955211876c053f91a" dependencies = [ "cc", ] @@ -3065,9 +3480,9 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.13.2" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "487981fa1af147182687064d0a2c336586d337a606595ced9ffb0c685c250c73" +checksum = "4b0485bab839b018a8f1723fc5391819fea5f8f0f32288ef8a735fd096b6160c" dependencies = [ "cfg-if 1.0.0", "cpufeatures", @@ -3246,7 +3661,7 @@ checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6" dependencies = [ "bitflags 2.6.0", "crossterm_winapi", - "mio 1.0.2", + "mio 1.0.3", "parking_lot 0.12.3", "rustix", "signal-hook", @@ -3315,12 +3730,12 @@ dependencies = [ [[package]] name = "ctor" -version = "0.2.9" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" +checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f" dependencies = [ "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3366,7 +3781,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3414,7 +3829,7 @@ dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", "strsim 0.11.1", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3436,7 +3851,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core 0.20.10", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3543,7 +3958,7 @@ checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3564,7 +3979,7 @@ dependencies = [ "darling 0.20.10", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3574,7 +3989,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3587,7 +4002,7 @@ 
dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", "rustc_version 0.4.1", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3608,7 +4023,7 @@ dependencies = [ "convert_case 0.6.0", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", "unicode-xid 0.2.6", ] @@ -3722,14 +4137,14 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "divan" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccc40f214f0d9e897cfc72e2edfa5c225d3252f758c537f11ac0a80371c073a6" +checksum = "e0583193020b29b03682d8d33bb53a5b0f50df6daacece12ca99b904cfdcb8c4" dependencies = [ "cfg-if 1.0.0", "clap", @@ -3741,13 +4156,13 @@ dependencies = [ [[package]] name = "divan-macros" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bdb5411188f7f878a17964798c1264b6b0a9f915bd39b20bf99193c923e1b4e" +checksum = "8dc51d98e636f5e3b0759a39257458b22619cac7e96d932da6eeb052891bb67c" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3937,9 +4352,9 @@ checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" [[package]] name = "encoding_rs" -version = "0.8.35" +version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ "cfg-if 1.0.0", ] @@ -3959,7 +4374,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -3970,7 +4385,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4025,7 +4440,7 @@ dependencies = [ "eyre", "futures 0.3.31", "hex", - "indexmap 2.6.0", + "indexmap 2.7.0", "itertools 0.10.5", "jsonrpc-core", "jsonrpc-core-client", @@ -4053,12 +4468,12 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -4216,7 +4631,7 @@ dependencies = [ "regex", "serde", "serde_json", - "syn 2.0.89", + "syn 2.0.90", "toml 0.8.19", "walkdir", ] @@ -4244,7 +4659,7 @@ dependencies = [ "serde", "serde_json", "strum", - "syn 2.0.89", + "syn 2.0.90", "tempfile", "thiserror 1.0.69", "tiny-keccak 2.0.2", @@ -4285,9 +4700,9 @@ dependencies = [ [[package]] name = "evmole" -version = "0.5.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6fcfb15a14bc209e2b3d2bd32291ec445b1e348d7d9d986aa61a09149350fd7" +checksum = "c19906a94bb5656904a6c9c0f36d492cb1da96f284d59bb56f555bd472d96e51" dependencies = [ "alloy-dyn-abi", "alloy-primitives", @@ -4311,9 +4726,9 @@ checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183" [[package]] name = "fastrand" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "fastrlp" @@ -4461,9 +4876,9 @@ dependencies = [ [[package]] name = "flume" -version = "0.11.1" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" +checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" dependencies = [ "futures-core", "futures-sink", @@ -4502,19 +4917,19 @@ name = "forge" version = "0.0.2" dependencies = [ "alloy-chains", - "alloy-consensus", + "alloy-consensus 0.8.0", "alloy-dyn-abi", "alloy-json-abi", - "alloy-network", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-provider", - "alloy-rpc-types", - "alloy-serde", - "alloy-signer", - "alloy-signer-local", + "alloy-provider 0.8.0", + "alloy-rpc-types 0.8.0", + "alloy-serde 0.8.0", + "alloy-signer 0.8.0", + "alloy-signer-local 0.8.0", "alloy-sol-macro-expander", "alloy-sol-macro-input", - "alloy-transport", + "alloy-transport 0.8.0", "alloy-zksync", "anvil", "async-trait", @@ -4567,18 +4982,20 @@ dependencies = [ "regex", "reqwest 0.12.9", "revm-inspectors", - "rustls 0.23.18", + "rustls 0.23.19", "semver 1.0.23", "serde", "serde_json", "similar", "similar-asserts", "solang-parser", + "solar-ast", + "solar-parse", "soldeer-commands", "strum", "svm-rs", "tempfile", - "thiserror 1.0.69", + "thiserror 2.0.6", "tikv-jemallocator", "tokio", "toml 0.8.19", @@ -4612,7 +5029,7 @@ dependencies = [ "serde", "serde_json", "solang-parser", - "thiserror 1.0.69", + "thiserror 2.0.6", "toml 0.8.19", "tracing", ] @@ -4627,7 +5044,7 @@ dependencies = [ "itertools 0.13.0", "similar-asserts", "solang-parser", - "thiserror 1.0.69", + "thiserror 2.0.6", "toml 0.8.19", "tracing", "tracing-subscriber", @@ -4638,19 +5055,18 @@ name = "forge-script" version = "0.0.2" dependencies = [ "alloy-chains", - "alloy-consensus", + "alloy-consensus 0.8.0", "alloy-dyn-abi", - "alloy-eips", + "alloy-eips 0.8.0", "alloy-json-abi", - "alloy-network", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-provider", - "alloy-rpc-types", - "alloy-serde", - "alloy-signer", - "alloy-transport", + "alloy-provider 0.8.0", + "alloy-rpc-types 0.8.0", + "alloy-serde 0.8.0", + "alloy-signer 0.8.0", + "alloy-transport 0.8.0", "alloy-zksync", - "async-recursion", "clap", "dialoguer", "dunce", @@ -4686,8 +5102,9 @@ dependencies = [ name = "forge-script-sequence" version = "0.0.2" dependencies = [ + "alloy-network 0.8.0", "alloy-primitives", - "alloy-rpc-types", + "alloy-rpc-types 0.8.0", "eyre", "foundry-common", "foundry-compilers", @@ -4713,7 +5130,7 @@ dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", "serde_json", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -4723,8 +5140,8 @@ dependencies = [ "alloy-dyn-abi", "alloy-json-abi", "alloy-primitives", - "alloy-provider", - "alloy-rpc-types", + "alloy-provider 0.8.0", + "alloy-rpc-types 0.8.0", "async-trait", "ciborium", "clap", @@ -4763,7 +5180,8 @@ dependencies = [ [[package]] name = "foundry-block-explorers" version = "0.9.0" -source = "git+https://github.com/Moonsong-Labs/block-explorers?branch=zksync-v0.9.0#6a8e00b830638ddcaa86ab83126a715a499ef482" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0faa449506113b4969029da2ac1df3a1b3201bf10c99a4a8e6d684977b80c938" dependencies = [ "alloy-chains", "alloy-json-abi", @@ -4781,16 +5199,17 @@ dependencies = [ name = "foundry-cheatcodes" 
version = "0.0.2" dependencies = [ - "alloy-consensus", + "alloy-consensus 0.8.0", "alloy-dyn-abi", - "alloy-genesis", + "alloy-genesis 0.8.0", "alloy-json-abi", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-provider", + "alloy-provider 0.8.0", "alloy-rlp", - "alloy-rpc-types", - "alloy-signer", - "alloy-signer-local", + "alloy-rpc-types 0.8.0", + "alloy-signer 0.8.0", + "alloy-signer-local 0.8.0", "alloy-sol-types", "base64 0.22.1", "chrono", @@ -4820,8 +5239,9 @@ dependencies = [ "revm", "revm-inspectors", "semver 1.0.23", + "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.6", "toml 0.8.19", "tracing", "vergen", @@ -4854,12 +5274,12 @@ version = "0.0.2" dependencies = [ "alloy-chains", "alloy-dyn-abi", - "alloy-eips", + "alloy-eips 0.8.0", "alloy-json-abi", "alloy-primitives", - "alloy-provider", + "alloy-provider 0.8.0", "alloy-rlp", - "alloy-transport", + "alloy-transport 0.8.0", "alloy-zksync", "clap", "color-eyre", @@ -4892,23 +5312,24 @@ dependencies = [ name = "foundry-common" version = "0.0.2" dependencies = [ - "alloy-consensus", - "alloy-contract", + "alloy-consensus 0.8.0", + "alloy-contract 0.8.0", "alloy-dyn-abi", - "alloy-eips", + "alloy-eips 0.8.0", "alloy-json-abi", - "alloy-json-rpc", + "alloy-json-rpc 0.8.0", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-provider", - "alloy-pubsub", - "alloy-rpc-client", - "alloy-rpc-types", - "alloy-serde", + "alloy-provider 0.8.0", + "alloy-pubsub 0.8.0", + "alloy-rpc-client 0.8.0", + "alloy-rpc-types 0.8.0", + "alloy-serde 0.8.0", "alloy-sol-types", - "alloy-transport", - "alloy-transport-http", - "alloy-transport-ipc", - "alloy-transport-ws", + "alloy-transport 0.8.0", + "alloy-transport-http 0.8.0", + "alloy-transport-ipc 0.8.0", + "alloy-transport-ws 0.8.0", "alloy-zksync", "anstream", "anstyle", @@ -4930,7 +5351,7 @@ dependencies = [ "serde_json", "similar-asserts", "terminal_size", - "thiserror 1.0.69", + "thiserror 2.0.6", "tokio", "tower 0.4.13", "tracing", @@ -4943,12 +5364,12 @@ dependencies = [ name = "foundry-common-fmt" version = "0.0.2" dependencies = [ - "alloy-consensus", + "alloy-consensus 0.8.0", "alloy-dyn-abi", - "alloy-network", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-rpc-types", - "alloy-serde", + "alloy-rpc-types 0.8.0", + "alloy-serde 0.8.0", "chrono", "comfy-table", "foundry-macros", @@ -4961,8 +5382,9 @@ dependencies = [ [[package]] name = "foundry-compilers" -version = "0.12.3" -source = "git+https://github.com/Moonsong-Labs/compilers?branch=zksync-v0.12.3#c0cc4bcaedac36b7b61b4dcf4a289c305af312aa" +version = "0.12.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7235826f00dd9196bcbdbb9c168ea38235601db95883a78819ba2303dee34bb8" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -4972,17 +5394,14 @@ dependencies = [ "dyn-clone", "foundry-compilers-artifacts", "foundry-compilers-core", - "fs4 0.8.4", "fs_extra", "futures-util", "home", "itertools 0.13.0", "md-5", - "once_cell", "path-slash", "rand 0.8.5", "rayon", - "reqwest 0.12.9", "semver 1.0.23", "serde", "serde_json", @@ -4991,28 +5410,28 @@ dependencies = [ "svm-rs", "svm-rs-builds", "tempfile", - "thiserror 1.0.69", + "thiserror 2.0.6", "tokio", "tracing", - "walkdir", "winnow 0.6.20", "yansi", ] [[package]] name = "foundry-compilers-artifacts" -version = "0.12.3" -source = "git+https://github.com/Moonsong-Labs/compilers?branch=zksync-v0.12.3#c0cc4bcaedac36b7b61b4dcf4a289c305af312aa" +version = "0.12.7" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "097bc5db7be5acf6d92938ad7daabf1932d7aa7c44326cdfc256531a53034d31" dependencies = [ "foundry-compilers-artifacts-solc", "foundry-compilers-artifacts-vyper", - "foundry-compilers-artifacts-zksolc", ] [[package]] name = "foundry-compilers-artifacts-solc" -version = "0.12.3" -source = "git+https://github.com/Moonsong-Labs/compilers?branch=zksync-v0.12.3#c0cc4bcaedac36b7b61b4dcf4a289c305af312aa" +version = "0.12.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4168053c1ad217866c677a074517e8d51988e5b1bad044b95f3c513aa5b6caa" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -5025,7 +5444,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "thiserror 1.0.69", + "thiserror 2.0.6", "tokio", "tracing", "walkdir", @@ -5034,49 +5453,29 @@ dependencies = [ [[package]] name = "foundry-compilers-artifacts-vyper" -version = "0.12.3" -source = "git+https://github.com/Moonsong-Labs/compilers?branch=zksync-v0.12.3#c0cc4bcaedac36b7b61b4dcf4a289c305af312aa" -dependencies = [ - "alloy-json-abi", - "alloy-primitives", - "foundry-compilers-artifacts-solc", - "foundry-compilers-core", - "path-slash", - "semver 1.0.23", - "serde", -] - -[[package]] -name = "foundry-compilers-artifacts-zksolc" -version = "0.12.3" -source = "git+https://github.com/Moonsong-Labs/compilers?branch=zksync-v0.12.3#c0cc4bcaedac36b7b61b4dcf4a289c305af312aa" +version = "0.12.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b7beffe7182551d01d249f022a5eab17c36c73b39ae8efd404e0fb9c98b9f80" dependencies = [ "alloy-json-abi", "alloy-primitives", "foundry-compilers-artifacts-solc", "foundry-compilers-core", - "md-5", "path-slash", - "rayon", "semver 1.0.23", "serde", - "serde_json", - "thiserror 1.0.69", - "tracing", - "walkdir", - "yansi", ] [[package]] name = "foundry-compilers-core" -version = "0.12.3" -source = "git+https://github.com/Moonsong-Labs/compilers?branch=zksync-v0.12.3#c0cc4bcaedac36b7b61b4dcf4a289c305af312aa" +version = "0.12.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac5247875b96dfb99da12d0cd0f6ce98954116d1cf8a9188d613b2a35cd6937b" dependencies = [ "alloy-primitives", "cfg-if 1.0.0", "dunce", "fs_extra", - "once_cell", "path-slash", "regex", "semver 1.0.23", @@ -5084,7 +5483,7 @@ dependencies = [ "serde_json", "svm-rs", "tempfile", - "thiserror 1.0.69", + "thiserror 2.0.6", "tokio", "walkdir", ] @@ -5104,6 +5503,7 @@ dependencies = [ "foundry-compilers", "glob", "globset", + "itertools 0.13.0", "mesc", "number_prefix", "path-slash", @@ -5117,7 +5517,7 @@ dependencies = [ "similar-asserts", "solang-parser", "tempfile", - "thiserror 1.0.69", + "thiserror 2.0.6", "toml 0.8.19", "toml_edit 0.22.22", "tracing", @@ -5168,7 +5568,7 @@ dependencies = [ "revm", "revm-inspectors", "serde", - "thiserror 1.0.69", + "thiserror 2.0.6", "tracing", ] @@ -5189,16 +5589,16 @@ dependencies = [ name = "foundry-evm-core" version = "0.0.2" dependencies = [ - "alloy-consensus", + "alloy-consensus 0.8.0", "alloy-dyn-abi", - "alloy-genesis", + "alloy-genesis 0.8.0", "alloy-json-abi", - "alloy-network", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-provider", - "alloy-rpc-types", + "alloy-provider 0.8.0", + "alloy-rpc-types 0.8.0", "alloy-sol-types", - "alloy-transport", + "alloy-transport 0.8.0", "auto_impl", "eyre", "foundry-cheatcodes-spec", @@ -5215,7 +5615,7 @@ dependencies = [ "revm-inspectors", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.6", "tokio", "tracing", "url", @@ -5252,14 +5652,14 
@@ dependencies = [ "foundry-evm-coverage", "foundry-evm-traces", "foundry-zksync-core", - "indexmap 2.6.0", + "indexmap 2.7.0", "itertools 0.13.0", "parking_lot 0.12.3", "proptest", "rand 0.8.5", "revm", "serde", - "thiserror 1.0.69", + "thiserror 2.0.6", "tracing", ] @@ -5286,7 +5686,8 @@ dependencies = [ "revm", "revm-inspectors", "serde", - "solang-parser", + "serde_json", + "solar-parse", "tempfile", "tokio", "tracing", @@ -5294,22 +5695,23 @@ dependencies = [ [[package]] name = "foundry-fork-db" -version = "0.7.1" -source = "git+https://github.com/Moonsong-Labs/foundry-zksync-fork-db?branch=zksync-v0.7.1#402d191fa5c4d12a003f5dca0e5c32611c3476f7" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "491e9f9f138086b3627a8c406730dfbb6afcdcf688e6da0eb15df52f0c8ed163" dependencies = [ - "alloy-consensus", + "alloy-consensus 0.8.0", "alloy-primitives", - "alloy-provider", - "alloy-rpc-types", - "alloy-serde", - "alloy-transport", + "alloy-provider 0.8.0", + "alloy-rpc-types 0.8.0", + "alloy-serde 0.8.0", + "alloy-transport 0.8.0", "eyre", "futures 0.3.31", "parking_lot 0.12.3", "revm", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.6", "tokio", "tracing", "url", @@ -5322,7 +5724,7 @@ dependencies = [ "alloy-primitives", "foundry-compilers", "semver 1.0.23", - "thiserror 1.0.69", + "thiserror 2.0.6", ] [[package]] @@ -5332,7 +5734,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5340,7 +5742,7 @@ name = "foundry-test-utils" version = "0.0.2" dependencies = [ "alloy-primitives", - "alloy-provider", + "alloy-provider 0.8.0", "era_test_node", "eyre", "fd-lock", @@ -5355,6 +5757,7 @@ dependencies = [ "regex", "serde_json", "snapbox", + "tempfile", "tokio", "tracing", "tracing-subscriber", @@ -5365,15 +5768,15 @@ dependencies = [ name = "foundry-wallets" version = "0.0.2" dependencies = [ - "alloy-consensus", + "alloy-consensus 0.8.0", "alloy-dyn-abi", - "alloy-network", + "alloy-network 0.8.0", "alloy-primitives", - "alloy-signer", + "alloy-signer 0.8.0", "alloy-signer-aws", "alloy-signer-gcp", "alloy-signer-ledger", - "alloy-signer-local", + "alloy-signer-local 0.8.0", "alloy-signer-trezor", "alloy-sol-types", "async-trait", @@ -5387,7 +5790,7 @@ dependencies = [ "gcloud-sdk", "rpassword", "serde", - "thiserror 1.0.69", + "thiserror 2.0.6", "tokio", "tracing", ] @@ -5411,10 +5814,10 @@ dependencies = [ name = "foundry-zksync-core" version = "0.0.2" dependencies = [ - "alloy-network", + "alloy-network 0.8.0", "alloy-primitives", "alloy-sol-types", - "alloy-transport", + "alloy-transport 0.8.0", "alloy-zksync", "ansiterm", "era_test_node", @@ -5485,16 +5888,6 @@ dependencies = [ "zksync_bellman", ] -[[package]] -name = "fs4" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7e180ac76c23b45e767bd7ae9579bc0bb458618c4bc71835926e098e61d15f8" -dependencies = [ - "rustix", - "windows-sys 0.52.0", -] - [[package]] name = "fs4" version = "0.9.1" @@ -5622,7 +6015,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -5685,8 +6078,8 @@ dependencies = [ "hyper 1.5.1", "jsonwebtoken", "once_cell", - "prost 0.13.3", - "prost-types 0.13.3", + "prost 0.13.4", + "prost-types 0.13.4", "reqwest 0.12.9", "secret-vault-value", "serde", @@ -5702,9 +6095,9 @@ dependencies = [ [[package]] name = 
"generator" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb949699c3e4df3a183b1d2142cb24277057055ed23c68ed58894f76c517223" +checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd" dependencies = [ "cfg-if 1.0.0", "libc", @@ -5799,7 +6192,7 @@ dependencies = [ "bstr", "gix-path", "libc", - "thiserror 2.0.3", + "thiserror 2.0.6", ] [[package]] @@ -5811,7 +6204,7 @@ dependencies = [ "bstr", "itoa", "jiff", - "thiserror 2.0.3", + "thiserror 2.0.6", ] [[package]] @@ -5902,7 +6295,7 @@ dependencies = [ "gix-trace", "home", "once_cell", - "thiserror 2.0.3", + "thiserror 2.0.6", ] [[package]] @@ -5974,7 +6367,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd520d09f9f585b34b32aba1d0b36ada89ab7fefb54a8ca3fe37fc482a750937" dependencies = [ "bstr", - "thiserror 2.0.3", + "thiserror 2.0.6", ] [[package]] @@ -6151,10 +6544,10 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.6.0", + "indexmap 2.7.0", "slab", "tokio", - "tokio-util 0.7.12", + "tokio-util 0.7.13", "tracing", ] @@ -6169,11 +6562,11 @@ dependencies = [ "fnv", "futures-core", "futures-sink", - "http 1.1.0", - "indexmap 2.6.0", + "http 1.2.0", + "indexmap 2.7.0", "slab", "tokio", - "tokio-util 0.7.12", + "tokio-util 0.7.13", "tracing", ] @@ -6356,7 +6749,7 @@ dependencies = [ "markup5ever", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6372,9 +6765,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -6399,7 +6792,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -6410,16 +6803,16 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "pin-project-lite", ] [[package]] name = "http-range-header" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08a397c49fec283e3d6211adbe480be95aae5f304cfb923e9970e08956d5168a" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" [[package]] name = "httparse" @@ -6483,7 +6876,7 @@ dependencies = [ "futures-channel", "futures-util", "h2 0.4.7", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "httparse", "httpdate", @@ -6517,15 +6910,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", - "http 1.1.0", + "http 1.2.0", "hyper 1.5.1", "hyper-util", "log", - "rustls 0.23.18", + "rustls 0.23.19", "rustls-native-certs 0.8.1", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", "tower-service", "webpki-roots", ] @@ -6581,7 +6974,7 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "hyper 1.5.1", "pin-project-lite", @@ -6729,7 +7122,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2 
1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6785,7 +7178,7 @@ dependencies = [ "futures 0.3.31", "gix-config", "ignore", - "miette 7.3.0", + "miette 7.4.0", "normalize-path", "project-origins", "radix_trie", @@ -6847,7 +7240,7 @@ checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -6874,9 +7267,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "arbitrary", "equivalent", @@ -6905,17 +7298,16 @@ checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" [[package]] name = "inferno" -version = "0.11.21" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "232929e1d75fe899576a3d5c7416ad0d88dbfbb3c3d6aa00873a7408a50ddb88" +checksum = "75a5d75fee4d36809e6b021e4b96b686e763d365ffdb03af2bd00786353f84fe" dependencies = [ "ahash 0.8.11", - "is-terminal", "itoa", "log", "num-format", "once_cell", - "quick-xml 0.26.0", + "quick-xml 0.37.1", "rgb", "str_stack", ] @@ -6966,7 +7358,7 @@ dependencies = [ "pretty_assertions", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -7043,15 +7435,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.13.0" @@ -7069,9 +7452,9 @@ checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jiff" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d9d414fc817d3e3d62b2598616733f76c4cc74fbac96069674739b881295c8" +checksum = "db69f08d4fb10524cacdb074c10b296299d71274ddbc830a8ee65666867002e9" dependencies = [ "jiff-tzdb-platform", "windows-sys 0.59.0", @@ -7123,10 +7506,11 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.72" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" dependencies = [ + "once_cell", "wasm-bindgen", ] @@ -7263,17 +7647,17 @@ dependencies = [ "futures-channel", "futures-util", "gloo-net", - "http 1.1.0", + "http 1.2.0", "jsonrpsee-core", "pin-project 1.1.7", - "rustls 0.23.18", + "rustls 0.23.19", "rustls-pki-types", "rustls-platform-verifier", "soketto", "thiserror 1.0.69", "tokio", - "tokio-rustls 0.26.0", - "tokio-util 0.7.12", + "tokio-rustls 0.26.1", + "tokio-util 0.7.13", "tracing", "url", ] @@ -7290,7 +7674,7 @@ dependencies = [ "bytes", "futures-timer", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "jsonrpsee-types", @@ -7321,7 +7705,7 @@ dependencies = [ "hyper-util", "jsonrpsee-core", "jsonrpsee-types", - "rustls 0.23.18", + "rustls 0.23.19", "rustls-platform-verifier", "serde", "serde_json", @@ -7342,7 +7726,7 @@ dependencies = [ "proc-macro-crate 3.2.0", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + 
"syn 2.0.90", ] [[package]] @@ -7353,7 +7737,7 @@ checksum = "654afab2e92e5d88ebd8a39d6074483f3f2bfdf91c5ac57fe285e7127cdd4f51" dependencies = [ "anyhow", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "hyper 1.5.1", @@ -7368,7 +7752,7 @@ dependencies = [ "thiserror 1.0.69", "tokio", "tokio-stream", - "tokio-util 0.7.12", + "tokio-util 0.7.13", "tower 0.4.13", "tracing", ] @@ -7380,7 +7764,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c465fbe385238e861fdc4d1c85e04ada6c1fd246161d26385c1b311724d2af" dependencies = [ "beef", - "http 1.1.0", + "http 1.2.0", "serde", "serde_json", "thiserror 1.0.69", @@ -7403,7 +7787,7 @@ version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c28759775f5cb2f1ea9667672d3fe2b0e701d1f4b7b67954e60afe7fd058b5e" dependencies = [ - "http 1.1.0", + "http 1.2.0", "jsonrpsee-client-transport", "jsonrpsee-core", "jsonrpsee-types", @@ -7547,9 +7931,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.165" +version = "0.2.168" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb4d3d38eab6c5239a362fa8bae48c03baf980a6e7079f063942d563ef3533e" +checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" [[package]] name = "libdbus-sys" @@ -7692,7 +8076,7 @@ dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", "regex-syntax 0.6.29", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -7870,12 +8254,12 @@ dependencies = [ [[package]] name = "miette" -version = "7.3.0" +version = "7.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "551cefdb9e93e2a40037f24cd139d88009a7660158c9d5d6076afeca5c8cfb82" +checksum = "317f146e2eb7021892722af37cf1b971f0a70c8406f487e24952667616192c64" dependencies = [ "cfg-if 1.0.0", - "miette-derive 7.3.0", + "miette-derive 7.4.0", "thiserror 1.0.69", "unicode-width 0.1.14", ] @@ -7888,18 +8272,18 @@ checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "miette-derive" -version = "7.3.0" +version = "7.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acd70bb0b631435c30f187b3c2b528d0b78c65654f542ead7857915e37c177da" +checksum = "23c9b935fbe1d6cbd1dac857b54a688145e2d93f48db36010514d0f612d0ad67" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -7971,17 +8355,22 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi 0.3.9", "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.52.0", ] +[[package]] +name = "mirai-annotations" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" + [[package]] name = "mockall" version = "0.13.1" @@ -8005,7 +8394,7 @@ dependencies = [ "cfg-if 1.0.0", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -8363,7 +8752,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + 
"syn 2.0.90", ] [[package]] @@ -8375,7 +8764,7 @@ dependencies = [ "proc-macro-crate 3.2.0", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -8423,32 +8812,32 @@ checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "op-alloy-consensus" -version = "0.6.8" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fce158d886815d419222daa67fcdf949a34f7950653a4498ebeb4963331f70ed" +checksum = "f9d95d0ec6457ad4d3d7fc0ad41db490b219587ed837ada87a26b28e535db15f" dependencies = [ - "alloy-consensus", - "alloy-eips", + "alloy-consensus 0.8.0", + "alloy-eips 0.8.0", "alloy-primitives", "alloy-rlp", - "alloy-serde", + "alloy-serde 0.8.0", "derive_more 1.0.0", "serde", - "thiserror 2.0.3", + "thiserror 2.0.6", ] [[package]] name = "op-alloy-rpc-types" -version = "0.6.8" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "060ebeaea8c772e396215f69bb86d231ec8b7f36aca0dd6ce367ceaa9a8c33e6" +checksum = "eba1b44e2035ec04cc61762cb9b5457d0ecd29d9af631e1a1c107ef571ce2318" dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network-primitives", + "alloy-consensus 0.8.0", + "alloy-eips 0.8.0", + "alloy-network-primitives 0.8.0", "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", + "alloy-rpc-types-eth 0.8.0", + "alloy-serde 0.8.0", "derive_more 1.0.0", "op-alloy-consensus", "serde", @@ -8521,7 +8910,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -8532,9 +8921,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-src" -version = "300.4.1+3.4.0" +version = "300.4.0+3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faa4eac4138c62414b5622d1b31c5c304f34b406b013c079c2bbc652fdd6678c" +checksum = "a709e02f2b4aca747929cca5ed248880847c650233cf8b8cdc48f40aaf4898a6" dependencies = [ "cc", ] @@ -8586,7 +8975,7 @@ checksum = "ad31e9de44ee3538fb9d64fe3376c1362f406162434609e79aea2a41a0af78ab" dependencies = [ "async-trait", "bytes", - "http 1.1.0", + "http 1.2.0", "opentelemetry", "reqwest 0.12.9", ] @@ -8599,12 +8988,12 @@ checksum = "6b925a602ffb916fb7421276b86756027b37ee708f9dce2dbdcc51739f07e727" dependencies = [ "async-trait", "futures-core", - "http 1.1.0", + "http 1.2.0", "opentelemetry", "opentelemetry-http", "opentelemetry-proto", "opentelemetry_sdk", - "prost 0.13.3", + "prost 0.13.4", "reqwest 0.12.9", "thiserror 1.0.69", "tokio", @@ -8619,7 +9008,7 @@ checksum = "30ee9f20bff9c984511a02f082dc8ede839e4a9bf15cc2487c8d6fea5ad850d9" dependencies = [ "opentelemetry", "opentelemetry_sdk", - "prost 0.13.3", + "prost 0.13.4", "tonic", ] @@ -8756,7 +9145,7 @@ dependencies = [ "proc-macro-crate 3.2.0", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -8864,7 +9253,7 @@ dependencies = [ "proc-macro2 1.0.92", "proc-macro2-diagnostics", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -8900,20 +9289,20 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" +checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" 
dependencies = [ "memchr", - "thiserror 1.0.69", + "thiserror 2.0.6", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd" +checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e" dependencies = [ "pest", "pest_generator", @@ -8921,22 +9310,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e" +checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b" dependencies = [ "pest", "pest_meta", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "pest_meta" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d" +checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea" dependencies = [ "once_cell", "pest", @@ -8950,7 +9339,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.6.0", + "indexmap 2.7.0", ] [[package]] @@ -9013,7 +9402,7 @@ dependencies = [ "phf_shared 0.11.2", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -9071,7 +9460,7 @@ checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -9193,7 +9582,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2 1.0.92", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -9303,7 +9692,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -9338,7 +9727,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", "version_check", "yansi", ] @@ -9350,7 +9739,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38ee68ae331824036479c84060534b18254c864fa73366c58d86db3b7b811619" dependencies = [ "futures 0.3.31", - "indexmap 2.6.0", + "indexmap 2.7.0", "nix 0.28.0", "tokio", "tracing", @@ -9394,7 +9783,7 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -9425,7 +9814,7 @@ checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -9440,12 +9829,12 @@ dependencies = [ [[package]] name = "prost" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" +checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" dependencies = [ "bytes", - "prost-derive 0.13.3", + "prost-derive 0.13.4", ] [[package]] @@ -9456,7 +9845,7 @@ checksum = 
"22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes", "heck", - "itertools 0.12.1", + "itertools 0.11.0", "log", "multimap", "once_cell", @@ -9465,7 +9854,7 @@ dependencies = [ "prost 0.12.6", "prost-types 0.12.6", "regex", - "syn 2.0.89", + "syn 2.0.90", "tempfile", ] @@ -9476,23 +9865,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools 0.12.1", + "itertools 0.11.0", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "prost-derive" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" +checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" dependencies = [ "anyhow", "itertools 0.13.0", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -9522,11 +9911,11 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4759aa0d3a6232fb8dbdb97b61de2c20047c68aca932c7ed76da9d788508d670" +checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" dependencies = [ - "prost 0.13.3", + "prost 0.13.4", ] [[package]] @@ -9644,11 +10033,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ed1a693391a16317257103ad06a88c6529ac640846021da7c435a06fffdacd7" dependencies = [ "chrono", - "indexmap 2.6.0", + "indexmap 2.7.0", "newtype-uuid", "quick-xml 0.37.1", "strip-ansi-escapes", - "thiserror 2.0.3", + "thiserror 2.0.6", "uuid 1.11.0", ] @@ -9670,15 +10059,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "quick-xml" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f50b1c63b38611e7d4d7f68b82d3ad0cc71a2ad2e7f61fc10f1328d917c93cd" -dependencies = [ - "memchr", -] - [[package]] name = "quick-xml" version = "0.37.1" @@ -9698,10 +10078,10 @@ dependencies = [ "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash 2.0.0", - "rustls 0.23.18", + "rustc-hash 2.1.0", + "rustls 0.23.19", "socket2", - "thiserror 2.0.3", + "thiserror 2.0.6", "tokio", "tracing", ] @@ -9716,11 +10096,11 @@ dependencies = [ "getrandom 0.2.15", "rand 0.8.5", "ring", - "rustc-hash 2.0.0", - "rustls 0.23.18", + "rustc-hash 2.1.0", + "rustls 0.23.19", "rustls-pki-types", "slab", - "thiserror 2.0.3", + "thiserror 2.0.6", "tinyvec", "tracing", "web-time", @@ -9728,9 +10108,9 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d5a626c6807713b15cac82a6acaccd6043c9a5408c24baae07611fec3f243da" +checksum = "52cd4b1eff68bf27940dd39811292c49e007f4d0b4c357358dc9b0197be6b527" dependencies = [ "cfg_aliases 0.2.1", "libc", @@ -9891,23 +10271,23 @@ dependencies = [ [[package]] name = "ratatui" -version = "0.28.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdef7f9be5c0122f890d58bdf4d964349ba6a6161f705907526d891efabba57d" +checksum = "eabd94c2f37801c20583fc49dd5cd6b0ba68c716787c2dd6ed18571e1e63117b" dependencies = [ "bitflags 2.6.0", "cassowary", "compact_str", "crossterm", + "indoc", "instability", "itertools 0.13.0", "lru", "paste", "strum", - "strum_macros", 
"unicode-segmentation", "unicode-truncate", - "unicode-width 0.1.14", + "unicode-width 0.2.0", ] [[package]] @@ -10087,7 +10467,7 @@ dependencies = [ "futures-core", "futures-util", "h2 0.4.7", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "hyper 1.5.1", @@ -10104,7 +10484,7 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.18", + "rustls 0.23.19", "rustls-native-certs 0.8.1", "rustls-pemfile 2.2.0", "rustls-pki-types", @@ -10115,9 +10495,9 @@ dependencies = [ "system-configuration 0.6.1", "tokio", "tokio-native-tls", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", "tokio-socks", - "tokio-util 0.7.12", + "tokio-util 0.7.13", "tower-service", "url", "wasm-bindgen", @@ -10136,7 +10516,7 @@ checksum = "562ceb5a604d3f7c885a792d42c199fd8af239d0a51b2fa6a78aafa092452b04" dependencies = [ "anyhow", "async-trait", - "http 1.1.0", + "http 1.2.0", "reqwest 0.12.9", "serde", "thiserror 1.0.69", @@ -10185,12 +10565,12 @@ dependencies = [ [[package]] name = "revm-inspectors" -version = "0.11.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "747291a18ad6726a08dd73f8b6a6b3a844db582ecae2063ccf0a04880c44f482" +checksum = "8d056aaa21f36038ab35fe8ce940ee332903a0b4b992b8ca805fb60c85eb2086" dependencies = [ "alloy-primitives", - "alloy-rpc-types-eth", + "alloy-rpc-types-eth 0.8.0", "alloy-rpc-types-trace", "alloy-sol-types", "anstyle", @@ -10198,7 +10578,7 @@ dependencies = [ "revm", "serde", "serde_json", - "thiserror 1.0.69", + "thiserror 2.0.6", ] [[package]] @@ -10385,9 +10765,9 @@ dependencies = [ [[package]] name = "rsa" -version = "0.9.7" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47c75d7c5c6b673e58bf54d8544a9f432e3a925b0e80f7cd3602ab5c50c55519" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" dependencies = [ "const-oid", "digest 0.10.7", @@ -10484,9 +10864,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" dependencies = [ "rand 0.8.5", ] @@ -10517,15 +10897,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.41" +version = "0.38.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" +checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" dependencies = [ "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -10542,9 +10922,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.18" +version = "0.23.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c9cc1d47e243d655ace55ed38201c19ae02c148ae56412ab8750e8f0166ab7f" +checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1" dependencies = [ "aws-lc-rs", "log", @@ -10631,7 +11011,7 @@ dependencies = [ "jni", "log", "once_cell", - "rustls 0.23.18", + "rustls 0.23.19", "rustls-native-certs 0.7.3", "rustls-platform-verifier-android", "rustls-webpki 0.102.8", @@ -10689,9 +11069,9 @@ dependencies = [ [[package]] name = "rustyline" -version = "14.0.0" +version = "15.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7803e8936da37efd9b6d4478277f4b2b9bb5cdb37a113e8d63222e58da647e63" +checksum = "2ee1e066dc922e513bda599c6ccb5f3bb2b0ea5870a579448f2622993f0a9a2f" dependencies = [ "bitflags 2.6.0", "cfg-if 1.0.0", @@ -10701,12 +11081,12 @@ dependencies = [ "libc", "log", "memchr", - "nix 0.28.0", + "nix 0.29.0", "radix_trie", "unicode-segmentation", - "unicode-width 0.1.14", + "unicode-width 0.2.0", "utf8parse", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -10764,7 +11144,7 @@ dependencies = [ "proc-macro-crate 3.2.0", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -10806,7 +11186,7 @@ dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", "serde_derive_internals", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -10946,8 +11326,8 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc32a777b53b3433b974c9c26b6d502a50037f8da94e46cb8ce2ced2cfdfaea0" dependencies = [ - "prost 0.13.3", - "prost-types 0.13.3", + "prost 0.13.4", + "prost-types 0.13.4", "serde", "serde_json", "zeroize", @@ -11170,7 +11550,7 @@ checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -11181,7 +11561,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -11190,7 +11570,7 @@ version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "itoa", "memchr", "ryu", @@ -11225,7 +11605,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -11279,7 +11659,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "itoa", "ryu", "serde", @@ -11308,7 +11688,7 @@ checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -11443,7 +11823,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34db1a06d485c9142248b7a054f034b349b212551f3dfd19c94d45a754a217cd" dependencies = [ "libc", - "mio 1.0.2", + "mio 1.0.3", "signal-hook", ] @@ -11592,9 +11972,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -11609,7 +11989,7 @@ dependencies = [ "base64 0.22.1", "bytes", "futures 0.3.31", - "http 1.1.0", + "http 1.2.0", "httparse", "log", "rand 0.8.5", @@ -11666,10 +12046,10 @@ checksum = "8b6e4eb0b72ed7adbb808897c85de08ea99609774a58c72e3dce55c758043ca2" dependencies = [ "bumpalo", "index_vec", - "indexmap 2.6.0", + "indexmap 2.7.0", "parking_lot 0.12.3", "rayon", - "rustc-hash 2.0.0", + "rustc-hash 2.1.0", "smallvec", ] @@ 
-11709,7 +12089,7 @@ checksum = "f0cc54b74e214647c1bbfc098d080cc5deac77f8dcb99aca91747276b01a15ad" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -11735,11 +12115,11 @@ dependencies = [ [[package]] name = "soldeer-commands" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5969c09f89ae6f0e18d5904e5bdbb8842ba948dad0f8202edb7ea510e35654d" +checksum = "a4bd924da31914871820d1404b63a89b100097957f6dc7f3bbb9c094f16d8f4e" dependencies = [ - "bon 2.3.0", + "bon", "clap", "cliclack", "derive_more 1.0.0", @@ -11754,7 +12134,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7a3129568ab6b38132efa9c956b5ae14c09c0a1a1167353e337081d1d7f0c32" dependencies = [ - "bon 3.3.0", + "bon", "chrono", "cliclack", "const-hex", @@ -11771,7 +12151,7 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.8", - "thiserror 2.0.3", + "thiserror 2.0.6", "tokio", "toml_edit 0.22.22", "uuid 1.11.0", @@ -11844,7 +12224,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4d8060b456358185f7d50c55d9b5066ad956956fddec42ee2e8567134a8936e" dependencies = [ "atoi", - "bigdecimal 0.4.6", + "bigdecimal 0.4.5", "byteorder", "bytes", "chrono", @@ -11860,7 +12240,7 @@ dependencies = [ "hashbrown 0.14.5", "hashlink", "hex", - "indexmap 2.6.0", + "indexmap 2.7.0", "ipnetwork", "log", "memchr", @@ -11891,7 +12271,7 @@ dependencies = [ "quote 1.0.37", "sqlx-core", "sqlx-macros-core", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -11914,7 +12294,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.89", + "syn 2.0.90", "tempfile", "tokio", "url", @@ -11928,7 +12308,7 @@ checksum = "64bb4714269afa44aef2755150a0fc19d756fb580a67db8885608cf02f47d06a" dependencies = [ "atoi", "base64 0.22.1", - "bigdecimal 0.4.6", + "bigdecimal 0.4.5", "bitflags 2.6.0", "byteorder", "bytes", @@ -11973,7 +12353,7 @@ checksum = "6fa91a732d854c5d7726349bb4bb879bb9478993ceb764247660aee25f67c2f8" dependencies = [ "atoi", "base64 0.22.1", - "bigdecimal 0.4.6", + "bigdecimal 0.4.5", "bitflags 2.6.0", "byteorder", "chrono", @@ -12127,7 +12507,7 @@ dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", "rustversion", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -12157,7 +12537,7 @@ checksum = "4aebac1b1ef2b46e2e2bdf3c09db304800f2a77c1fa902bd5231490203042be8" dependencies = [ "const-hex", "dirs 5.0.1", - "fs4 0.9.1", + "fs4", "reqwest 0.12.9", "semver 1.0.23", "serde", @@ -12206,9 +12586,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.89" +version = "2.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" +checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", @@ -12217,14 +12597,26 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.13" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdaa7b9e815582ba343a20c66627437cf45f1c6fba7f69772cbfd1358c7e197" +checksum = "219389c1ebe89f8333df8bdfb871f6631c552ff399c23cac02480b6088aad8f0" dependencies = [ "paste", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", +] + +[[package]] +name = "syn_derive" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" +dependencies = [ + "proc-macro-error", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -12250,7 +12642,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -12344,9 +12736,9 @@ dependencies = [ [[package]] name = "terminal_size" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f599bd7ca042cfdf8f4512b277c02ba102247820f9d9d4a9f521f496751a6ef" +checksum = "5352447f921fda68cf61b4101566c0bdb5104eff6804d0678e5227580ab6a4e9" dependencies = [ "rustix", "windows-sys 0.59.0", @@ -12393,11 +12785,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.3" +version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa" +checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47" dependencies = [ - "thiserror-impl 2.0.3", + "thiserror-impl 2.0.6", ] [[package]] @@ -12408,18 +12800,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] name = "thiserror-impl" -version = "2.0.3" +version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" +checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -12463,9 +12855,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -12486,9 +12878,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", @@ -12539,14 +12931,14 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.41.1" +version = "1.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" +checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" dependencies = [ "backtrace", "bytes", "libc", - "mio 1.0.2", + "mio 1.0.3", "parking_lot 0.12.3", "pin-project-lite", "signal-hook-registry", @@ -12563,7 +12955,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -12588,12 +12980,11 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" dependencies = [ - "rustls 0.23.18", - "rustls-pki-types", + "rustls 0.23.19", "tokio", ] @@ -12611,14 +13002,14 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", "tokio", - "tokio-util 0.7.12", + "tokio-util 0.7.13", ] [[package]] @@ -12629,10 +13020,10 @@ checksum = "edc5f74e248dc973e0dbb7b74c7e0d6fcc301c694ff50049504004ef4d0cdcd9" dependencies = [ "futures-util", "log", - "rustls 0.23.18", + "rustls 0.23.19", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", "tungstenite", "webpki-roots", ] @@ -12653,9 +13044,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -12680,7 +13071,7 @@ version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "serde", "serde_spanned", "toml_datetime", @@ -12702,7 +13093,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "toml_datetime", "winnow 0.5.40", ] @@ -12713,7 +13104,7 @@ version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "serde", "serde_spanned", "toml_datetime", @@ -12732,7 +13123,7 @@ dependencies = [ "base64 0.22.1", "bytes", "h2 0.4.7", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "hyper 1.5.1", @@ -12740,12 +13131,12 @@ dependencies = [ "hyper-util", "percent-encoding", "pin-project 1.1.7", - "prost 0.13.3", + "prost 0.13.4", "rustls-native-certs 0.8.1", "rustls-pemfile 2.2.0", "socket2", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", "tokio-stream", "tower 0.4.13", "tower-layer", @@ -12773,7 +13164,7 @@ dependencies = [ "rand 0.8.5", "slab", "tokio", - "tokio-util 0.7.12", + "tokio-util 0.7.13", "tower-layer", "tower-service", "tracing", @@ -12804,7 +13195,7 @@ dependencies = [ "bitflags 2.6.0", "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "http-range-header", @@ -12814,7 +13205,7 @@ dependencies = [ "percent-encoding", "pin-project-lite", "tokio", - "tokio-util 0.7.12", + "tokio-util 0.7.13", "tower-layer", "tower-service", "tracing", @@ -12846,9 +13237,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = 
[ "log", "pin-project-lite", @@ -12858,13 +13249,13 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -12879,9 +13270,9 @@ dependencies = [ [[package]] name = "tracing-error" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d686ec1c0f384b1277f097b2f279a2ecc11afe8c133c1aabf036a27cb4cd206e" +checksum = "8b1581020d7a273442f5b45074a6a57d5757ad0a47dac0e9f0bd57b81936f3db" dependencies = [ "tracing", "tracing-subscriber", @@ -12918,9 +13309,9 @@ dependencies = [ [[package]] name = "tracing-serde" -version = "0.1.3" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" dependencies = [ "serde", "tracing-core", @@ -12928,9 +13319,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "matchers", "nu-ansi-term", @@ -12961,9 +13352,9 @@ dependencies = [ [[package]] name = "tracy-client" -version = "0.17.4" +version = "0.17.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "746b078c6a09ebfd5594609049e07116735c304671eaab06ce749854d23435bc" +checksum = "51e295eae54124872df35720dc3a5b1e827c7deee352b342ec7f7e626d0d0ef3" dependencies = [ "loom", "once_cell", @@ -13015,11 +13406,11 @@ dependencies = [ "byteorder", "bytes", "data-encoding", - "http 1.1.0", + "http 1.2.0", "httparse", "log", "rand 0.8.5", - "rustls 0.23.18", + "rustls 0.23.19", "rustls-pki-types", "sha1", "thiserror 1.0.69", @@ -13340,7 +13731,7 @@ checksum = "6a511871dc5de990a3b2a0e715facfbc5da848c0c0395597a1415029fb7c250a" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -13417,9 +13808,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" dependencies = [ "cfg-if 1.0.0", "once_cell", @@ -13428,36 +13819,36 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.45" +version = "0.4.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" +checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2" dependencies = [ "cfg-if 1.0.0", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" dependencies = [ "quote 1.0.37", "wasm-bindgen-macro-support", @@ -13465,22 +13856,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" +checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" [[package]] name = "wasm-streams" @@ -13511,17 +13902,17 @@ dependencies = [ [[package]] name = "watchexec" -version = "4.1.0" +version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c635816bdb583dcd1cf58935899df38b5c5ffb1b9d0cc89f8d3c7b33e2c005e3" +checksum = "81e682bb1fe9526a6c78ffcfc6bb662ab36c213764fdd173babfbaf05cc56254" dependencies = [ "async-priority-channel", "async-recursion", "atomic-take", "futures 0.3.31", "ignore-files", - "miette 7.3.0", - "nix 0.28.0", + "miette 7.4.0", + "nix 0.29.0", "normalize-path", "notify", "once_cell", @@ -13537,34 +13928,34 @@ dependencies = [ [[package]] name = "watchexec-events" -version = "3.0.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce015ba32ff91a7f796cea3798e7998d3645411f03fc373ef0e7c7e564291bc" +checksum = "2404ed3aa5e4a8f6139a2ee137926886c9144234c945102143ef9bf65309a751" dependencies = [ - "nix 0.28.0", + "nix 0.29.0", "notify", "watchexec-signals", ] [[package]] name = "watchexec-signals" -version = "3.0.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f7ccc54db7df8cbbe3251508321e46986ce179af4c4a03b4c70bda539d72755" +checksum = "be07d7855a3617d996ce0c7df4b6232159c526634dff668dd95491c22a9a7262" dependencies = [ - "miette 7.3.0", - "nix 0.28.0", + "miette 7.4.0", + "nix 0.29.0", "thiserror 1.0.69", ] [[package]] name = "watchexec-supervisor" -version = "2.0.0" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97efb9292bebdf72a777a0d6e400b69b32b4f3daee1ddd30214317a18ff20ab" +checksum = "6026815bdc9653d7820f6499b83ecadacd97a804dfabf2b2c55b061557f5f1f4" dependencies = [ "futures 0.3.31", - "nix 0.28.0", + "nix 0.29.0", "process-wrap", "tokio", "tracing", @@ -13574,9 +13965,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.72" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" +checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" 
dependencies = [ "js-sys", "wasm-bindgen", @@ -13734,7 +14125,7 @@ checksum = "f6fc35f58ecd95a9b71c4f2329b911016e6bec66b3f2e6a4aad86bd2e99e2f9b" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -13745,7 +14136,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -13756,7 +14147,7 @@ checksum = "08990546bf4edef8f431fa6326e032865f27138718c587dc21bc0265bbcb57cc" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -13767,7 +14158,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -14072,7 +14463,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", "synstructure", ] @@ -14094,7 +14485,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -14114,7 +14505,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", "synstructure", ] @@ -14135,7 +14526,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -14157,7 +14548,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -14172,9 +14563,9 @@ dependencies = [ "crossbeam-utils", "displaydoc", "flate2", - "indexmap 2.6.0", + "indexmap 2.7.0", "memchr", - "thiserror 2.0.3", + "thiserror 2.0.6", "zopfli", ] @@ -14639,7 +15030,7 @@ version = "0.1.0" source = "git+https://github.com/matter-labs/zksync-era.git?rev=6c034f6e180cc92e99766f14c8840c90efa56cec#6c034f6e180cc92e99766f14c8840c90efa56cec" dependencies = [ "anyhow", - "bigdecimal 0.4.6", + "bigdecimal 0.4.5", "bincode", "chrono", "hex", @@ -14801,7 +15192,7 @@ dependencies = [ "flate2", "google-cloud-auth", "google-cloud-storage", - "http 1.1.0", + "http 1.2.0", "prost 0.12.6", "rand 0.8.5", "reqwest 0.12.9", @@ -14862,7 +15253,7 @@ dependencies = [ "prost-reflect", "protox", "quote 1.0.37", - "syn 2.0.89", + "syn 2.0.90", ] [[package]] @@ -14961,7 +15352,7 @@ version = "0.1.0" source = "git+https://github.com/matter-labs/zksync-era.git?rev=6c034f6e180cc92e99766f14c8840c90efa56cec#6c034f6e180cc92e99766f14c8840c90efa56cec" dependencies = [ "anyhow", - "bigdecimal 0.4.6", + "bigdecimal 0.4.5", "blake2 0.10.6", "chrono", "derive_more 1.0.0", @@ -14994,7 +15385,7 @@ version = "0.1.0" source = "git+https://github.com/matter-labs/zksync-era.git?rev=6c034f6e180cc92e99766f14c8840c90efa56cec#6c034f6e180cc92e99766f14c8840c90efa56cec" dependencies = [ "anyhow", - "bigdecimal 0.4.6", + "bigdecimal 0.4.5", "futures 0.3.31", "hex", "num", @@ -15084,7 +15475,7 @@ dependencies = [ "jsonrpsee", "pin-project-lite", "rlp", - "rustls 0.23.18", + "rustls 0.23.19", "serde", "serde_json", "thiserror 1.0.69", diff --git a/Cargo.toml b/Cargo.toml index 23cb3b0e7..1eead3ebe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -30,7 +30,7 @@ 
resolver = "2" version = "0.0.2" edition = "2021" # Remember to update clippy.toml as well -rust-version = "1.80" +rust-version = "1.83" authors = ["Foundry Contributors"] license = "MIT OR Apache-2.0" homepage = "https://github.com/foundry-rs/foundry" @@ -175,66 +175,68 @@ foundry-zksync-compiler = { path = "crates/zksync/compiler" } foundry-zksync-inspectors = { path = "crates/zksync/inspectors" } # solc & compilation utilities -# foundry-block-explorers = { version = "0.9.0", default-features = false } -# foundry-compilers = { version = "0.12.3", default-features = false } -# foundry-fork-db = "0.7.0" -foundry-block-explorers = { git = "https://github.com/Moonsong-Labs/block-explorers", branch = "zksync-v0.9.0", default-features = false } -foundry-compilers = { git = "https://github.com/Moonsong-Labs/compilers", branch = "zksync-v0.12.3" } -foundry-fork-db = { git = "https://github.com/Moonsong-Labs/foundry-zksync-fork-db", branch = "zksync-v0.7.1" } +foundry-block-explorers = { version = "0.9.0", default-features = false } +foundry-compilers = { version = "0.12.7", default-features = false } +foundry-fork-db = "0.9.0" solang-parser = "=0.3.3" +solar-ast = { version = "=0.1.0", default-features = false } +solar-parse = { version = "=0.1.0", default-features = false } ## revm revm = { version = "18.0.0", default-features = false } revm-primitives = { version = "14.0.0", default-features = false } -revm-inspectors = { version = "0.11.0", features = ["serde"] } +revm-inspectors = { version = "0.13.0", features = ["serde"] } ## ethers ethers-contract-abigen = { version = "2.0.14", default-features = false } ## alloy -alloy-consensus = { version = "0.6.4", default-features = false } -alloy-contract = { version = "0.6.4", default-features = false } -alloy-eips = { version = "0.6.4", default-features = false } -alloy-genesis = { version = "0.6.4", default-features = false } -alloy-json-rpc = { version = "0.6.4", default-features = false } -alloy-network = { version = "0.6.4", default-features = false } -alloy-provider = { version = "0.6.4", default-features = false } -alloy-pubsub = { version = "0.6.4", default-features = false } -alloy-rpc-client = { version = "0.6.4", default-features = false } -alloy-rpc-types = { version = "0.6.4", default-features = true } -alloy-serde = { version = "0.6.4", default-features = false } -alloy-signer = { version = "0.6.4", default-features = false } -alloy-signer-aws = { version = "0.6.4", default-features = false } -alloy-signer-gcp = { version = "0.6.4", default-features = false } -alloy-signer-ledger = { version = "0.6.4", default-features = false } -alloy-signer-local = { version = "0.6.4", default-features = false } -alloy-signer-trezor = { version = "0.6.4", default-features = false } -alloy-transport = { version = "0.6.4", default-features = false } -alloy-transport-http = { version = "0.6.4", default-features = false } -alloy-transport-ipc = { version = "0.6.4", default-features = false } -alloy-transport-ws = { version = "0.6.4", default-features = false } +alloy-consensus = { version = "0.8.0", default-features = false } +alloy-contract = { version = "0.8.0", default-features = false } +alloy-eips = { version = "0.8.0", default-features = false } +alloy-genesis = { version = "0.8.0", default-features = false } +alloy-json-rpc = { version = "0.8.0", default-features = false } +alloy-network = { version = "0.8.0", default-features = false } +alloy-provider = { version = "0.8.0", default-features = false } +alloy-pubsub = { version = "0.8.0", 
default-features = false } +alloy-rpc-client = { version = "0.8.0", default-features = false } +alloy-rpc-types = { version = "0.8.0", default-features = true } +alloy-serde = { version = "0.8.0", default-features = false } +alloy-signer = { version = "0.8.0", default-features = false } +alloy-signer-aws = { version = "0.8.0", default-features = false } +alloy-signer-gcp = { version = "0.8.0", default-features = false } +alloy-signer-ledger = { version = "0.8.0", default-features = false } +alloy-signer-local = { version = "0.8.0", default-features = false } +alloy-signer-trezor = { version = "0.8.0", default-features = false } +alloy-transport = { version = "0.8.0", default-features = false } +alloy-transport-http = { version = "0.8.0", default-features = false } +alloy-transport-ipc = { version = "0.8.0", default-features = false } +alloy-transport-ws = { version = "0.8.0", default-features = false } +alloy-node-bindings = { version = "0.8.0", default-features = false } +alloy-network-primitives = { version = "0.8.0", default-features = false } ## alloy-core -alloy-dyn-abi = "0.8.11" -alloy-json-abi = "0.8.11" -alloy-primitives = { version = "0.8.11", features = [ +alloy-dyn-abi = "0.8.14" +alloy-json-abi = "0.8.14" +alloy-primitives = { version = "0.8.14", features = [ "getrandom", "rand", "map-foldhash", ] } -alloy-sol-macro-expander = "0.8.11" -alloy-sol-macro-input = "0.8.11" -alloy-sol-types = "0.8.11" -syn-solidity = "0.8.11" +alloy-sol-macro-expander = "0.8.14" +alloy-sol-macro-input = "0.8.14" +alloy-sol-types = "0.8.14" +syn-solidity = "0.8.14" alloy-chains = "0.1" alloy-rlp = "0.3" -alloy-trie = "0.6.0" alloy-zksync = "0.6.1" -## op-alloy -op-alloy-rpc-types = "0.6.5" -op-alloy-consensus = "0.6.5" +alloy-trie = "0.7.0" + +## op-alloy +op-alloy-rpc-types = "0.8.0" +op-alloy-consensus = "0.8.0" ## cli anstream = "0.6" @@ -257,7 +259,7 @@ quote = "1.0" syn = "2.0" async-trait = "0.1" derive_more = { version = "1.0", features = ["full"] } -thiserror = "1" +thiserror = "2" # bench divan = "0.1" @@ -278,7 +280,7 @@ color-eyre = "0.6" comfy-table = "7" dunce = "1" evm-disassembler = "0.5" -evmole = "0.5" +evmole = "0.6" eyre = "0.6" figment = "0.10" futures = "0.3" @@ -299,7 +301,7 @@ semver = "1" serde = { version = "1.0", features = ["derive"] } serde_json = { version = "1.0", features = ["arbitrary_precision"] } similar-asserts = "1.6" -soldeer-commands = "=0.5.1" +soldeer-commands = "=0.5.2" strum = "0.26" tempfile = "3.13" tikv-jemallocator = "0.6" diff --git a/clippy.toml b/clippy.toml index b1756dfd9..8581063b6 100644 --- a/clippy.toml +++ b/clippy.toml @@ -1,4 +1,4 @@ -msrv = "1.80" +msrv = "1.83" # bytes::Bytes is included by default and alloy_primitives::Bytes is a wrapper around it, # so it is safe to ignore it as well ignore-interior-mutability = ["bytes::Bytes", "alloy_primitives::Bytes"] diff --git a/crates/anvil/core/src/eth/block.rs b/crates/anvil/core/src/eth/block.rs index c9f9048b8..50a9a66b3 100644 --- a/crates/anvil/core/src/eth/block.rs +++ b/crates/anvil/core/src/eth/block.rs @@ -65,6 +65,7 @@ impl Block { nonce: partial_header.nonce, base_fee_per_gas: partial_header.base_fee, requests_hash: partial_header.requests_hash, + target_blobs_per_block: None, }, transactions, ommers: vec![], @@ -157,6 +158,7 @@ mod tests { parent_beacon_block_root: Default::default(), base_fee_per_gas: None, requests_hash: None, + target_blobs_per_block: None, }; let encoded = alloy_rlp::encode(&header); @@ -198,6 +200,7 @@ mod tests { nonce: B64::ZERO, base_fee_per_gas: None, 
requests_hash: None, + target_blobs_per_block: None, }; header.encode(&mut data); @@ -231,6 +234,7 @@ mod tests { parent_beacon_block_root: None, base_fee_per_gas: None, requests_hash: None, + target_blobs_per_block: None, }; let header = Header::decode(&mut data.as_slice()).unwrap(); assert_eq!(header, expected); @@ -263,6 +267,7 @@ mod tests { excess_blob_gas: None, parent_beacon_block_root: None, requests_hash: None, + target_blobs_per_block: None, }; assert_eq!(header.hash_slow(), expected_hash); } diff --git a/crates/anvil/core/src/eth/transaction/mod.rs b/crates/anvil/core/src/eth/transaction/mod.rs index 8de659799..29b8aee88 100644 --- a/crates/anvil/core/src/eth/transaction/mod.rs +++ b/crates/anvil/core/src/eth/transaction/mod.rs @@ -6,18 +6,18 @@ use alloy_consensus::{ eip4844::{TxEip4844, TxEip4844Variant, TxEip4844WithSidecar}, TxEip7702, }, - AnyReceiptEnvelope, Receipt, ReceiptEnvelope, ReceiptWithBloom, Signed, Transaction, TxEip1559, - TxEip2930, TxEnvelope, TxLegacy, TxReceipt, + Receipt, ReceiptEnvelope, ReceiptWithBloom, Signed, TxEip1559, TxEip2930, TxEnvelope, TxLegacy, + TxReceipt, Typed2718, }; use alloy_eips::eip2718::{Decodable2718, Eip2718Error, Encodable2718}; -use alloy_network::{AnyRpcTransaction, AnyTxEnvelope}; +use alloy_network::{AnyReceiptEnvelope, AnyRpcTransaction, AnyTransactionReceipt, AnyTxEnvelope}; use alloy_primitives::{ Address, Bloom, Bytes, Log, PrimitiveSignature, TxHash, TxKind, B256, U256, U64, }; use alloy_rlp::{length_of_length, Decodable, Encodable, Header}; use alloy_rpc_types::{ - request::TransactionRequest, trace::otterscan::OtsReceipt, AccessList, AnyTransactionReceipt, - ConversionError, Transaction as RpcTransaction, TransactionReceipt, + request::TransactionRequest, trace::otterscan::OtsReceipt, AccessList, ConversionError, + Transaction as RpcTransaction, TransactionReceipt, }; use alloy_serde::{OtherFields, WithOtherFields}; use bytes::BufMut; @@ -1109,7 +1109,7 @@ pub struct TransactionInfo { #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] -pub struct DepositReceipt { +pub struct DepositReceipt> { #[serde(flatten)] pub inner: ReceiptWithBloom, #[serde(default, with = "alloy_serde::quantity::opt")] @@ -1136,7 +1136,7 @@ impl DepositReceipt { /// Encodes the receipt data. 
fn encode_fields(&self, out: &mut dyn BufMut) { self.receipt_rlp_header().encode(out); - self.inner.receipt.status.encode(out); + self.inner.status().encode(out); self.inner.receipt.cumulative_gas_used.encode(out); self.inner.logs_bloom.encode(out); self.inner.receipt.logs.encode(out); @@ -1161,7 +1161,7 @@ impl DepositReceipt { let status = Decodable::decode(b)?; let cumulative_gas_used = Decodable::decode(b)?; let logs_bloom = Decodable::decode(b)?; - let logs = Decodable::decode(b)?; + let logs: Vec = Decodable::decode(b)?; let deposit_nonce = remaining(b).then(|| alloy_rlp::Decodable::decode(b)).transpose()?; let deposit_nonce_version = remaining(b).then(|| alloy_rlp::Decodable::decode(b)).transpose()?; @@ -1207,7 +1207,7 @@ impl alloy_rlp::Decodable for DepositReceipt { #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[serde(tag = "type")] -pub enum TypedReceipt { +pub enum TypedReceipt> { #[serde(rename = "0x0", alias = "0x00")] Legacy(ReceiptWithBloom), #[serde(rename = "0x1", alias = "0x01")] @@ -1248,8 +1248,8 @@ impl From> for ReceiptWithBloom { } } -impl From> for OtsReceipt { - fn from(value: TypedReceipt) -> Self { +impl From>> for OtsReceipt { + fn from(value: TypedReceipt>) -> Self { let r#type = match value { TypedReceipt::Legacy(_) => 0x00, TypedReceipt::EIP2930(_) => 0x01, @@ -1258,7 +1258,7 @@ impl From> for OtsReceipt { TypedReceipt::EIP7702(_) => 0x04, TypedReceipt::Deposit(_) => 0x7E, } as u8; - let receipt = ReceiptWithBloom::::from(value); + let receipt = ReceiptWithBloom::>::from(value); let status = receipt.status(); let cumulative_gas_used = receipt.cumulative_gas_used() as u64; let logs = receipt.logs().to_vec(); @@ -1282,7 +1282,7 @@ impl TypedReceipt { } } -impl From> for TypedReceipt { +impl From> for TypedReceipt> { fn from(value: ReceiptEnvelope) -> Self { match value { ReceiptEnvelope::Legacy(r) => Self::Legacy(r), @@ -1439,7 +1439,7 @@ impl Decodable2718 for TypedReceipt { } } -pub type ReceiptResponse = TransactionReceipt>; +pub type ReceiptResponse = TransactionReceipt>>; pub fn convert_to_anvil_receipt(receipt: AnyTransactionReceipt) -> Option { let WithOtherFields { diff --git a/crates/anvil/src/cmd.rs b/crates/anvil/src/cmd.rs index eda009418..19e9193f9 100644 --- a/crates/anvil/src/cmd.rs +++ b/crates/anvil/src/cmd.rs @@ -75,7 +75,7 @@ pub struct NodeArgs { /// The EVM hardfork to use. /// - /// Choose the hardfork by name, e.g. `shanghai`, `paris`, `london`, etc... + /// Choose the hardfork by name, e.g. `cancun`, `shanghai`, `paris`, `london`, etc... /// [default: latest] #[arg(long)] pub hardfork: Option, @@ -89,8 +89,8 @@ pub struct NodeArgs { pub slots_in_an_epoch: u64, /// Writes output of `anvil` as json to user-specified file. - #[arg(long, value_name = "OUT_FILE")] - pub config_out: Option, + #[arg(long, value_name = "FILE", value_hint = clap::ValueHint::FilePath)] + pub config_out: Option, /// Disable auto and interval mining, and mine on demand instead. #[arg(long, visible_alias = "no-mine", conflicts_with = "block_time")] @@ -177,7 +177,7 @@ pub struct NodeArgs { /// Max number of states to persist on disk. /// /// Note that `prune_history` will overwrite `max_persisted_states` to 0. - #[arg(long)] + #[arg(long, conflicts_with = "prune_history")] pub max_persisted_states: Option, /// Number of blocks with transactions to keep in memory. 
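// Illustrative sketch of the clap attributes used above for `--config-out` and
// `--max-persisted-states`; the struct, field set, and values here are hypothetical
// stand-ins, not Foundry's actual `NodeArgs` (e.g. `--prune-history` is simplified to a flag).
use clap::Parser;
use std::path::PathBuf;

#[derive(Parser, Debug)]
struct DemoArgs {
    /// Like `--config-out`: typed as a `PathBuf` so shells can offer file-path completion.
    #[arg(long, value_name = "FILE", value_hint = clap::ValueHint::FilePath)]
    config_out: Option<PathBuf>,

    /// Simplified stand-in for `--prune-history`.
    #[arg(long)]
    prune_history: bool,

    /// Like `--max-persisted-states`: rejected when `--prune-history` is also set.
    #[arg(long, conflicts_with = "prune_history")]
    max_persisted_states: Option<usize>,
}

fn main() {
    // Passing `--max-persisted-states 5 --prune-history` together would fail with a conflict error.
    let args = DemoArgs::parse_from(["demo", "--config-out", "anvil.json"]);
    println!("{args:?}");
}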
@@ -275,7 +275,7 @@ impl NodeArgs { .with_transaction_block_keeper(self.transaction_block_keeper) .with_max_persisted_states(self.max_persisted_states) .with_optimism(self.evm_opts.optimism) - .with_alphanet(self.evm_opts.alphanet) + .with_odyssey(self.evm_opts.odyssey) .with_disable_default_create2_deployer(self.evm_opts.disable_default_create2_deployer) .with_slots_in_an_epoch(self.slots_in_an_epoch) .with_memory_limit(self.evm_opts.memory_limit) @@ -583,9 +583,9 @@ pub struct AnvilEvmArgs { #[arg(long)] pub memory_limit: Option, - /// Enable Alphanet features - #[arg(long, visible_alias = "odyssey")] - pub alphanet: bool, + /// Enable Odyssey features + #[arg(long, alias = "alphanet")] + pub odyssey: bool, } /// Resolves an alias passed as fork-url to the matching url defined in the rpc_endpoints section diff --git a/crates/anvil/src/config.rs b/crates/anvil/src/config.rs index ada482329..9e22adeed 100644 --- a/crates/anvil/src/config.rs +++ b/crates/anvil/src/config.rs @@ -28,7 +28,7 @@ use alloy_signer_local::{ }; use alloy_transport::{Transport, TransportError}; use anvil_server::ServerConfig; -use eyre::Result; +use eyre::{Context, Result}; use foundry_common::{ provider::{ProviderBuilder, RetryProvider}, ALCHEMY_FREE_TIER_CUPS, NON_ARCHIVE_NODE_WARNING, REQUEST_TIMEOUT, @@ -44,15 +44,17 @@ use itertools::Itertools; use parking_lot::RwLock; use rand::thread_rng; use revm::primitives::BlobExcessGasAndPrice; -use serde_json::{json, to_writer, Value}; +use serde_json::{json, Value}; use std::{ fmt::Write as FmtWrite, fs::File, + io, net::{IpAddr, Ipv4Addr}, path::{Path, PathBuf}, sync::Arc, time::Duration, }; +use tokio::sync::RwLock as TokioRwLock; use yansi::Paint; /// Default port the rpc will open @@ -144,7 +146,7 @@ pub struct NodeConfig { /// How transactions are sorted in the mempool pub transaction_order: TransactionOrder, /// Filename to write anvil output as json - pub config_out: Option, + pub config_out: Option, /// The genesis to use to initialize the node pub genesis: Option, /// Timeout in for requests sent to remote JSON-RPC server in forking mode @@ -185,8 +187,8 @@ pub struct NodeConfig { pub memory_limit: Option, /// Factory used by `anvil` to extend the EVM's precompiles. pub precompile_factory: Option>, - /// Enable Alphanet features. - pub alphanet: bool, + /// Enable Odyssey features. + pub odyssey: bool, /// Do not print log messages. pub silent: bool, /// The path where states are cached. 
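// Illustrative sketch of the renamed builder: after the `alphanet` -> `odyssey` rename
// above, node options are applied via `with_odyssey`. Values are arbitrary examples and
// `NodeConfig::test()` is used here only because the tests in this diff construct it that way.
use anvil::NodeConfig;

fn demo_config() -> NodeConfig {
    NodeConfig::test()
        .with_odyssey(true)
        .with_slots_in_an_epoch(32)
}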
@@ -195,13 +197,13 @@ pub struct NodeConfig { impl NodeConfig { fn as_string(&self, fork: Option<&ClientFork>) -> String { - let mut config_string: String = String::new(); - let _ = write!(config_string, "\n{}", BANNER.green()); - let _ = write!(config_string, "\n {VERSION_MESSAGE}"); - let _ = write!(config_string, "\n {}", "https://github.com/foundry-rs/foundry".green()); + let mut s: String = String::new(); + let _ = write!(s, "\n{}", BANNER.green()); + let _ = write!(s, "\n {VERSION_MESSAGE}"); + let _ = write!(s, "\n {}", "https://github.com/foundry-rs/foundry".green()); let _ = write!( - config_string, + s, r#" Available Accounts @@ -210,11 +212,11 @@ Available Accounts ); let balance = alloy_primitives::utils::format_ether(self.genesis_balance); for (idx, wallet) in self.genesis_accounts.iter().enumerate() { - write!(config_string, "\n({idx}) {} ({balance} ETH)", wallet.address()).unwrap(); + write!(s, "\n({idx}) {} ({balance} ETH)", wallet.address()).unwrap(); } let _ = write!( - config_string, + s, r#" Private Keys @@ -224,12 +226,12 @@ Private Keys for (idx, wallet) in self.genesis_accounts.iter().enumerate() { let hex = hex::encode(wallet.credential().to_bytes()); - let _ = write!(config_string, "\n({idx}) 0x{hex}"); + let _ = write!(s, "\n({idx}) 0x{hex}"); } if let Some(ref gen) = self.account_generator { let _ = write!( - config_string, + s, r#" Wallet @@ -244,7 +246,7 @@ Derivation path: {} if let Some(fork) = fork { let _ = write!( - config_string, + s, r#" Fork @@ -261,11 +263,11 @@ Chain ID: {} ); if let Some(tx_hash) = fork.transaction_hash() { - let _ = writeln!(config_string, "Transaction hash: {tx_hash}"); + let _ = writeln!(s, "Transaction hash: {tx_hash}"); } } else { let _ = write!( - config_string, + s, r#" Chain ID @@ -279,7 +281,7 @@ Chain ID if (SpecId::from(self.get_hardfork()) as u8) < (SpecId::LONDON as u8) { let _ = write!( - config_string, + s, r#" Gas Price ================== @@ -290,7 +292,7 @@ Gas Price ); } else { let _ = write!( - config_string, + s, r#" Base Fee ================== @@ -302,7 +304,7 @@ Base Fee } let _ = write!( - config_string, + s, r#" Gas Limit ================== @@ -326,7 +328,7 @@ Gas Limit ); let _ = write!( - config_string, + s, r#" Genesis Timestamp ================== @@ -336,7 +338,7 @@ Genesis Timestamp self.get_genesis_timestamp().green() ); - config_string + s } fn as_json(&self, fork: Option<&ClientFork>) -> Value { @@ -465,7 +467,7 @@ impl Default for NodeConfig { slots_in_an_epoch: 32, memory_limit: None, precompile_factory: None, - alphanet: false, + odyssey: false, silent: false, cache_path: None, } @@ -505,7 +507,7 @@ impl NodeConfig { /// Returns the hardfork to use pub fn get_hardfork(&self) -> ChainHardfork { - if self.alphanet { + if self.odyssey { return ChainHardfork::Ethereum(EthereumHardfork::PragueEOF); } if let Some(hardfork) = self.hardfork { @@ -749,7 +751,7 @@ impl NodeConfig { /// Sets the file path to write the Anvil node's config info to. 
#[must_use] - pub fn set_config_out(mut self, config_out: Option) -> Self { + pub fn set_config_out(mut self, config_out: Option) -> Self { self.config_out = config_out; self } @@ -903,21 +905,18 @@ impl NodeConfig { } /// Prints the config info - pub fn print(&self, fork: Option<&ClientFork>) { - if self.config_out.is_some() { - let config_out = self.config_out.as_deref().unwrap(); - to_writer( - &File::create(config_out).expect("Unable to create anvil config description file"), - &self.as_json(fork), - ) - .expect("Failed writing json"); + pub fn print(&self, fork: Option<&ClientFork>) -> Result<()> { + if let Some(path) = &self.config_out { + let file = io::BufWriter::new( + File::create(path).wrap_err("unable to create anvil config description file")?, + ); + let value = self.as_json(fork); + serde_json::to_writer(file, &value).wrap_err("failed writing JSON")?; } - - if self.silent { - return; + if !self.silent { + sh_println!("{}", self.as_string(fork))?; } - - let _ = sh_println!("{}", self.as_string(fork)); + Ok(()) } /// Returns the path where the cache file should be stored @@ -953,10 +952,10 @@ impl NodeConfig { self } - /// Sets whether to enable Alphanet support + /// Sets whether to enable Odyssey support #[must_use] - pub fn with_alphanet(mut self, alphanet: bool) -> Self { - self.alphanet = alphanet; + pub fn with_odyssey(mut self, odyssey: bool) -> Self { + self.odyssey = odyssey; self } @@ -983,7 +982,7 @@ impl NodeConfig { /// [Backend](mem::Backend) /// /// *Note*: only memory based backend for now - pub(crate) async fn setup(&mut self) -> mem::Backend { + pub(crate) async fn setup(&mut self) -> Result { // configure the revm environment let mut cfg = @@ -1020,11 +1019,11 @@ impl NodeConfig { self.get_blob_excess_gas_and_price(), ); - let (db, fork): (Arc>>, Option) = + let (db, fork): (Arc>>, Option) = if let Some(eth_rpc_url) = self.eth_rpc_url.clone() { - self.setup_fork_db(eth_rpc_url, &mut env, &fees).await + self.setup_fork_db(eth_rpc_url, &mut env, &fees).await? } else { - (Arc::new(tokio::sync::RwLock::new(Box::::default())), None) + (Arc::new(TokioRwLock::new(Box::::default())), None) }; // if provided use all settings of `genesis.json` @@ -1056,15 +1055,15 @@ impl NodeConfig { Arc::new(RwLock::new(fork)), self.enable_steps_tracing, self.print_logs, - self.alphanet, + self.odyssey, self.prune_history, self.max_persisted_states, self.transaction_block_keeper, self.block_time, self.cache_path.clone(), - Arc::new(tokio::sync::RwLock::new(self.clone())), + Arc::new(TokioRwLock::new(self.clone())), ) - .await; + .await?; // Writes the default create2 deployer to the backend, // if the option is not disabled and we are not forking. 
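// Illustrative sketch of the error-handling pattern adopted above: `expect`/`panic!`
// replaced by `eyre::Context::wrap_err` plus `?`, mirroring the new `print` body.
// The file name is a placeholder.
use eyre::{Context, Result};
use std::{fs::File, io::BufWriter};

fn write_config_json(value: &serde_json::Value) -> Result<()> {
    let file = BufWriter::new(
        File::create("anvil-config.json")
            .wrap_err("unable to create anvil config description file")?,
    );
    serde_json::to_writer(file, value).wrap_err("failed writing JSON")?;
    Ok(())
}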
@@ -1072,19 +1071,19 @@ impl NodeConfig { backend .set_create2_deployer(DEFAULT_CREATE2_DEPLOYER) .await - .expect("Failed to create default create2 deployer"); + .wrap_err("failed to create default create2 deployer")?; } if let Some(state) = self.init_state.clone() { - backend.load_state(state).await.expect("Failed to load init state"); + backend.load_state(state).await.wrap_err("failed to load init state")?; } - backend + Ok(backend) } /// Configures everything related to forking based on the passed `eth_rpc_url`: - /// - returning a tuple of a [ForkedDatabase] wrapped in an [Arc] [RwLock](tokio::sync::RwLock) - /// and [ClientFork] wrapped in an [Option] which can be used in a [Backend](mem::Backend) to + /// - returning a tuple of a [ForkedDatabase] wrapped in an [Arc] [RwLock](TokioRwLock) and + /// [ClientFork] wrapped in an [Option] which can be used in a [Backend](mem::Backend) to /// fork from. /// - modifying some parameters of the passed `env` /// - mutating some members of `self` @@ -1093,15 +1092,11 @@ impl NodeConfig { eth_rpc_url: String, env: &mut EnvWithHandlerCfg, fees: &FeeManager, - ) -> (Arc>>, Option) { - let (db, config) = self.setup_fork_db_config(eth_rpc_url, env, fees).await; - - let db: Arc>> = - Arc::new(tokio::sync::RwLock::new(Box::new(db))); - + ) -> Result<(Arc>>, Option)> { + let (db, config) = self.setup_fork_db_config(eth_rpc_url, env, fees).await?; + let db: Arc>> = Arc::new(TokioRwLock::new(Box::new(db))); let fork = ClientFork::new(config, Arc::clone(&db)); - - (db, Some(fork)) + Ok((db, Some(fork))) } /// Configures everything related to forking based on the passed `eth_rpc_url`: @@ -1114,7 +1109,7 @@ impl NodeConfig { eth_rpc_url: String, env: &mut EnvWithHandlerCfg, fees: &FeeManager, - ) -> (ForkedDatabase, ClientForkConfig) { + ) -> Result<(ForkedDatabase, ClientForkConfig)> { // TODO make provider agnostic let provider = Arc::new( ProviderBuilder::new(ð_rpc_url) @@ -1125,23 +1120,22 @@ impl NodeConfig { .initial_backoff(1000) .headers(self.fork_headers.clone()) .build() - .expect("Failed to establish provider to fork url"), + .wrap_err("failed to establish provider to fork url")?, ); let (fork_block_number, fork_chain_id, force_transactions) = if let Some(fork_choice) = &self.fork_choice { let (fork_block_number, force_transactions) = - derive_block_and_transactions(fork_choice, &provider).await.expect( - "Failed to derive fork block number and force transactions from fork choice", - ); + derive_block_and_transactions(fork_choice, &provider).await.wrap_err( + "failed to derive fork block number and force transactions from fork choice", + )?; let chain_id = if let Some(chain_id) = self.fork_chain_id { Some(chain_id) } else if self.hardfork.is_none() { - // auto adjust hardfork if not specified - // but only if we're forking mainnet + // Auto-adjust hardfork if not specified, but only if we're forking mainnet. 
let chain_id = - provider.get_chain_id().await.expect("Failed to fetch network chain ID"); + provider.get_chain_id().await.wrap_err("failed to fetch network chain ID")?; if alloy_chains::NamedChain::Mainnet == chain_id { let hardfork: EthereumHardfork = fork_block_number.into(); env.handler_cfg.spec_id = hardfork.into(); @@ -1155,15 +1149,16 @@ impl NodeConfig { (fork_block_number, chain_id, force_transactions) } else { // pick the last block number but also ensure it's not pending anymore - let bn = - find_latest_fork_block(&provider).await.expect("Failed to get fork block number"); + let bn = find_latest_fork_block(&provider) + .await + .wrap_err("failed to get fork block number")?; (bn, None, None) }; let block = provider .get_block(BlockNumberOrTag::Number(fork_block_number).into(), false.into()) .await - .expect("Failed to get fork block"); + .wrap_err("failed to get fork block")?; let block = if let Some(block) = block { block @@ -1179,9 +1174,9 @@ latest block number: {latest_block}" if fork_block_number <= latest_block { message.push_str(&format!("\n{NON_ARCHIVE_NODE_WARNING}")); } - panic!("{}", message); + eyre::bail!("{message}"); } - panic!("Failed to get block for block number: {fork_block_number}") + eyre::bail!("failed to get block for block number: {fork_block_number}") }; let gas_limit = self.fork_gas_limit(&block); @@ -1243,7 +1238,7 @@ latest block number: {latest_block}" let chain_id = if let Some(fork_chain_id) = fork_chain_id { fork_chain_id.to() } else { - provider.get_chain_id().await.unwrap() + provider.get_chain_id().await.wrap_err("failed to fetch network chain ID")? }; // need to update the dev signers and env with the chain id @@ -1296,7 +1291,7 @@ latest block number: {latest_block}" // need to insert the forked block's hash db.insert_block_hash(U256::from(config.block_number), config.block_hash); - (db, config) + Ok((db, config)) } /// we only use the gas limit value of the block if it is non-zero and the block gas @@ -1344,7 +1339,7 @@ async fn derive_block_and_transactions( let transaction = provider .get_transaction_by_hash(transaction_hash.0.into()) .await? - .ok_or(eyre::eyre!("Failed to get fork transaction by hash"))?; + .ok_or_else(|| eyre::eyre!("failed to get fork transaction by hash"))?; let transaction_block_number = transaction.block_number.unwrap(); // Get the block pertaining to the fork transaction @@ -1354,13 +1349,13 @@ async fn derive_block_and_transactions( alloy_rpc_types::BlockTransactionsKind::Full, ) .await? - .ok_or(eyre::eyre!("Failed to get fork block by number"))?; + .ok_or_else(|| eyre::eyre!("failed to get fork block by number"))?; // Filter out transactions that are after the fork transaction let filtered_transactions = transaction_block .transactions .as_transactions() - .ok_or(eyre::eyre!("Failed to get transactions from full fork block"))? + .ok_or_else(|| eyre::eyre!("failed to get transactions from full fork block"))? 
.iter() .take_while_inclusive(|&transaction| transaction.tx_hash() != transaction_hash.0) .collect::>(); diff --git a/crates/anvil/src/eth/backend/executor.rs b/crates/anvil/src/eth/backend/executor.rs index a00afd962..f4b20868f 100644 --- a/crates/anvil/src/eth/backend/executor.rs +++ b/crates/anvil/src/eth/backend/executor.rs @@ -28,7 +28,7 @@ use foundry_evm::{ }, }, traces::CallTraceNode, - utils::alphanet_handler_register, + utils::odyssey_handler_register, }; use revm::{db::WrapDatabaseRef, primitives::MAX_BLOB_GAS_PER_BLOCK}; use std::sync::Arc; @@ -106,7 +106,7 @@ pub struct TransactionExecutor<'a, Db: ?Sized, V: TransactionValidator> { /// Cumulative blob gas used by all executed transactions pub blob_gas_used: u64, pub enable_steps_tracing: bool, - pub alphanet: bool, + pub odyssey: bool, pub print_logs: bool, /// Precompiles to inject to the EVM. pub precompile_factory: Option>, @@ -314,7 +314,7 @@ impl Iterator for &mut TransactionExec } let exec_result = { - let mut evm = new_evm_with_inspector(&mut *self.db, env, &mut inspector, self.alphanet); + let mut evm = new_evm_with_inspector(&mut *self.db, env, &mut inspector, self.odyssey); if let Some(factory) = &self.precompile_factory { inject_precompiles(&mut evm, factory.precompiles()); } @@ -398,20 +398,20 @@ fn build_logs_bloom(logs: Vec, bloom: &mut Bloom) { } } -/// Creates a database with given database and inspector, optionally enabling alphanet features. +/// Creates a database with given database and inspector, optionally enabling odyssey features. pub fn new_evm_with_inspector( db: DB, env: EnvWithHandlerCfg, inspector: &mut dyn revm::Inspector, - alphanet: bool, + odyssey: bool, ) -> revm::Evm<'_, &mut dyn revm::Inspector, DB> { let EnvWithHandlerCfg { env, handler_cfg } = env; let mut handler = revm::Handler::new(handler_cfg); handler.append_handler_register_plain(revm::inspector_handle_register); - if alphanet { - handler.append_handler_register_plain(alphanet_handler_register); + if odyssey { + handler.append_handler_register_plain(odyssey_handler_register); } let context = revm::Context::new(revm::EvmContext::new_with_env(db, env), inspector); @@ -424,10 +424,10 @@ pub fn new_evm_with_inspector_ref<'a, DB>( db: DB, env: EnvWithHandlerCfg, inspector: &mut dyn revm::Inspector>, - alphanet: bool, + odyssey: bool, ) -> revm::Evm<'a, &mut dyn revm::Inspector>, WrapDatabaseRef> where DB: revm::DatabaseRef, { - new_evm_with_inspector(WrapDatabaseRef(db), env, inspector, alphanet) + new_evm_with_inspector(WrapDatabaseRef(db), env, inspector, odyssey) } diff --git a/crates/anvil/src/eth/backend/mem/fork_db.rs b/crates/anvil/src/eth/backend/mem/fork_db.rs index a4528a8f0..be5c3bcd7 100644 --- a/crates/anvil/src/eth/backend/mem/fork_db.rs +++ b/crates/anvil/src/eth/backend/mem/fork_db.rs @@ -5,7 +5,7 @@ use crate::{ }, revm::primitives::AccountInfo, }; -use alloy_primitives::{Address, B256, U256, U64}; +use alloy_primitives::{map::HashMap, Address, B256, U256, U64}; use alloy_rpc_types::BlockId; use foundry_evm::{ backend::{ @@ -14,7 +14,7 @@ use foundry_evm::{ fork::database::ForkDbStateSnapshot, revm::{primitives::BlockEnv, Database}, }; -use revm::DatabaseRef; +use revm::{db::DbAccount, DatabaseRef}; pub use foundry_evm::fork::database::ForkedDatabase; @@ -92,6 +92,10 @@ impl MaybeFullDatabase for ForkedDatabase { self } + fn maybe_as_full_db(&self) -> Option<&HashMap> { + Some(&self.database().accounts) + } + fn clear_into_state_snapshot(&mut self) -> StateSnapshot { let db = self.inner().db(); let accounts = 
std::mem::take(&mut *db.accounts.write()); @@ -127,6 +131,10 @@ impl MaybeFullDatabase for ForkDbStateSnapshot { self } + fn maybe_as_full_db(&self) -> Option<&HashMap> { + Some(&self.local.accounts) + } + fn clear_into_state_snapshot(&mut self) -> StateSnapshot { std::mem::take(&mut self.state_snapshot) } diff --git a/crates/anvil/src/eth/backend/mem/mod.rs b/crates/anvil/src/eth/backend/mem/mod.rs index 83718ad82..26787cca6 100644 --- a/crates/anvil/src/eth/backend/mem/mod.rs +++ b/crates/anvil/src/eth/backend/mem/mod.rs @@ -75,6 +75,7 @@ use anvil_core::eth::{ }; use anvil_rpc::error::RpcError; use chrono::Datelike; +use eyre::{Context, Result}; use flate2::{read::GzDecoder, write::GzEncoder, Compression}; use foundry_evm::{ backend::{DatabaseError, DatabaseResult, RevertStateSnapshotAction}, @@ -194,7 +195,7 @@ pub struct Backend { active_state_snapshots: Arc>>, enable_steps_tracing: bool, print_logs: bool, - alphanet: bool, + odyssey: bool, /// How to keep history state prune_state_history_config: PruneStateHistoryConfig, /// max number of blocks with transactions in memory @@ -222,14 +223,14 @@ impl Backend { fork: Arc>>, enable_steps_tracing: bool, print_logs: bool, - alphanet: bool, + odyssey: bool, prune_state_history_config: PruneStateHistoryConfig, max_persisted_states: Option, transaction_block_keeper: Option, automine_block_time: Option, cache_path: Option, node_config: Arc>, - ) -> Self { + ) -> Result { // if this is a fork then adjust the blockchain storage let blockchain = if let Some(fork) = fork.read().as_ref() { trace!(target: "backend", "using forked blockchain at {}", fork.block_number()); @@ -274,7 +275,7 @@ impl Backend { (cfg.slots_in_an_epoch, cfg.precompile_factory.clone()) }; - let (capabilities, executor_wallet) = if alphanet { + let (capabilities, executor_wallet) = if odyssey { // Insert account that sponsors the delegated txs. And deploy P256 delegation contract. let mut db = db.write().await; @@ -325,7 +326,7 @@ impl Backend { active_state_snapshots: Arc::new(Mutex::new(Default::default())), enable_steps_tracing, print_logs, - alphanet, + odyssey, prune_state_history_config, transaction_block_keeper, node_config, @@ -341,8 +342,8 @@ impl Backend { } // Note: this can only fail in forking mode, in which case we can't recover - backend.apply_genesis().await.expect("Failed to create genesis"); - backend + backend.apply_genesis().await.wrap_err("failed to create genesis")?; + Ok(backend) } /// Writes the CREATE2 deployer code directly to the database at the address provided. @@ -500,7 +501,7 @@ impl Backend { // `setup_fork_db_config` node_config.base_fee.take(); - node_config.setup_fork_db_config(eth_rpc_url, &mut env, &self.fees).await + node_config.setup_fork_db_config(eth_rpc_url, &mut env, &self.fees).await? 
}; *self.db.write().await = Box::new(db); @@ -536,7 +537,7 @@ impl Backend { let mut env = self.env.read().clone(); let (forked_db, client_fork_config) = - node_config.setup_fork_db_config(fork_url, &mut env, &self.fees).await; + node_config.setup_fork_db_config(fork_url, &mut env, &self.fees).await?; *self.db.write().await = Box::new(forked_db); let fork = ClientFork::new(client_fork_config, Arc::clone(&self.db)); @@ -998,7 +999,7 @@ impl Backend { &'i mut dyn revm::Inspector>>, WrapDatabaseRef<&'db dyn DatabaseRef>, > { - let mut evm = new_evm_with_inspector_ref(db, env, inspector, self.alphanet); + let mut evm = new_evm_with_inspector_ref(db, env, inspector, self.odyssey); if let Some(factory) = &self.precompile_factory { inject_precompiles(&mut evm, factory.precompiles()); } @@ -1079,7 +1080,7 @@ impl Backend { enable_steps_tracing: self.enable_steps_tracing, print_logs: self.print_logs, precompile_factory: self.precompile_factory.clone(), - alphanet: self.alphanet, + odyssey: self.odyssey, }; // create a new pending block @@ -1161,7 +1162,7 @@ impl Backend { blob_gas_used: 0, enable_steps_tracing: self.enable_steps_tracing, print_logs: self.print_logs, - alphanet: self.alphanet, + odyssey: self.odyssey, precompile_factory: self.precompile_factory.clone(), }; let executed_tx = executor.execute(); @@ -1232,7 +1233,7 @@ impl Backend { if storage.blocks.len() > transaction_block_keeper { let to_clear = block_number .to::() - .saturating_sub(transaction_block_keeper.try_into().unwrap()); + .saturating_sub(transaction_block_keeper.try_into().unwrap_or(u64::MAX)); storage.remove_block_transactions_by_number(to_clear) } } @@ -2877,7 +2878,7 @@ pub fn transaction_build( gas_limit, }; - let ser = serde_json::to_value(&dep_tx).unwrap(); + let ser = serde_json::to_value(&dep_tx).expect("could not serialize TxDeposit"); let maybe_deposit_fields = OtherFields::try_from(ser); match maybe_deposit_fields { @@ -3013,11 +3014,8 @@ pub fn prove_storage(storage: &HashMap, keys: &[B256]) -> Vec bool { - matches!( - NamedChain::try_from(chain_id), - Ok(NamedChain::Arbitrum | - NamedChain::ArbitrumTestnet | - NamedChain::ArbitrumGoerli | - NamedChain::ArbitrumNova) - ) + if let Ok(chain) = NamedChain::try_from(chain_id) { + return chain.is_arbitrum() + } + false } diff --git a/crates/anvil/src/eth/backend/mem/storage.rs b/crates/anvil/src/eth/backend/mem/storage.rs index 056b88627..5635a7acc 100644 --- a/crates/anvil/src/eth/backend/mem/storage.rs +++ b/crates/anvil/src/eth/backend/mem/storage.rs @@ -555,15 +555,12 @@ impl MinedTransaction { } GethDebugBuiltInTracerType::CallTracer => { return match tracer_config.into_call_config() { - Ok(call_config) => Ok(GethTraceBuilder::new( - self.info.traces.clone(), - TracingInspectorConfig::from_geth_config(&config), - ) - .geth_call_traces( - call_config, - self.receipt.cumulative_gas_used() as u64, - ) - .into()), + Ok(call_config) => Ok(GethTraceBuilder::new(self.info.traces.clone()) + .geth_call_traces( + call_config, + self.receipt.cumulative_gas_used() as u64, + ) + .into()), Err(e) => Err(RpcError::invalid_params(e.to_string()).into()), }; } @@ -579,16 +576,13 @@ impl MinedTransaction { } // default structlog tracer - Ok(GethTraceBuilder::new( - self.info.traces.clone(), - TracingInspectorConfig::from_geth_config(&config), - ) - .geth_traces( - self.receipt.cumulative_gas_used() as u64, - self.info.out.clone().unwrap_or_default(), - opts.config, - ) - .into()) + Ok(GethTraceBuilder::new(self.info.traces.clone()) + .geth_traces( + 
self.receipt.cumulative_gas_used() as u64, + self.info.out.clone().unwrap_or_default(), + config, + ) + .into()) } } diff --git a/crates/anvil/src/eth/error.rs b/crates/anvil/src/eth/error.rs index 394f33492..dda9b8bb2 100644 --- a/crates/anvil/src/eth/error.rs +++ b/crates/anvil/src/eth/error.rs @@ -98,6 +98,12 @@ pub enum BlockchainError { Message(String), } +impl From for BlockchainError { + fn from(err: eyre::Report) -> Self { + Self::Message(err.to_string()) + } +} + impl From for BlockchainError { fn from(err: RpcError) -> Self { Self::RpcError(err) diff --git a/crates/anvil/src/eth/otterscan/api.rs b/crates/anvil/src/eth/otterscan/api.rs index 617655444..f9a7334e0 100644 --- a/crates/anvil/src/eth/otterscan/api.rs +++ b/crates/anvil/src/eth/otterscan/api.rs @@ -46,7 +46,7 @@ pub fn mentions_address(trace: LocalizedTransactionTrace, address: Address) -> O /// Converts the list of traces for a transaction into the expected Otterscan format. /// -/// Follows format specified in the [`ots_traceTransaction`](https://github.com/otterscan/otterscan/blob/develop/docs/custom-jsonrpc.md#ots_tracetransaction) spec. +/// Follows format specified in the [`ots_traceTransaction`](https://github.com/otterscan/otterscan/blob/main/docs/custom-jsonrpc.md#ots_tracetransaction) spec. pub fn batch_build_ots_traces(traces: Vec) -> Vec { traces .into_iter() @@ -350,7 +350,7 @@ impl EthApi { /// their `gas_used`. This would be extremely inefficient in a real blockchain RPC, but we can /// get away with that in this context. /// - /// The [original spec](https://github.com/otterscan/otterscan/blob/develop/docs/custom-jsonrpc.md#ots_getblockdetails) + /// The [original spec](https://github.com/otterscan/otterscan/blob/main/docs/custom-jsonrpc.md#ots_getblockdetails) /// also mentions we can hardcode `transactions` and `logsBloom` to an empty array to save /// bandwidth, because fields weren't intended to be used in the Otterscan UI at this point. /// @@ -402,7 +402,7 @@ impl EthApi { /// Fetches all receipts for the blocks's transactions, as required by the /// [`ots_getBlockTransactions`] endpoint spec, and returns the final response object. 
/// - /// [`ots_getBlockTransactions`]: https://github.com/otterscan/otterscan/blob/develop/docs/custom-jsonrpc.md#ots_getblockdetails + /// [`ots_getBlockTransactions`]: https://github.com/otterscan/otterscan/blob/main/docs/custom-jsonrpc.md#ots_getblockdetails pub async fn build_ots_block_tx( &self, mut block: AnyRpcBlock, diff --git a/crates/anvil/src/lib.rs b/crates/anvil/src/lib.rs index 4fc0621c8..6d2e6d5e4 100644 --- a/crates/anvil/src/lib.rs +++ b/crates/anvil/src/lib.rs @@ -20,6 +20,7 @@ use crate::{ use alloy_primitives::{Address, U256}; use alloy_signer_local::PrivateKeySigner; use eth::backend::fork::ClientFork; +use eyre::Result; use foundry_common::provider::{ProviderBuilder, RetryProvider}; use foundry_evm::revm; use futures::{FutureExt, TryFutureExt}; @@ -27,7 +28,6 @@ use parking_lot::Mutex; use server::try_spawn_ipc; use std::{ future::Future, - io, net::SocketAddr, pin::Pin, sync::Arc, @@ -126,11 +126,11 @@ pub async fn spawn(config: NodeConfig) -> (EthApi, NodeHandle) { /// # Ok(()) /// # } /// ``` -pub async fn try_spawn(mut config: NodeConfig) -> io::Result<(EthApi, NodeHandle)> { +pub async fn try_spawn(mut config: NodeConfig) -> Result<(EthApi, NodeHandle)> { let logger = if config.enable_tracing { init_tracing() } else { Default::default() }; logger.set_enabled(!config.silent); - let backend = Arc::new(config.setup().await); + let backend = Arc::new(config.setup().await?); if config.enable_auto_impersonate { backend.auto_impersonate_account(true); @@ -251,7 +251,7 @@ pub async fn try_spawn(mut config: NodeConfig) -> io::Result<(EthApi, NodeHandle task_manager, }; - handle.print(fork.as_ref()); + handle.print(fork.as_ref())?; Ok((api, handle)) } @@ -281,7 +281,7 @@ impl Drop for NodeHandle { fn drop(&mut self) { // Fire shutdown signal to make sure anvil instance is terminated. if let Some(signal) = self._signal.take() { - signal.fire().unwrap() + let _ = signal.fire(); } } } @@ -293,21 +293,22 @@ impl NodeHandle { } /// Prints the launch info. - pub(crate) fn print(&self, fork: Option<&ClientFork>) { - self.config.print(fork); + pub(crate) fn print(&self, fork: Option<&ClientFork>) -> Result<()> { + self.config.print(fork)?; if !self.config.silent { if let Some(ipc_path) = self.ipc_path() { - let _ = sh_println!("IPC path: {ipc_path}"); + sh_println!("IPC path: {ipc_path}")?; } - let _ = sh_println!( + sh_println!( "Listening on {}", self.addresses .iter() .map(|addr| { addr.to_string() }) .collect::>() .join(", ") - ); + )?; } + Ok(()) } /// The address of the launched server. 
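// Illustrative sketch: with `try_spawn` now returning `eyre::Result` instead of
// `io::Result`, callers can propagate backend/fork setup failures with `?`.
// The tokio runtime attribute is an assumption made for this example.
use anvil::NodeConfig;
use eyre::Result;

#[tokio::main]
async fn main() -> Result<()> {
    let (_api, handle) = anvil::try_spawn(NodeConfig::test()).await?;
    let _provider = handle.http_provider();
    Ok(())
}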
diff --git a/crates/anvil/tests/it/anvil_api.rs b/crates/anvil/tests/it/anvil_api.rs index b75b088b0..9eb44c69b 100644 --- a/crates/anvil/tests/it/anvil_api.rs +++ b/crates/anvil/tests/it/anvil_api.rs @@ -808,7 +808,7 @@ async fn test_reorg() { // === wallet endpoints === // #[tokio::test(flavor = "multi_thread")] async fn can_get_wallet_capabilities() { - let (api, handle) = spawn(NodeConfig::test().with_alphanet(true)).await; + let (api, handle) = spawn(NodeConfig::test().with_odyssey(true)).await; let provider = handle.http_provider(); @@ -834,7 +834,7 @@ async fn can_get_wallet_capabilities() { #[tokio::test(flavor = "multi_thread")] async fn can_add_capability() { - let (api, _handle) = spawn(NodeConfig::test().with_alphanet(true)).await; + let (api, _handle) = spawn(NodeConfig::test().with_odyssey(true)).await; let init_capabilities = api.get_capabilities().unwrap(); @@ -864,7 +864,7 @@ async fn can_add_capability() { #[tokio::test(flavor = "multi_thread")] async fn can_set_executor() { - let (api, _handle) = spawn(NodeConfig::test().with_alphanet(true)).await; + let (api, _handle) = spawn(NodeConfig::test().with_odyssey(true)).await; let expected_addr = address!("f39Fd6e51aad88F6F4ce6aB8827279cffFb92266"); let pk = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80".to_string(); diff --git a/crates/anvil/tests/it/fork.rs b/crates/anvil/tests/it/fork.rs index 3d470894b..8e7736b0d 100644 --- a/crates/anvil/tests/it/fork.rs +++ b/crates/anvil/tests/it/fork.rs @@ -55,7 +55,7 @@ impl LocalFork { pub fn fork_config() -> NodeConfig { NodeConfig::test() - .with_eth_rpc_url(Some(rpc::next_http_archive_rpc_endpoint())) + .with_eth_rpc_url(Some(rpc::next_http_archive_rpc_url())) .with_fork_block_number(Some(BLOCK_NUMBER)) } @@ -287,7 +287,7 @@ async fn test_fork_reset_setup() { assert_eq!(local_balance, U256::ZERO); api.anvil_reset(Some(Forking { - json_rpc_url: Some(rpc::next_http_archive_rpc_endpoint()), + json_rpc_url: Some(rpc::next_http_archive_rpc_url()), block_number: Some(BLOCK_NUMBER), })) .await @@ -829,8 +829,7 @@ async fn test_fork_init_base_fee() { #[tokio::test(flavor = "multi_thread")] async fn test_reset_fork_on_new_blocks() { let (api, handle) = - spawn(NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_endpoint()))) - .await; + spawn(NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_url()))).await; let anvil_provider = handle.http_provider(); let endpoint = next_http_rpc_endpoint(); @@ -864,7 +863,7 @@ async fn test_fork_call() { let to: Address = "0x99d1Fa417f94dcD62BfE781a1213c092a47041Bc".parse().unwrap(); let block_number = 14746300u64; - let provider = http_provider(rpc::next_http_archive_rpc_endpoint().as_str()); + let provider = http_provider(rpc::next_http_archive_rpc_url().as_str()); let tx = TransactionRequest::default().to(to).with_input(input.clone()); let tx = WithOtherFields::new(tx); let res0 = provider.call(&tx).block(BlockId::Number(block_number.into())).await.unwrap(); @@ -1473,3 +1472,42 @@ async fn test_reset_dev_account_nonce() { assert!(receipt.status()); } + +#[tokio::test(flavor = "multi_thread")] +async fn test_fork_get_account() { + let (_api, handle) = spawn(fork_config()).await; + let provider = handle.http_provider(); + + let accounts = handle.dev_accounts().collect::>(); + + let alice = accounts[0]; + let bob = accounts[1]; + + let init_block = provider.get_block_number().await.unwrap(); + let alice_bal = provider.get_balance(alice).await.unwrap(); + let alice_nonce = 
provider.get_transaction_count(alice).await.unwrap(); + let alice_acc_init = provider.get_account(alice).await.unwrap(); + + assert_eq!(alice_acc_init.balance, alice_bal); + assert_eq!(alice_acc_init.nonce, alice_nonce); + + let tx = TransactionRequest::default().from(alice).to(bob).value(U256::from(142)); + + let tx = WithOtherFields::new(tx); + let receipt = provider.send_transaction(tx).await.unwrap().get_receipt().await.unwrap(); + + assert!(receipt.status()); + assert_eq!(init_block + 1, receipt.block_number.unwrap()); + + let alice_acc = provider.get_account(alice).await.unwrap(); + + assert_eq!( + alice_acc.balance, + alice_bal - (U256::from(142) + U256::from(receipt.gas_used * receipt.effective_gas_price)), + ); + assert_eq!(alice_acc.nonce, alice_nonce + 1); + + let alice_acc_prev_block = provider.get_account(alice).number(init_block).await.unwrap(); + + assert_eq!(alice_acc_init, alice_acc_prev_block); +} diff --git a/crates/cast/Cargo.toml b/crates/cast/Cargo.toml index 7d9b07b0f..a4409746e 100644 --- a/crates/cast/Cargo.toml +++ b/crates/cast/Cargo.toml @@ -99,6 +99,7 @@ tikv-jemallocator = { workspace = true, optional = true } [dev-dependencies] anvil.workspace = true foundry-test-utils.workspace = true +alloy-node-bindings.workspace = true async-trait.workspace = true divan.workspace = true diff --git a/crates/cast/bin/args.rs b/crates/cast/bin/args.rs index f010c279e..47bf9a884 100644 --- a/crates/cast/bin/args.rs +++ b/crates/cast/bin/args.rs @@ -422,7 +422,7 @@ pub enum CastSubcommand { #[command(visible_alias = "ca")] ComputeAddress { /// The deployer address. - address: Option, + address: Option
, /// The nonce of the deployer address. #[arg(long)] @@ -432,11 +432,11 @@ pub enum CastSubcommand { rpc: RpcOpts, }, - /// Disassembles hex encoded bytecode into individual / human readable opcodes + /// Disassembles a hex-encoded bytecode into a human-readable representation. #[command(visible_alias = "da")] Disassemble { - /// The hex encoded bytecode. - bytecode: String, + /// The hex-encoded bytecode. + bytecode: Option, }, /// Build and sign a transaction. @@ -512,8 +512,8 @@ pub enum CastSubcommand { /// /// Similar to `abi-decode --input`, but function selector MUST be prefixed in `calldata` /// string - #[command(visible_aliases = &["--calldata-decode", "cdd"])] - CalldataDecode { + #[command(visible_aliases = &["calldata-decode", "--calldata-decode", "cdd"])] + DecodeCalldata { /// The function signature in the format `()()`. sig: String, @@ -524,19 +524,39 @@ pub enum CastSubcommand { /// Decode ABI-encoded string. /// /// Similar to `calldata-decode --input`, but the function argument is a `string` - #[command(visible_aliases = &["--string-decode", "sd"])] - StringDecode { + #[command(visible_aliases = &["string-decode", "--string-decode", "sd"])] + DecodeString { /// The ABI-encoded string. data: String, }, + /// Decode event data. + #[command(visible_aliases = &["event-decode", "--event-decode", "ed"])] + DecodeEvent { + /// The event signature. If none provided then tries to decode from local cache or `https://api.openchain.xyz`. + #[arg(long, visible_alias = "event-sig")] + sig: Option, + /// The event data to decode. + data: String, + }, + + /// Decode custom error data. + #[command(visible_aliases = &["error-decode", "--error-decode", "erd"])] + DecodeError { + /// The error signature. If none provided then tries to decode from local cache or `https://api.openchain.xyz`. + #[arg(long, visible_alias = "error-sig")] + sig: Option, + /// The error data to decode. + data: String, + }, + /// Decode ABI-encoded input or output data. /// /// Defaults to decoding output data. To decode input data pass --input. /// /// When passing `--input`, function selector must NOT be prefixed in `calldata` string - #[command(name = "abi-decode", visible_aliases = &["ad", "--abi-decode"])] - AbiDecode { + #[command(name = "decode-abi", visible_aliases = &["abi-decode", "--abi-decode", "ad"])] + DecodeAbi { /// The function signature in the format `()()`. sig: String, @@ -734,7 +754,7 @@ pub enum CastSubcommand { #[arg(value_parser = NameOrAddress::from_str)] who: NameOrAddress, - /// Disassemble bytecodes into individual opcodes. + /// Disassemble bytecodes. #[arg(long, short)] disassemble: bool, @@ -1010,8 +1030,8 @@ pub enum CastSubcommand { /// Extracts function selectors and arguments from bytecode #[command(visible_alias = "sel")] Selectors { - /// The hex encoded bytecode. - bytecode: String, + /// The hex-encoded bytecode. 
+ bytecode: Option, /// Resolve the function signatures for the extracted selectors using https://openchain.xyz #[arg(long, short)] diff --git a/crates/cast/bin/cmd/call.rs b/crates/cast/bin/cmd/call.rs index aefc5f1c0..1383bea27 100644 --- a/crates/cast/bin/cmd/call.rs +++ b/crates/cast/bin/cmd/call.rs @@ -8,7 +8,7 @@ use foundry_cli::{ opts::{EthereumOpts, TransactionOpts}, utils::{self, handle_traces, parse_ether_value, TraceResult}, }; -use foundry_common::ens::NameOrAddress; +use foundry_common::{ens::NameOrAddress, shell}; use foundry_compilers::artifacts::EvmVersion; use foundry_config::{ figment::{ @@ -18,7 +18,11 @@ use foundry_config::{ }, Config, }; -use foundry_evm::{executors::TracingExecutor, opts::EvmOpts}; +use foundry_evm::{ + executors::TracingExecutor, + opts::EvmOpts, + traces::{InternalTraceMode, TraceMode}, +}; use std::str::FromStr; /// CLI arguments for `cast call`. @@ -69,9 +73,9 @@ pub struct CallArgs { #[arg(long, short)] block: Option, - /// Enable Alphanet features. - #[arg(long, alias = "odyssey")] - pub alphanet: bool, + /// Enable Odyssey features. + #[arg(long, alias = "alphanet")] + pub odyssey: bool, #[command(subcommand)] command: Option, @@ -81,6 +85,10 @@ pub struct CallArgs { #[command(flatten)] eth: EthereumOpts, + + /// Use current project artifacts for trace decoding. + #[arg(long, visible_alias = "la")] + pub with_local_artifacts: bool, } #[derive(Debug, Parser)] @@ -127,6 +135,7 @@ impl CallArgs { decode_internal, labels, data, + with_local_artifacts, .. } = self; @@ -170,15 +179,24 @@ impl CallArgs { config.fork_block_number = Some(block_number); } - let (mut env, fork, chain, alphanet) = + let create2_deployer = evm_opts.create2_deployer; + let (mut env, fork, chain, odyssey) = TracingExecutor::get_fork_material(&config, evm_opts).await?; // modify settings that usually set in eth_call env.cfg.disable_block_gas_limit = true; env.block.gas_limit = U256::MAX; + let trace_mode = TraceMode::Call + .with_debug(debug) + .with_decode_internal(if decode_internal { + InternalTraceMode::Full + } else { + InternalTraceMode::None + }) + .with_state_changes(shell::verbosity() > 4); let mut executor = - TracingExecutor::new(env, fork, evm_version, debug, decode_internal, alphanet); + TracingExecutor::new(env, fork, evm_version, trace_mode, odyssey, create2_deployer); let value = tx.value.unwrap_or_default(); let input = tx.inner.input.into_input().unwrap_or_default(); @@ -195,7 +213,16 @@ impl CallArgs { ), }; - handle_traces(trace, &config, chain, labels, debug, decode_internal, false).await?; + handle_traces( + trace, + &config, + chain, + labels, + with_local_artifacts, + debug, + decode_internal, + ) + .await?; return Ok(()); } @@ -214,8 +241,8 @@ impl figment::Provider for CallArgs { fn data(&self) -> Result, figment::Error> { let mut map = Map::new(); - if self.alphanet { - map.insert("alphanet".into(), self.alphanet.into()); + if self.odyssey { + map.insert("odyssey".into(), self.odyssey.into()); } if let Some(evm_version) = self.evm_version { diff --git a/crates/cast/bin/cmd/run.rs b/crates/cast/bin/cmd/run.rs index 79083fa8d..d323d627f 100644 --- a/crates/cast/bin/cmd/run.rs +++ b/crates/cast/bin/cmd/run.rs @@ -23,6 +23,7 @@ use foundry_config::{ use foundry_evm::{ executors::{EvmError, TracingExecutor}, opts::EvmOpts, + traces::{InternalTraceMode, TraceMode}, utils::configure_tx_env, }; @@ -84,9 +85,13 @@ pub struct RunArgs { #[arg(long, value_name = "NO_RATE_LIMITS", visible_alias = "no-rpc-rate-limit")] pub no_rate_limit: bool, - /// Enables 
Alphanet features. - #[arg(long, alias = "odyssey")] - pub alphanet: bool, + /// Enables Odyssey features. + #[arg(long, alias = "alphanet")] + pub odyssey: bool, + + /// Use current project artifacts for trace decoding. + #[arg(long, visible_alias = "la")] + pub with_local_artifacts: bool, } impl RunArgs { @@ -103,11 +108,9 @@ impl RunArgs { let compute_units_per_second = if self.no_rate_limit { Some(u64::MAX) } else { self.compute_units_per_second }; - let provider = foundry_common::provider::ProviderBuilder::new( - &config.get_rpc_url_or_localhost_http()?, - ) - .compute_units_per_second_opt(compute_units_per_second) - .build()?; + let provider = foundry_cli::utils::get_provider_builder(&config)? + .compute_units_per_second_opt(compute_units_per_second) + .build()?; let tx_hash = self.tx_hash.parse().wrap_err("invalid tx hash")?; let tx = provider @@ -134,7 +137,8 @@ impl RunArgs { // we need to fork off the parent block config.fork_block_number = Some(tx_block_number - 1); - let (mut env, fork, chain, alphanet) = + let create2_deployer = evm_opts.create2_deployer; + let (mut env, fork, chain, odyssey) = TracingExecutor::get_fork_material(&config, evm_opts).await?; let mut evm_version = self.evm_version; @@ -159,20 +163,30 @@ impl RunArgs { } } + let trace_mode = TraceMode::Call + .with_debug(self.debug) + .with_decode_internal(if self.decode_internal { + InternalTraceMode::Full + } else { + InternalTraceMode::None + }) + .with_state_changes(shell::verbosity() > 4); let mut executor = TracingExecutor::new( env.clone(), fork, evm_version, - self.debug, - self.decode_internal, - alphanet, + trace_mode, + odyssey, + create2_deployer, ); let mut env = EnvWithHandlerCfg::new_with_spec_id(Box::new(env.clone()), executor.spec_id()); // Set the state to the moment right before the transaction if !self.quick { - sh_println!("Executing previous transactions from the block.")?; + if !shell::is_json() { + sh_println!("Executing previous transactions from the block.")?; + } if let Some(block) = block { let pb = init_progress(block.transactions.len() as u64, "tx"); @@ -251,9 +265,9 @@ impl RunArgs { &config, chain, self.label, + self.with_local_artifacts, self.debug, self.decode_internal, - shell::verbosity() > 0, ) .await?; @@ -269,8 +283,12 @@ impl figment::Provider for RunArgs { fn data(&self) -> Result, figment::Error> { let mut map = Map::new(); - if self.alphanet { - map.insert("alphanet".into(), self.alphanet.into()); + if self.odyssey { + map.insert("odyssey".into(), self.odyssey.into()); + } + + if let Some(api_key) = &self.etherscan.key { + map.insert("etherscan_api_key".into(), api_key.as_str().into()); } if let Some(api_key) = &self.etherscan.key { diff --git a/crates/cast/bin/cmd/send.rs b/crates/cast/bin/cmd/send.rs index e332c4824..0cc1ddeac 100644 --- a/crates/cast/bin/cmd/send.rs +++ b/crates/cast/bin/cmd/send.rs @@ -113,7 +113,7 @@ pub enum SendTxSubcommands { impl SendTxArgs { #[allow(unknown_lints, dependency_on_unit_never_type_fallback)] - pub async fn run(self) -> Result<(), eyre::Report> { + pub async fn run(self) -> eyre::Result<()> { let Self { eth, to, diff --git a/crates/cast/bin/cmd/storage.rs b/crates/cast/bin/cmd/storage.rs index 13fa908bc..7121f1a98 100644 --- a/crates/cast/bin/cmd/storage.rs +++ b/crates/cast/bin/cmd/storage.rs @@ -6,7 +6,7 @@ use alloy_rpc_types::BlockId; use alloy_transport::Transport; use cast::Cast; use clap::Parser; -use comfy_table::{presets::ASCII_MARKDOWN, Table}; +use comfy_table::{modifiers::UTF8_ROUND_CORNERS, Cell, Table}; use eyre::Result; 
use foundry_block_explorers::Client; use foundry_cli::{ @@ -20,7 +20,7 @@ use foundry_common::{ shell, }; use foundry_compilers::{ - artifacts::{ConfigurableContractArtifact, StorageLayout}, + artifacts::{ConfigurableContractArtifact, Contract, StorageLayout}, compilers::{ solc::{Solc, SolcCompiler}, Compiler, @@ -284,12 +284,22 @@ fn print_storage(layout: StorageLayout, values: Vec, pretty: bool) "{}", serde_json::to_string_pretty(&serde_json::to_value(StorageReport { layout, values })?)? )?; - return Ok(()) + return Ok(()); } let mut table = Table::new(); - table.load_preset(ASCII_MARKDOWN); - table.set_header(["Name", "Type", "Slot", "Offset", "Bytes", "Value", "Hex Value", "Contract"]); + table.apply_modifier(UTF8_ROUND_CORNERS); + + table.set_header(vec![ + Cell::new("Name"), + Cell::new("Type"), + Cell::new("Slot"), + Cell::new("Offset"), + Cell::new("Bytes"), + Cell::new("Value"), + Cell::new("Hex Value"), + Cell::new("Contract"), + ]); for (slot, storage_value) in layout.storage.into_iter().zip(values) { let storage_type = layout.types.get(&slot.storage_type); @@ -309,12 +319,12 @@ fn print_storage(layout: StorageLayout, values: Vec, pretty: bool) ]); } - sh_println!("{table}")?; + sh_println!("\n{table}\n")?; Ok(()) } -fn add_storage_layout_output(project: &mut Project) { +fn add_storage_layout_output>(project: &mut Project) { project.artifacts.additional_values.storage_layout = true; project.update_output_selection(|selection| { selection.0.values_mut().for_each(|contract_selection| { diff --git a/crates/cast/bin/cmd/wallet/mod.rs b/crates/cast/bin/cmd/wallet/mod.rs index b6dea48e1..7960cab6e 100644 --- a/crates/cast/bin/cmd/wallet/mod.rs +++ b/crates/cast/bin/cmd/wallet/mod.rs @@ -1,6 +1,6 @@ use alloy_chains::Chain; use alloy_dyn_abi::TypedData; -use alloy_primitives::{hex, Address, Signature, B256}; +use alloy_primitives::{hex, Address, PrimitiveSignature as Signature, B256}; use alloy_provider::Provider; use alloy_signer::Signer; use alloy_signer_local::{ diff --git a/crates/cast/bin/main.rs b/crates/cast/bin/main.rs index 7df33496a..e7d94be87 100644 --- a/crates/cast/bin/main.rs +++ b/crates/cast/bin/main.rs @@ -1,7 +1,7 @@ #[macro_use] extern crate tracing; -use alloy_dyn_abi::DynSolValue; +use alloy_dyn_abi::{DynSolValue, ErrorExt, EventExt}; use alloy_primitives::{eip191_hash_message, hex, keccak256, Address, B256}; use alloy_provider::Provider; use alloy_rpc_types::{BlockId, BlockNumberOrTag::Latest}; @@ -11,7 +11,7 @@ use clap_complete::generate; use eyre::Result; use foundry_cli::{handler, utils}; use foundry_common::{ - abi::get_event, + abi::{get_error, get_event}, ens::{namehash, ProviderEnsExt}, fmt::{format_tokens, format_tokens_raw, format_uint_exp}, fs, @@ -30,6 +30,7 @@ pub mod cmd; pub mod tx; use args::{Cast as CastArgs, CastSubcommand, ToBaseArgs}; +use cast::traces::identifier::SignaturesIdentifier; #[macro_use] extern crate foundry_common; @@ -194,7 +195,7 @@ async fn main_args(args: CastArgs) -> Result<()> { } // ABI encoding & decoding - CastSubcommand::AbiDecode { sig, calldata, input } => { + CastSubcommand::DecodeAbi { sig, calldata, input } => { let tokens = SimpleCast::abi_decode(&sig, &calldata, input)?; print_tokens(&tokens); } @@ -205,17 +206,65 @@ async fn main_args(args: CastArgs) -> Result<()> { sh_println!("{}", SimpleCast::abi_encode_packed(&sig, &args)?)? 
} } - CastSubcommand::CalldataDecode { sig, calldata } => { + CastSubcommand::DecodeCalldata { sig, calldata } => { let tokens = SimpleCast::calldata_decode(&sig, &calldata, true)?; print_tokens(&tokens); } CastSubcommand::CalldataEncode { sig, args } => { sh_println!("{}", SimpleCast::calldata_encode(sig, &args)?)?; } - CastSubcommand::StringDecode { data } => { + CastSubcommand::DecodeString { data } => { let tokens = SimpleCast::calldata_decode("Any(string)", &data, true)?; print_tokens(&tokens); } + CastSubcommand::DecodeEvent { sig, data } => { + let decoded_event = if let Some(event_sig) = sig { + get_event(event_sig.as_str())?.decode_log_parts(None, &hex::decode(data)?, false)? + } else { + let data = data.strip_prefix("0x").unwrap_or(data.as_str()); + let selector = data.get(..64).unwrap_or_default(); + let identified_event = + SignaturesIdentifier::new(Config::foundry_cache_dir(), false)? + .write() + .await + .identify_event(&hex::decode(selector)?) + .await; + if let Some(event) = identified_event { + let _ = sh_println!("{}", event.signature()); + let data = data.get(64..).unwrap_or_default(); + get_event(event.signature().as_str())?.decode_log_parts( + None, + &hex::decode(data)?, + false, + )? + } else { + eyre::bail!("No matching event signature found for selector `{selector}`") + } + }; + print_tokens(&decoded_event.body); + } + CastSubcommand::DecodeError { sig, data } => { + let error = if let Some(err_sig) = sig { + get_error(err_sig.as_str())? + } else { + let data = data.strip_prefix("0x").unwrap_or(data.as_str()); + let selector = data.get(..8).unwrap_or_default(); + let identified_error = + SignaturesIdentifier::new(Config::foundry_cache_dir(), false)? + .write() + .await + .identify_error(&hex::decode(selector)?) + .await; + if let Some(error) = identified_error { + let _ = sh_println!("{}", error.signature()); + error + } else { + eyre::bail!("No matching error signature found for selector `{selector}`") + } + }; + let decoded_error = error.decode_error(&hex::decode(data)?)?; + print_tokens(&decoded_error.body); + } CastSubcommand::Interface(cmd) => cmd.run().await?, CastSubcommand::CreationCode(cmd) => cmd.run().await?, CastSubcommand::ConstructorArgs(cmd) => cmd.run().await?, @@ -336,14 +385,16 @@ async fn main_args(args: CastArgs) -> Result<()> { let config = Config::from(&rpc); let provider = utils::get_provider(&config)?; - let address: Address = stdin::unwrap_line(address)?.parse()?; + let address = stdin::unwrap_line(address)?; let computed = Cast::new(provider).compute_address(address, nonce).await?; sh_println!("Computed Address: {}", computed.to_checksum(None))? } CastSubcommand::Disassemble { bytecode } => { + let bytecode = stdin::unwrap_line(bytecode)?; sh_println!("{}", SimpleCast::disassemble(&hex::decode(bytecode)?)?)? } CastSubcommand::Selectors { bytecode, resolve } => { + let bytecode = stdin::unwrap_line(bytecode)?; let functions = SimpleCast::extract_functions(&bytecode)?; let max_args_len = functions.iter().map(|r| r.1.len()).max().unwrap_or(0); let max_mutability_len = functions.iter().map(|r| r.2.len()).max().unwrap_or(0); diff --git a/crates/cast/src/lib.rs b/crates/cast/src/lib.rs index 01741ae1e..8357f8164 100644 --- a/crates/cast/src/lib.rs +++ b/crates/cast/src/lib.rs @@ -2073,7 +2073,7 @@ impl SimpleCast { pub fn disassemble(code: &[u8]) -> Result { let mut output = String::new(); - for step in decode_instructions(code) { + for step in decode_instructions(code)? 
{ write!(output, "{:08x}: ", step.pc)?; if let Some(op) = step.op { @@ -2159,13 +2159,28 @@ impl SimpleCast { /// ``` pub fn extract_functions(bytecode: &str) -> Result> { let code = hex::decode(strip_0x(bytecode))?; - Ok(evmole::function_selectors(&code, 0) + let info = evmole::contract_info( + evmole::ContractInfoArgs::new(&code) + .with_selectors() + .with_arguments() + .with_state_mutability(), + ); + Ok(info + .functions + .expect("functions extraction was requested") .into_iter() - .map(|s| { + .map(|f| { ( - hex::encode_prefixed(s), - evmole::function_arguments(&code, &s, 0), - evmole::function_state_mutability(&code, &s, 0).as_json_str(), + hex::encode_prefixed(f.selector), + f.arguments + .expect("arguments extraction was requested") + .into_iter() + .map(|t| t.sol_type_name().to_string()) + .collect::>() + .join(","), + f.state_mutability + .expect("state_mutability extraction was requested") + .as_json_str(), ) }) .collect()) @@ -2346,4 +2361,23 @@ mod tests { r#"["0x2b5df5f0757397573e8ff34a8b987b21680357de1f6c8d10273aa528a851eaca","0x","0x","0x2838ac1d2d2721ba883169179b48480b2ba4f43d70fcf806956746bd9e83f903","0x","0xe46fff283b0ab96a32a7cc375cecc3ed7b6303a43d64e0a12eceb0bc6bd87549","0x","0x1d818c1c414c665a9c9a0e0c0ef1ef87cacb380b8c1f6223cb2a68a4b2d023f5","0x","0x","0x","0x236e8f61ecde6abfebc6c529441f782f62469d8a2cc47b7aace2c136bd3b1ff0","0x","0x","0x","0x","0x"]"# ) } + + #[test] + fn disassemble_incomplete_sequence() { + let incomplete = &hex!("60"); // PUSH1 + let disassembled = Cast::disassemble(incomplete); + assert!(disassembled.is_err()); + + let complete = &hex!("6000"); // PUSH1 0x00 + let disassembled = Cast::disassemble(complete); + assert!(disassembled.is_ok()); + + let incomplete = &hex!("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"); // PUSH32 with 31 bytes + let disassembled = Cast::disassemble(incomplete); + assert!(disassembled.is_err()); + + let complete = &hex!("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"); // PUSH32 with 32 bytes + let disassembled = Cast::disassemble(complete); + assert!(disassembled.is_ok()); + } } diff --git a/crates/cast/tests/cli/main.rs b/crates/cast/tests/cli/main.rs index f7e1c1b81..24ff9cea4 100644 --- a/crates/cast/tests/cli/main.rs +++ b/crates/cast/tests/cli/main.rs @@ -1,10 +1,12 @@ //! Contains various tests for checking cast commands use alloy_chains::NamedChain; +use alloy_network::TransactionResponse; use alloy_primitives::{b256, B256}; +use alloy_rpc_types::{BlockNumberOrTag, Index}; use anvil::{EthereumHardfork, NodeConfig}; use foundry_test_utils::{ - casttest, file, + casttest, file, forgetest, forgetest_async, rpc::{ next_etherscan_api_key, next_http_rpc_endpoint, next_mainnet_etherscan_api_key, next_rpc_endpoint, next_ws_rpc_endpoint, @@ -63,7 +65,7 @@ Display options: - 2 (-vv): Print logs for all tests. - 3 (-vvv): Print execution traces for failing tests. - 4 (-vvvv): Print execution traces for all tests, and setup traces for failing tests. - - 5 (-vvvvv): Print execution and setup traces for all tests. + - 5 (-vvvvv): Print execution and setup traces for all tests, including storage changes. Find more information in the book: http://book.getfoundry.sh/reference/cast/cast.html @@ -103,6 +105,7 @@ totalDifficulty [..] blobGasUsed [..] excessBlobGas [..] requestsHash [..] +targetBlobsPerBlock [..] transactions: [ ... 
] @@ -1129,10 +1132,15 @@ casttest!(storage_layout_simple, |_prj, cmd| { ]) .assert_success() .stdout_eq(str![[r#" + +╭---------+---------+------+--------+-------+-------+--------------------------------------------------------------------+-----------------------------------------------╮ | Name | Type | Slot | Offset | Bytes | Value | Hex Value | Contract | -|---------|---------|------|--------|-------|-------|--------------------------------------------------------------------|-----------------------------------------------| ++========================================================================================================================================================================+ | _owner | address | 0 | 0 | 20 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/Create2Deployer.sol:Create2Deployer | +|---------+---------+------+--------+-------+-------+--------------------------------------------------------------------+-----------------------------------------------| | _paused | bool | 0 | 20 | 1 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/Create2Deployer.sol:Create2Deployer | +╰---------+---------+------+--------+-------+-------+--------------------------------------------------------------------+-----------------------------------------------╯ + "#]]); }); @@ -1168,21 +1176,37 @@ casttest!(storage_layout_complex, |_prj, cmd| { ]) .assert_success() .stdout_eq(str![[r#" + +╭-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------╮ | Name | Type | Slot | Offset | Bytes | Value | Hex Value | Contract | -|-------------------------------|--------------------------------------------------------------------|------|--------|-------|--------------------------------------------------|--------------------------------------------------------------------|---------------------------------| ++======================================================================================================================================================================================================================================================================================+ | _status | uint256 | 0 | 0 | 32 | 1 | 0x0000000000000000000000000000000000000000000000000000000000000001 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _generalPoolsBalances | mapping(bytes32 => struct EnumerableMap.IERC20ToBytes32Map) | 1 | 0 | 32 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _nextNonce | mapping(address => uint256) | 2 | 0 | 32 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/vault/Vault.sol:Vault | 
+|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _paused | bool | 3 | 0 | 1 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _authorizer | contract IAuthorizer | 3 | 1 | 20 | 549683469959765988649777481110995959958745616871 | 0x0000000000000000000000006048a8c631fb7e77eca533cf9c29784e482391e7 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _approvedRelayers | mapping(address => mapping(address => bool)) | 4 | 0 | 32 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _isPoolRegistered | mapping(bytes32 => bool) | 5 | 0 | 32 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _nextPoolNonce | uint256 | 6 | 0 | 32 | 1760 | 0x00000000000000000000000000000000000000000000000000000000000006e0 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _minimalSwapInfoPoolsBalances | mapping(bytes32 => mapping(contract IERC20 => bytes32)) | 7 | 0 | 32 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _minimalSwapInfoPoolsTokens | mapping(bytes32 => struct EnumerableSet.AddressSet) | 8 | 0 | 32 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _twoTokenPoolTokens | 
mapping(bytes32 => struct TwoTokenPoolsBalance.TwoTokenPoolTokens) | 9 | 0 | 32 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _poolAssetManagers | mapping(bytes32 => mapping(contract IERC20 => address)) | 10 | 0 | 32 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/vault/Vault.sol:Vault | +|-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------| | _internalTokenBalance | mapping(address => mapping(contract IERC20 => uint256)) | 11 | 0 | 32 | 0 | 0x0000000000000000000000000000000000000000000000000000000000000000 | contracts/vault/Vault.sol:Vault | +╰-------------------------------+--------------------------------------------------------------------+------+--------+-------+--------------------------------------------------+--------------------------------------------------------------------+---------------------------------╯ + "#]]); }); @@ -1473,6 +1497,101 @@ casttest!(string_decode, |_prj, cmd| { "#]]); }); +// tests cast can decode event with provided signature +casttest!(event_decode_with_sig, |_prj, cmd| { + cmd.args(["decode-event", "--sig", "MyEvent(uint256,address)", "0x000000000000000000000000000000000000000000000000000000000000004e0000000000000000000000000000000000000000000000000000000000d0004f"]).assert_success().stdout_eq(str![[r#" +78 +0x0000000000000000000000000000000000D0004F + +"#]]); + + cmd.args(["--json"]).assert_success().stdout_eq(str![[r#" +[ + "78", + "0x0000000000000000000000000000000000D0004F" +] + +"#]]); +}); + +// tests cast can decode event with Openchain API +casttest!(event_decode_with_openchain, |prj, cmd| { + prj.clear_cache(); + cmd.args(["decode-event", "0xe27c4c1372396a3d15a9922f74f9dfc7c72b1ad6d63868470787249c356454c1000000000000000000000000000000000000000000000000000000000000004e00000000000000000000000000000000000000000000000000000dd00000004e"]).assert_success().stdout_eq(str![[r#" +BaseCurrencySet(address,uint256) +0x000000000000000000000000000000000000004e +15187004358734 [1.518e13] + +"#]]); +}); + +// tests cast can decode error with provided signature +casttest!(error_decode_with_sig, |_prj, cmd| { + cmd.args(["decode-error", "--sig", "AnotherValueTooHigh(uint256,address)", "0x7191bc6200000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000D0004F"]).assert_success().stdout_eq(str![[r#" +101 +0x0000000000000000000000000000000000D0004F + +"#]]); + + cmd.args(["--json"]).assert_success().stdout_eq(str![[r#" +[ + "101", + "0x0000000000000000000000000000000000D0004F" +] + +"#]]); +}); + +// tests cast can decode error with Openchain API +casttest!(error_decode_with_openchain, |prj, cmd| { + prj.clear_cache(); + cmd.args(["decode-error", "0x7a0e198500000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000000064"]).assert_success().stdout_eq(str![[r#" +ValueTooHigh(uint256,uint256) +101 +100 + +"#]]); +}); + +// tests cast can 
decode error and event when using local sig identifiers cache +forgetest!(error_event_decode_with_cache, |prj, cmd| { + prj.clear_cache(); + foundry_test_utils::util::initialize(prj.root()); + prj.add_source( + "LocalProjectContract", + r#" +contract ContractWithCustomError { + error AnotherValueTooHigh(uint256, address); + event MyUniqueEventWithinLocalProject(uint256 a, address b); +} + "#, + ) + .unwrap(); + // Store selectors in local cache. + cmd.forge_fuse().args(["selectors", "cache"]).assert_success(); + + // Assert cast can decode custom error with local cache. + cmd.cast_fuse() + .args(["decode-error", "0x7191bc6200000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000D0004F"]) + .assert_success() + .stdout_eq(str![[r#" +AnotherValueTooHigh(uint256,address) +101 +0x0000000000000000000000000000000000D0004F + +"#]]); + // Assert cast can decode event with local cache. + cmd.cast_fuse() + .args(["decode-event", "0xbd3699995dcc867b64dbb607be2c33be38df9134bef1178df13bfb9446e73104000000000000000000000000000000000000000000000000000000000000004e00000000000000000000000000000000000000000000000000000dd00000004e"]) + .assert_success() + .stdout_eq(str![[r#" +MyUniqueEventWithinLocalProject(uint256,address) +78 +0x00000000000000000000000000000DD00000004e + +"#]]); +}); + casttest!(format_units, |_prj, cmd| { cmd.args(["format-units", "1000000", "6"]).assert_success().stdout_eq(str![[r#" 1 @@ -1597,3 +1716,283 @@ casttest!(fetch_artifact_from_etherscan, |_prj, cmd| { "#]]); }); + +// tests cast can decode traces when using project artifacts +forgetest_async!(decode_traces_with_project_artifacts, |prj, cmd| { + let (api, handle) = + anvil::spawn(NodeConfig::test().with_disable_default_create2_deployer(true)).await; + + foundry_test_utils::util::initialize(prj.root()); + prj.add_source( + "LocalProjectContract", + r#" +contract LocalProjectContract { + event LocalProjectContractCreated(address owner); + + constructor() { + emit LocalProjectContractCreated(msg.sender); + } +} + "#, + ) + .unwrap(); + prj.add_script( + "LocalProjectScript", + r#" +import "forge-std/Script.sol"; +import {LocalProjectContract} from "../src/LocalProjectContract.sol"; + +contract LocalProjectScript is Script { + function run() public { + vm.startBroadcast(); + new LocalProjectContract(); + vm.stopBroadcast(); + } +} + "#, + ) + .unwrap(); + + cmd.args([ + "script", + "--private-key", + "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", + "--rpc-url", + &handle.http_endpoint(), + "--broadcast", + "LocalProjectScript", + ]); + + cmd.assert_success(); + + let tx_hash = api + .transaction_by_block_number_and_index(BlockNumberOrTag::Latest, Index::from(0)) + .await + .unwrap() + .unwrap() + .tx_hash(); + + // Assert cast with local artifacts from outside the project. + cmd.cast_fuse() + .args(["run", "--la", format!("{tx_hash}").as_str(), "--rpc-url", &handle.http_endpoint()]) + .assert_success() + .stdout_eq(str![[r#" +Executing previous transactions from the block. +Compiling project to generate artifacts +Nothing to compile + +"#]]); + + // Run cast from project dir. + cmd.cast_fuse().set_current_dir(prj.root()); + + // Assert cast without local artifacts cannot decode traces. + cmd.cast_fuse() + .args(["run", format!("{tx_hash}").as_str(), "--rpc-url", &handle.http_endpoint()]) + .assert_success() + .stdout_eq(str![[r#" +Executing previous transactions from the block. 
+Traces: + [13520] → new @0x5FbDB2315678afecb367f032d93F642f64180aa3 + ├─ emit topic 0: 0xa7263295d3a687d750d1fd377b5df47de69d7db8decc745aaa4bbee44dc1688d + │ data: 0x000000000000000000000000f39fd6e51aad88f6f4ce6ab8827279cfffb92266 + └─ ← [Return] 62 bytes of code + + +Transaction successfully executed. +[GAS] + +"#]]); + + // Assert cast with local artifacts can decode traces. + cmd.cast_fuse() + .args(["run", "--la", format!("{tx_hash}").as_str(), "--rpc-url", &handle.http_endpoint()]) + .assert_success() + .stdout_eq(str![[r#" +Executing previous transactions from the block. +Compiling project to generate artifacts +No files changed, compilation skipped +Traces: + [13520] → new LocalProjectContract@0x5FbDB2315678afecb367f032d93F642f64180aa3 + ├─ emit LocalProjectContractCreated(owner: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266) + └─ ← [Return] 62 bytes of code + + +Transaction successfully executed. +[GAS] + +"#]]); +}); + +// tests cast can decode traces when running with verbosity level > 4 +forgetest_async!(show_state_changes_in_traces, |prj, cmd| { + let (api, handle) = anvil::spawn(NodeConfig::test()).await; + + foundry_test_utils::util::initialize(prj.root()); + // Deploy counter contract. + cmd.args([ + "script", + "--private-key", + "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", + "--rpc-url", + &handle.http_endpoint(), + "--broadcast", + "CounterScript", + ]) + .assert_success(); + + // Send tx to change counter storage value. + cmd.cast_fuse() + .args([ + "send", + "0x5FbDB2315678afecb367f032d93F642f64180aa3", + "setNumber(uint256)", + "111", + "--private-key", + "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", + "--rpc-url", + &handle.http_endpoint(), + ]) + .assert_success(); + + let tx_hash = api + .transaction_by_block_number_and_index(BlockNumberOrTag::Latest, Index::from(0)) + .await + .unwrap() + .unwrap() + .tx_hash(); + + // Assert cast with verbosity displays storage changes. + cmd.cast_fuse() + .args([ + "run", + format!("{tx_hash}").as_str(), + "-vvvvv", + "--rpc-url", + &handle.http_endpoint(), + ]) + .assert_success() + .stdout_eq(str![[r#" +Executing previous transactions from the block. +Traces: + [22287] 0x5FbDB2315678afecb367f032d93F642f64180aa3::setNumber(111) + ├─ storage changes: + │ @ 0: 0 → 111 + └─ ← [Stop] + + +Transaction successfully executed. 
+[GAS] + +"#]]); +}); + +// tests cast can decode external libraries traces with project cached selectors +forgetest_async!(decode_external_libraries_with_cached_selectors, |prj, cmd| { + let (api, handle) = anvil::spawn(NodeConfig::test()).await; + + foundry_test_utils::util::initialize(prj.root()); + prj.add_source( + "ExternalLib", + r#" +import "./CounterInExternalLib.sol"; +library ExternalLib { + function updateCounterInExternalLib(CounterInExternalLib.Info storage counterInfo, uint256 counter) public { + counterInfo.counter = counter + 1; + } +} + "#, + ) + .unwrap(); + prj.add_source( + "CounterInExternalLib", + r#" +import "./ExternalLib.sol"; +contract CounterInExternalLib { + struct Info { + uint256 counter; + } + Info info; + constructor() { + ExternalLib.updateCounterInExternalLib(info, 100); + } +} + "#, + ) + .unwrap(); + prj.add_script( + "CounterInExternalLibScript", + r#" +import "forge-std/Script.sol"; +import {CounterInExternalLib} from "../src/CounterInExternalLib.sol"; +contract CounterInExternalLibScript is Script { + function run() public { + vm.startBroadcast(); + new CounterInExternalLib(); + vm.stopBroadcast(); + } +} + "#, + ) + .unwrap(); + + cmd.args([ + "script", + "--private-key", + "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", + "--rpc-url", + &handle.http_endpoint(), + "--broadcast", + "CounterInExternalLibScript", + ]) + .assert_success(); + + let tx_hash = api + .transaction_by_block_number_and_index(BlockNumberOrTag::Latest, Index::from(0)) + .await + .unwrap() + .unwrap() + .tx_hash(); + + // Cache project selectors. + cmd.forge_fuse().set_current_dir(prj.root()); + cmd.forge_fuse().args(["selectors", "cache"]).assert_success(); + + // Assert cast with local artifacts can decode external lib signature. + cmd.cast_fuse().set_current_dir(prj.root()); + cmd.cast_fuse() + .args(["run", format!("{tx_hash}").as_str(), "--rpc-url", &handle.http_endpoint()]) + .assert_success() + .stdout_eq(str![[r#" +... +Traces: + [37739] → new @0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512 + ├─ [22411] 0xfAb06527117d29EA121998AC4fAB9Fc88bF5f979::updateCounterInExternalLib(0, 100) [delegatecall] + │ └─ ← [Stop] + └─ ← [Return] 62 bytes of code + + +Transaction successfully executed. 
+[GAS] + +"#]]); +}); + +// https://github.com/foundry-rs/foundry/issues/9476 +forgetest_async!(cast_call_custom_chain_id, |_prj, cmd| { + let chain_id = 55555u64; + let (_api, handle) = anvil::spawn(NodeConfig::test().with_chain_id(Some(chain_id))).await; + + let http_endpoint = handle.http_endpoint(); + + cmd.cast_fuse() + .args([ + "call", + "5FbDB2315678afecb367f032d93F642f64180aa3", + "--rpc-url", + &http_endpoint, + "--chain", + &chain_id.to_string(), + ]) + .assert_success(); +}); diff --git a/crates/cheatcodes/Cargo.toml b/crates/cheatcodes/Cargo.toml index e817e1598..9f1882346 100644 --- a/crates/cheatcodes/Cargo.toml +++ b/crates/cheatcodes/Cargo.toml @@ -51,6 +51,7 @@ alloy-signer-local = { workspace = true, features = [ ] } parking_lot.workspace = true alloy-consensus = { workspace = true, features = ["k256"] } +alloy-network.workspace = true alloy-rlp.workspace = true base64.workspace = true @@ -73,3 +74,4 @@ toml = { workspace = true, features = ["preserve_order"] } tracing.workspace = true walkdir.workspace = true proptest.workspace = true +serde.workspace = true diff --git a/crates/cheatcodes/assets/cheatcodes.json b/crates/cheatcodes/assets/cheatcodes.json index fb7a5d612..d2c9ea2ce 100644 --- a/crates/cheatcodes/assets/cheatcodes.json +++ b/crates/cheatcodes/assets/cheatcodes.json @@ -3111,7 +3111,7 @@ "func": { "id": "attachDelegation", "description": "Designate the next call as an EIP-7702 transaction", - "declaration": "function attachDelegation(SignedDelegation memory signedDelegation) external;", + "declaration": "function attachDelegation(SignedDelegation calldata signedDelegation) external;", "visibility": "external", "mutability": "", "signature": "attachDelegation((uint8,bytes32,bytes32,uint64,address))", @@ -4615,7 +4615,7 @@ "func": { "id": "eth_getLogs", "description": "Gets all the logs according to specified filter.", - "declaration": "function eth_getLogs(uint256 fromBlock, uint256 toBlock, address target, bytes32[] memory topics) external returns (EthGetLogs[] memory logs);", + "declaration": "function eth_getLogs(uint256 fromBlock, uint256 toBlock, address target, bytes32[] calldata topics) external returns (EthGetLogs[] memory logs);", "visibility": "external", "mutability": "", "signature": "eth_getLogs(uint256,uint256,address,bytes32[])", @@ -5051,6 +5051,46 @@ "status": "stable", "safety": "unsafe" }, + { + "func": { + "id": "expectRevert_10", + "description": "Expects a `count` number of reverts from the upcoming calls from the reverter address that match the revert data.", + "declaration": "function expectRevert(bytes4 revertData, address reverter, uint64 count) external;", + "visibility": "external", + "mutability": "", + "signature": "expectRevert(bytes4,address,uint64)", + "selector": "0xb0762d73", + "selectorBytes": [ + 176, + 118, + 45, + 115 + ] + }, + "group": "testing", + "status": "stable", + "safety": "unsafe" + }, + { + "func": { + "id": "expectRevert_11", + "description": "Expects a `count` number of reverts from the upcoming calls from the reverter address that exactly match the revert data.", + "declaration": "function expectRevert(bytes calldata revertData, address reverter, uint64 count) external;", + "visibility": "external", + "mutability": "", + "signature": "expectRevert(bytes,address,uint64)", + "selector": "0xd345fb1f", + "selectorBytes": [ + 211, + 69, + 251, + 31 + ] + }, + "group": "testing", + "status": "stable", + "safety": "unsafe" + }, { "func": { "id": "expectRevert_2", @@ -5131,6 +5171,86 @@ "status": "stable", 
"safety": "unsafe" }, + { + "func": { + "id": "expectRevert_6", + "description": "Expects a `count` number of reverts from the upcoming calls with any revert data or reverter.", + "declaration": "function expectRevert(uint64 count) external;", + "visibility": "external", + "mutability": "", + "signature": "expectRevert(uint64)", + "selector": "0x4ee38244", + "selectorBytes": [ + 78, + 227, + 130, + 68 + ] + }, + "group": "testing", + "status": "stable", + "safety": "unsafe" + }, + { + "func": { + "id": "expectRevert_7", + "description": "Expects a `count` number of reverts from the upcoming calls that match the revert data.", + "declaration": "function expectRevert(bytes4 revertData, uint64 count) external;", + "visibility": "external", + "mutability": "", + "signature": "expectRevert(bytes4,uint64)", + "selector": "0xe45ca72d", + "selectorBytes": [ + 228, + 92, + 167, + 45 + ] + }, + "group": "testing", + "status": "stable", + "safety": "unsafe" + }, + { + "func": { + "id": "expectRevert_8", + "description": "Expects a `count` number of reverts from the upcoming calls that exactly match the revert data.", + "declaration": "function expectRevert(bytes calldata revertData, uint64 count) external;", + "visibility": "external", + "mutability": "", + "signature": "expectRevert(bytes,uint64)", + "selector": "0x4994c273", + "selectorBytes": [ + 73, + 148, + 194, + 115 + ] + }, + "group": "testing", + "status": "stable", + "safety": "unsafe" + }, + { + "func": { + "id": "expectRevert_9", + "description": "Expects a `count` number of reverts from the upcoming calls from the reverter address.", + "declaration": "function expectRevert(address reverter, uint64 count) external;", + "visibility": "external", + "mutability": "", + "signature": "expectRevert(address,uint64)", + "selector": "0x1ff5f952", + "selectorBytes": [ + 31, + 245, + 249, + 82 + ] + }, + "group": "testing", + "status": "stable", + "safety": "unsafe" + }, { "func": { "id": "expectSafeMemory", @@ -5355,7 +5475,7 @@ "func": { "id": "getBroadcast", "description": "Returns the most recent broadcast for the given contract on `chainId` matching `txType`.\nFor example:\nThe most recent deployment can be fetched by passing `txType` as `CREATE` or `CREATE2`.\nThe most recent call can be fetched by passing `txType` as `CALL`.", - "declaration": "function getBroadcast(string memory contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary memory);", + "declaration": "function getBroadcast(string calldata contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary memory);", "visibility": "external", "mutability": "view", "signature": "getBroadcast(string,uint64,uint8)", @@ -5375,7 +5495,7 @@ "func": { "id": "getBroadcasts_0", "description": "Returns all broadcasts for the given contract on `chainId` with the specified `txType`.\nSorted such that the most recent broadcast is the first element, and the oldest is the last. 
i.e descending order of BroadcastTxSummary.blockNumber.", - "declaration": "function getBroadcasts(string memory contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary[] memory);", + "declaration": "function getBroadcasts(string calldata contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary[] memory);", "visibility": "external", "mutability": "view", "signature": "getBroadcasts(string,uint64,uint8)", @@ -5395,7 +5515,7 @@ "func": { "id": "getBroadcasts_1", "description": "Returns all broadcasts for the given contract on `chainId`.\nSorted such that the most recent broadcast is the first element, and the oldest is the last. i.e descending order of BroadcastTxSummary.blockNumber.", - "declaration": "function getBroadcasts(string memory contractName, uint64 chainId) external view returns (BroadcastTxSummary[] memory);", + "declaration": "function getBroadcasts(string calldata contractName, uint64 chainId) external view returns (BroadcastTxSummary[] memory);", "visibility": "external", "mutability": "view", "signature": "getBroadcasts(string,uint64)", @@ -5455,7 +5575,7 @@ "func": { "id": "getDeployment_0", "description": "Returns the most recent deployment for the current `chainId`.", - "declaration": "function getDeployment(string memory contractName) external view returns (address deployedAddress);", + "declaration": "function getDeployment(string calldata contractName) external view returns (address deployedAddress);", "visibility": "external", "mutability": "view", "signature": "getDeployment(string)", @@ -5475,7 +5595,7 @@ "func": { "id": "getDeployment_1", "description": "Returns the most recent deployment for the given contract on `chainId`", - "declaration": "function getDeployment(string memory contractName, uint64 chainId) external view returns (address deployedAddress);", + "declaration": "function getDeployment(string calldata contractName, uint64 chainId) external view returns (address deployedAddress);", "visibility": "external", "mutability": "view", "signature": "getDeployment(string,uint64)", @@ -5495,7 +5615,7 @@ "func": { "id": "getDeployments", "description": "Returns all deployments for the given contract on `chainId`\nSorted in descending order of deployment time i.e descending order of BroadcastTxSummary.blockNumber.\nThe most recent deployment is the first element, and the oldest is the last.", - "declaration": "function getDeployments(string memory contractName, uint64 chainId) external view returns (address[] memory deployedAddresses);", + "declaration": "function getDeployments(string calldata contractName, uint64 chainId) external view returns (address[] memory deployedAddresses);", "visibility": "external", "mutability": "view", "signature": "getDeployments(string,uint64)", @@ -5671,6 +5791,46 @@ "status": "stable", "safety": "safe" }, + { + "func": { + "id": "getStateDiff", + "description": "Returns state diffs from current `vm.startStateDiffRecording` session.", + "declaration": "function getStateDiff() external view returns (string memory diff);", + "visibility": "external", + "mutability": "view", + "signature": "getStateDiff()", + "selector": "0x80df01cc", + "selectorBytes": [ + 128, + 223, + 1, + 204 + ] + }, + "group": "evm", + "status": "stable", + "safety": "safe" + }, + { + "func": { + "id": "getStateDiffJson", + "description": "Returns state diffs from current `vm.startStateDiffRecording` session, in json format.", + "declaration": "function getStateDiffJson() external 
view returns (string memory diff);", + "visibility": "external", + "mutability": "view", + "signature": "getStateDiffJson()", + "selector": "0xf54fe009", + "selectorBytes": [ + 245, + 79, + 224, + 9 + ] + }, + "group": "evm", + "status": "stable", + "safety": "safe" + }, { "func": { "id": "getWallets", @@ -8621,7 +8781,7 @@ "func": { "id": "serializeJsonType_0", "description": "See `serializeJson`.", - "declaration": "function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json);", + "declaration": "function serializeJsonType(string calldata typeDescription, bytes calldata value) external pure returns (string memory json);", "visibility": "external", "mutability": "pure", "signature": "serializeJsonType(string,bytes)", @@ -8641,7 +8801,7 @@ "func": { "id": "serializeJsonType_1", "description": "See `serializeJson`.", - "declaration": "function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json);", + "declaration": "function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes calldata value) external returns (string memory json);", "visibility": "external", "mutability": "", "signature": "serializeJsonType(string,string,string,bytes)", @@ -10340,4 +10500,4 @@ "safety": "safe" } ] -} \ No newline at end of file +} diff --git a/crates/cheatcodes/spec/src/vm.rs b/crates/cheatcodes/spec/src/vm.rs index cc3f91387..36455cf79 100644 --- a/crates/cheatcodes/spec/src/vm.rs +++ b/crates/cheatcodes/spec/src/vm.rs @@ -384,6 +384,14 @@ interface Vm { #[cheatcode(group = Evm, safety = Safe)] function stopAndReturnStateDiff() external returns (AccountAccess[] memory accountAccesses); + /// Returns state diffs from current `vm.startStateDiffRecording` session. + #[cheatcode(group = Evm, safety = Safe)] + function getStateDiff() external view returns (string memory diff); + + /// Returns state diffs from current `vm.startStateDiffRecording` session, in json format. + #[cheatcode(group = Evm, safety = Safe)] + function getStateDiffJson() external view returns (string memory diff); + // -------- Recording Map Writes -------- /// Starts recording all map SSTOREs for later retrieval. @@ -808,7 +816,7 @@ interface Vm { /// Gets all the logs according to specified filter. #[cheatcode(group = Evm, safety = Safe)] - function eth_getLogs(uint256 fromBlock, uint256 toBlock, address target, bytes32[] memory topics) + function eth_getLogs(uint256 fromBlock, uint256 toBlock, address target, bytes32[] calldata topics) external returns (EthGetLogs[] memory logs); @@ -1041,6 +1049,30 @@ interface Vm { #[cheatcode(group = Testing, safety = Unsafe)] function expectRevert(bytes calldata revertData, address reverter) external; + /// Expects a `count` number of reverts from the upcoming calls with any revert data or reverter. + #[cheatcode(group = Testing, safety = Unsafe)] + function expectRevert(uint64 count) external; + + /// Expects a `count` number of reverts from the upcoming calls that match the revert data. + #[cheatcode(group = Testing, safety = Unsafe)] + function expectRevert(bytes4 revertData, uint64 count) external; + + /// Expects a `count` number of reverts from the upcoming calls that exactly match the revert data. 
+ #[cheatcode(group = Testing, safety = Unsafe)] + function expectRevert(bytes calldata revertData, uint64 count) external; + + /// Expects a `count` number of reverts from the upcoming calls from the reverter address. + #[cheatcode(group = Testing, safety = Unsafe)] + function expectRevert(address reverter, uint64 count) external; + + /// Expects a `count` number of reverts from the upcoming calls from the reverter address that match the revert data. + #[cheatcode(group = Testing, safety = Unsafe)] + function expectRevert(bytes4 revertData, address reverter, uint64 count) external; + + /// Expects a `count` number of reverts from the upcoming calls from the reverter address that exactly match the revert data. + #[cheatcode(group = Testing, safety = Unsafe)] + function expectRevert(bytes calldata revertData, address reverter, uint64 count) external; + /// Expects an error on next call that starts with the revert data. #[cheatcode(group = Testing, safety = Unsafe)] function expectPartialRevert(bytes4 revertData) external; @@ -1786,27 +1818,27 @@ interface Vm { /// /// The most recent call can be fetched by passing `txType` as `CALL`. #[cheatcode(group = Filesystem)] - function getBroadcast(string memory contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary memory); + function getBroadcast(string calldata contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary memory); /// Returns all broadcasts for the given contract on `chainId` with the specified `txType`. /// /// Sorted such that the most recent broadcast is the first element, and the oldest is the last. i.e descending order of BroadcastTxSummary.blockNumber. #[cheatcode(group = Filesystem)] - function getBroadcasts(string memory contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary[] memory); + function getBroadcasts(string calldata contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary[] memory); /// Returns all broadcasts for the given contract on `chainId`. /// /// Sorted such that the most recent broadcast is the first element, and the oldest is the last. i.e descending order of BroadcastTxSummary.blockNumber. #[cheatcode(group = Filesystem)] - function getBroadcasts(string memory contractName, uint64 chainId) external view returns (BroadcastTxSummary[] memory); + function getBroadcasts(string calldata contractName, uint64 chainId) external view returns (BroadcastTxSummary[] memory); /// Returns the most recent deployment for the current `chainId`. #[cheatcode(group = Filesystem)] - function getDeployment(string memory contractName) external view returns (address deployedAddress); + function getDeployment(string calldata contractName) external view returns (address deployedAddress); /// Returns the most recent deployment for the given contract on `chainId` #[cheatcode(group = Filesystem)] - function getDeployment(string memory contractName, uint64 chainId) external view returns (address deployedAddress); + function getDeployment(string calldata contractName, uint64 chainId) external view returns (address deployedAddress); /// Returns all deployments for the given contract on `chainId` /// @@ -1814,7 +1846,7 @@ interface Vm { /// /// The most recent deployment is the first element, and the oldest is the last. 
#[cheatcode(group = Filesystem)] - function getDeployments(string memory contractName, uint64 chainId) external view returns (address[] memory deployedAddresses); + function getDeployments(string calldata contractName, uint64 chainId) external view returns (address[] memory deployedAddresses); // -------- Foreign Function Interface -------- @@ -2062,7 +2094,7 @@ interface Vm { /// Designate the next call as an EIP-7702 transaction #[cheatcode(group = Scripting)] - function attachDelegation(SignedDelegation memory signedDelegation) external; + function attachDelegation(SignedDelegation calldata signedDelegation) external; /// Sign an EIP-7702 authorization and designate the next call as an EIP-7702 transaction #[cheatcode(group = Scripting)] @@ -2320,13 +2352,13 @@ interface Vm { returns (string memory json); /// See `serializeJson`. #[cheatcode(group = Json)] - function serializeJsonType(string calldata typeDescription, bytes memory value) + function serializeJsonType(string calldata typeDescription, bytes calldata value) external pure returns (string memory json); /// See `serializeJson`. #[cheatcode(group = Json)] - function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) + function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes calldata value) external returns (string memory json); diff --git a/crates/cheatcodes/src/config.rs b/crates/cheatcodes/src/config.rs index e1185014c..aeea88b98 100644 --- a/crates/cheatcodes/src/config.rs +++ b/crates/cheatcodes/src/config.rs @@ -80,9 +80,9 @@ impl CheatsConfig { use_zk: bool, zk_env: Option, ) -> Self { - let mut allowed_paths = vec![config.root.0.clone()]; - allowed_paths.extend(config.libs.clone()); - allowed_paths.extend(config.allow_paths.clone()); + let mut allowed_paths = vec![config.root.clone()]; + allowed_paths.extend(config.libs.iter().cloned()); + allowed_paths.extend(config.allow_paths.iter().cloned()); let rpc_endpoints = config.rpc_endpoints.clone().resolved(); trace!(?rpc_endpoints, "using resolved rpc endpoints"); @@ -100,8 +100,8 @@ impl CheatsConfig { rpc_endpoints, paths: config.project_paths(), fs_permissions: config.fs_permissions.clone().joined(config.root.as_ref()), - root: config.root.0.clone(), - broadcast: config.root.0.clone().join(&config.broadcast), + root: config.root.clone(), + broadcast: config.root.clone().join(&config.broadcast), allowed_paths, evm_opts, labels: config.labels.clone(), @@ -116,6 +116,17 @@ impl CheatsConfig { } } + /// Returns a new `CheatsConfig` configured with the given `Config` and `EvmOpts`. + pub fn clone_with(&self, config: &Config, evm_opts: EvmOpts) -> Self { + Self::new( + config, + evm_opts, + self.available_artifacts.clone(), + self.running_contract.clone(), + self.running_version.clone(), + ) + } + /// Attempts to canonicalize (see [std::fs::canonicalize]) the path. /// /// Canonicalization fails for non-existing paths, in which case we just normalize the path. 
@@ -257,7 +268,7 @@ mod tests { fn config(root: &str, fs_permissions: FsPermissions) -> CheatsConfig { CheatsConfig::new( - &Config { root: PathBuf::from(root).into(), fs_permissions, ..Default::default() }, + &Config { root: root.into(), fs_permissions, ..Default::default() }, Default::default(), None, None, diff --git a/crates/cheatcodes/src/error.rs b/crates/cheatcodes/src/error.rs index d459e9274..c2c220edf 100644 --- a/crates/cheatcodes/src/error.rs +++ b/crates/cheatcodes/src/error.rs @@ -206,7 +206,6 @@ impl Error { } impl Drop for Error { - #[inline] fn drop(&mut self) { if self.drop { drop(unsafe { Box::<[u8]>::from_raw(self.data.cast_mut()) }); @@ -224,21 +223,18 @@ impl From> for Error { } impl From for Error { - #[inline] fn from(value: String) -> Self { Self::new_string(value) } } impl From<&'static str> for Error { - #[inline] fn from(value: &'static str) -> Self { Self::new_str(value) } } impl From> for Error { - #[inline] fn from(value: Cow<'static, [u8]>) -> Self { match value { Cow::Borrowed(bytes) => Self::new_bytes(bytes), @@ -248,21 +244,18 @@ impl From> for Error { } impl From<&'static [u8]> for Error { - #[inline] fn from(value: &'static [u8]) -> Self { Self::new_bytes(value) } } impl From<&'static [u8; N]> for Error { - #[inline] fn from(value: &'static [u8; N]) -> Self { Self::new_bytes(value) } } impl From> for Error { - #[inline] fn from(value: Vec) -> Self { Self::new_vec(value) } @@ -279,7 +272,6 @@ impl From for Error { macro_rules! impl_from { ($($t:ty),* $(,)?) => {$( impl From<$t> for Error { - #[inline] fn from(value: $t) -> Self { Self::display(value) } @@ -309,20 +301,14 @@ impl_from!( ); impl> From> for Error { - #[inline] fn from(err: EVMError) -> Self { Self::display(BackendError::from(err)) } } impl From for Error { - #[inline] fn from(err: eyre::Report) -> Self { - let mut chained_cause = String::new(); - for cause in err.chain() { - chained_cause.push_str(format!(" {cause};").as_str()); - } - Self::display(chained_cause) + Self::from(foundry_common::errors::display_chain(&err)) } } diff --git a/crates/cheatcodes/src/evm.rs b/crates/cheatcodes/src/evm.rs index bda24a598..8005c98a4 100644 --- a/crates/cheatcodes/src/evm.rs +++ b/crates/cheatcodes/src/evm.rs @@ -19,9 +19,15 @@ use foundry_evm_core::{ use foundry_evm_traces::StackSnapshotType; use rand::Rng; use revm::primitives::{Account, Bytecode, SpecId, KECCAK_EMPTY}; -use std::{collections::BTreeMap, path::Path}; +use std::{ + collections::{btree_map::Entry, BTreeMap}, + fmt::Display, + path::Path, +}; + mod record_debug_step; use record_debug_step::{convert_call_trace_to_debug_step, flatten_call_trace}; +use serde::Serialize; mod fork; pub(crate) mod mapping; @@ -52,6 +58,70 @@ pub struct DealRecord { pub new_balance: U256, } +/// Storage slot diff info. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +struct SlotStateDiff { + /// Initial storage value. + previous_value: B256, + /// Current storage value. + new_value: B256, +} + +/// Balance diff info. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +struct BalanceDiff { + /// Initial storage value. + previous_value: U256, + /// Current storage value. + new_value: U256, +} + +/// Account state diff info. +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +struct AccountStateDiffs { + /// Address label, if any set. + label: Option, + /// Account balance changes. + balance_diff: Option, + /// State changes, per slot. 
+ state_diff: BTreeMap<B256, SlotStateDiff>, } impl Display for AccountStateDiffs { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> eyre::Result<(), std::fmt::Error> { // Print changed account. if let Some(label) = &self.label { writeln!(f, "label: {label}")?; } // Print balance diff if changed. if let Some(balance_diff) = &self.balance_diff { if balance_diff.previous_value != balance_diff.new_value { writeln!( f, "- balance diff: {} → {}", balance_diff.previous_value, balance_diff.new_value )?; } } // Print state diff if any. if !&self.state_diff.is_empty() { writeln!(f, "- state diff:")?; for (slot, slot_changes) in &self.state_diff { writeln!( f, "@ {slot}: {} → {}", slot_changes.previous_value, slot_changes.new_value )?; } } + Ok(()) + } +} + impl Cheatcode for addrCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { privateKey } = self; @@ -702,6 +772,25 @@ impl Cheatcode for stopAndReturnStateDiffCall { } } +impl Cheatcode for getStateDiffCall { + fn apply(&self, state: &mut Cheatcodes) -> Result { + let mut diffs = String::new(); + let state_diffs = get_recorded_state_diffs(state); + for (address, state_diffs) in state_diffs { + diffs.push_str(&format!("{address}\n")); + diffs.push_str(&format!("{state_diffs}\n")); + } + Ok(diffs.abi_encode()) + } +} + +impl Cheatcode for getStateDiffJsonCall { + fn apply(&self, state: &mut Cheatcodes) -> Result { + let state_diffs = get_recorded_state_diffs(state); + Ok(serde_json::to_string(&state_diffs)?.abi_encode()) + } +} + impl Cheatcode for broadcastRawTransactionCall { fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result { let tx = TxEnvelope::decode(&mut self.data.as_ref()) @@ -787,11 +876,12 @@ impl Cheatcode for stopAndReturnDebugTraceRecordingCall { let debug_steps: Vec<DebugStep> = steps.iter().map(|&step| convert_call_trace_to_debug_step(step)).collect(); - // Free up memory by clearing the steps if they are not recorded outside of cheatcode usage. if !record_info.original_tracer_config.record_steps { tracer.traces_mut().nodes_mut().iter_mut().for_each(|node| { node.trace.steps = Vec::new(); + node.logs = Vec::new(); + node.ordering = Vec::new(); }); } @@ -1064,3 +1154,56 @@ fn genesis_account(account: &Account) -> GenesisAccount { private_key: None, } } + +/// Helper function to returns state diffs recorded for each changed account. +fn get_recorded_state_diffs(state: &mut Cheatcodes) -> BTreeMap<Address, AccountStateDiffs> { + let mut state_diffs: BTreeMap<Address, AccountStateDiffs> = BTreeMap::default(); + if let Some(records) = &state.recorded_account_diffs_stack { + records + .iter() + .flatten() + .filter(|account_access| { + !account_access.storageAccesses.is_empty() || + account_access.oldBalance != account_access.newBalance + }) + .for_each(|account_access| { + let account_diff = + state_diffs.entry(account_access.account).or_insert(AccountStateDiffs { + label: state.labels.get(&account_access.account).cloned(), + ..Default::default() + }); + + // Record account balance diffs. + if account_access.oldBalance != account_access.newBalance { + // Update balance diff. Do not overwrite the initial balance if already set. + if let Some(diff) = &mut account_diff.balance_diff { + diff.new_value = account_access.newBalance; + } else { + account_diff.balance_diff = Some(BalanceDiff { + previous_value: account_access.oldBalance, + new_value: account_access.newBalance, + }); + } + } + + // Record account state diffs.
+ for storage_access in &account_access.storageAccesses { + if storage_access.isWrite && !storage_access.reverted { + // Update state diff. Do not overwrite the initial value if already set. + match account_diff.state_diff.entry(storage_access.slot) { + Entry::Vacant(slot_state_diff) => { + slot_state_diff.insert(SlotStateDiff { + previous_value: storage_access.previousValue, + new_value: storage_access.newValue, + }); + } + Entry::Occupied(mut slot_state_diff) => { + slot_state_diff.get_mut().new_value = storage_access.newValue; + } + } + } + } + }); + } + state_diffs +} diff --git a/crates/cheatcodes/src/evm/mock.rs b/crates/cheatcodes/src/evm/mock.rs index aed6b1a56..3e02f12c3 100644 --- a/crates/cheatcodes/src/evm/mock.rs +++ b/crates/cheatcodes/src/evm/mock.rs @@ -187,7 +187,7 @@ fn mock_calls( fn make_acc_non_empty(callee: &Address, ecx: InnerEcx) -> Result { let acc = ecx.load_account(*callee)?; - let empty_bytecode = acc.info.code.as_ref().map_or(true, Bytecode::is_empty); + let empty_bytecode = acc.info.code.as_ref().is_none_or(Bytecode::is_empty); if empty_bytecode { let code = Bytecode::new_raw(Bytes::from_static(&foundry_zksync_core::EMPTY_CODE)); ecx.journaled_state.set_code(*callee, code); diff --git a/crates/cheatcodes/src/fs.rs b/crates/cheatcodes/src/fs.rs index 6325b71be..436afa466 100644 --- a/crates/cheatcodes/src/fs.rs +++ b/crates/cheatcodes/src/fs.rs @@ -4,9 +4,9 @@ use super::string::parse; use crate::{Cheatcode, Cheatcodes, CheatcodesExecutor, CheatsCtxt, Result, Vm::*}; use alloy_dyn_abi::DynSolType; use alloy_json_abi::ContractObject; +use alloy_network::AnyTransactionReceipt; use alloy_primitives::{hex, map::Entry, Bytes, U256}; use alloy_provider::network::ReceiptResponse; -use alloy_rpc_types::AnyTransactionReceipt; use alloy_sol_types::SolValue; use dialoguer::{Input, Password}; use forge_script_sequence::{BroadcastReader, TransactionWithMetadata}; diff --git a/crates/cheatcodes/src/inspector.rs b/crates/cheatcodes/src/inspector.rs index daa73a39c..bc77d3282 100644 --- a/crates/cheatcodes/src/inspector.rs +++ b/crates/cheatcodes/src/inspector.rs @@ -567,7 +567,7 @@ pub struct Cheatcodes { /// Scripting based transactions pub broadcastable_transactions: BroadcastableTransactions, - /// Additional, user configurable context this Inspector has access to when inspecting a call + /// Additional, user configurable context this Inspector has access to when inspecting a call. pub config: Arc, /// Test-scoped context holding data that needs to be reset every test run @@ -760,7 +760,7 @@ impl Cheatcodes { /// Returns the configured wallets if available, else creates a new instance. pub fn wallets(&mut self) -> &Wallets { - self.wallets.get_or_insert(Wallets::new(MultiWallet::default(), None)) + self.wallets.get_or_insert_with(|| Wallets::new(MultiWallet::default(), None)) } /// Sets the unlocked wallets. 
@@ -1462,16 +1462,23 @@ where { if ecx.journaled_state.depth() <= expected_revert.depth && matches!(expected_revert.kind, ExpectedRevertKind::Default) { - let expected_revert = std::mem::take(&mut self.expected_revert).unwrap(); - return match expect::handle_expect_revert( + let mut expected_revert = std::mem::take(&mut self.expected_revert).unwrap(); + let handler_result = expect::handle_expect_revert( false, true, - &expected_revert, + &mut expected_revert, outcome.result.result, outcome.result.output.clone(), &self.config.available_artifacts, - ) { + ); + + return match handler_result { Ok((address, retdata)) => { + expected_revert.actual_count += 1; + if expected_revert.actual_count < expected_revert.count { + self.expected_revert = Some(expected_revert.clone()); + } + outcome.result.result = InstructionResult::Return; outcome.result.output = retdata; outcome.address = address; @@ -1660,12 +1667,11 @@ where { *calldata == call.input[..calldata.len()] && // The value matches, if provided expected - .value - .map_or(true, |value| Some(value) == call.transfer_value()) && + .value.is_none_or(|value| Some(value) == call.transfer_value()) && // The gas matches, if provided - expected.gas.map_or(true, |gas| gas == call.gas_limit) && + expected.gas.is_none_or(|gas| gas == call.gas_limit) && // The minimum gas matches, if provided - expected.min_gas.map_or(true, |min_gas| min_gas <= call.gas_limit) + expected.min_gas.is_none_or(|min_gas| min_gas <= call.gas_limit) { *actual_count += 1; } @@ -1683,7 +1689,7 @@ where { .iter_mut() .find(|(mock, _)| { call.input.get(..mock.calldata.len()) == Some(&mock.calldata[..]) && - mock.value.map_or(true, |value| Some(value) == call.transfer_value()) + mock.value.is_none_or(|value| Some(value) == call.transfer_value()) }) .map(|(_, v)| v), } { @@ -1714,6 +1720,7 @@ where { if prank.delegate_call { call.target_address = prank.new_caller; call.caller = prank.new_caller; + // NOTE(zk): ecx_inner vs upstream's ecx used here let acc = ecx_inner.journaled_state.account(prank.new_caller); call.value = CallValue::Apparent(acc.info.balance); if let Some(new_origin) = prank.new_origin { @@ -1722,6 +1729,7 @@ where { } } + // NOTE(zk): ecx_inner vs upstream's ecx used here if ecx_inner.journaled_state.depth() >= prank.depth && call.caller == prank.prank_caller { let mut prank_applied = false; @@ -2302,6 +2310,14 @@ impl Inspector<&mut dyn DatabaseExt> for Cheatcodes { expected_revert.reverted_by.is_none() { expected_revert.reverted_by = Some(call.target_address); + } else if outcome.result.is_revert() && + expected_revert.reverter.is_some() && + expected_revert.reverted_by.is_some() && + expected_revert.count > 1 + { + // If we're expecting more than one revert, we need to reset the reverted_by address + // to latest reverter. 
+ expected_revert.reverted_by = Some(call.target_address); } if ecx.journaled_state.depth() <= expected_revert.depth { @@ -2315,15 +2331,20 @@ impl Inspector<&mut dyn DatabaseExt> for Cheatcodes { }; if needs_processing { - let expected_revert = std::mem::take(&mut self.expected_revert).unwrap(); - return match expect::handle_expect_revert( + // Only `remove` the expected revert from state if `expected_revert.count` == + // `expected_revert.actual_count` + let mut expected_revert = std::mem::take(&mut self.expected_revert).unwrap(); + + let handler_result = expect::handle_expect_revert( cheatcode_call, false, - &expected_revert, + &mut expected_revert, outcome.result.result, outcome.result.output.clone(), &self.config.available_artifacts, - ) { + ); + + return match handler_result { Err(error) => { trace!(expected=?expected_revert, ?error, status=?outcome.result.result, "Expected revert mismatch"); outcome.result.result = InstructionResult::Revert; @@ -2331,6 +2352,10 @@ impl Inspector<&mut dyn DatabaseExt> for Cheatcodes { outcome } Ok((_, retdata)) => { + expected_revert.actual_count += 1; + if expected_revert.actual_count < expected_revert.count { + self.expected_revert = Some(expected_revert.clone()); + } outcome.result.result = InstructionResult::Return; outcome.result.output = retdata; outcome @@ -2586,6 +2611,10 @@ impl InspectorExt for Cheatcodes { false } } + + fn create2_deployer(&self) -> Address { + self.config.evm_opts.create2_deployer + } } impl Cheatcodes { diff --git a/crates/cheatcodes/src/string.rs b/crates/cheatcodes/src/string.rs index a4c06eef6..080d9bc08 100644 --- a/crates/cheatcodes/src/string.rs +++ b/crates/cheatcodes/src/string.rs @@ -17,7 +17,7 @@ impl Cheatcode for toString_0Call { impl Cheatcode for toString_1Call { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { value } = self; - Ok(hex::encode_prefixed(value).abi_encode()) + Ok(value.to_string().abi_encode()) } } @@ -95,7 +95,6 @@ impl Cheatcode for parseBoolCall { } } -// toLowercase impl Cheatcode for toLowercaseCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { input } = self; @@ -103,7 +102,6 @@ impl Cheatcode for toLowercaseCall { } } -// toUppercase impl Cheatcode for toUppercaseCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { input } = self; @@ -111,7 +109,6 @@ impl Cheatcode for toUppercaseCall { } } -// trim impl Cheatcode for trimCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { input } = self; @@ -119,7 +116,6 @@ impl Cheatcode for trimCall { } } -// Replace impl Cheatcode for replaceCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { input, from, to } = self; @@ -127,7 +123,6 @@ impl Cheatcode for replaceCall { } } -// Split impl Cheatcode for splitCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { input, delimiter } = self; @@ -136,7 +131,6 @@ impl Cheatcode for splitCall { } } -// indexOf impl Cheatcode for indexOfCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { input, key } = self; @@ -144,7 +138,6 @@ impl Cheatcode for indexOfCall { } } -// contains impl Cheatcode for containsCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { subject, search } = self; @@ -202,7 +195,7 @@ fn parse_value_fallback(s: &str, ty: &DynSolType) -> Option { - if !s.starts_with("0x") && s.chars().all(|c| c.is_ascii_hexdigit()) { + if !s.starts_with("0x") && hex::check_raw(s) { return Some(Err("missing hex prefix (\"0x\") for hex string")); } } diff --git 
a/crates/cheatcodes/src/test/expect.rs b/crates/cheatcodes/src/test/expect.rs index 94afe820d..617948a90 100644 --- a/crates/cheatcodes/src/test/expect.rs +++ b/crates/cheatcodes/src/test/expect.rs @@ -49,6 +49,10 @@ pub struct ExpectedRevert { pub reverter: Option
<Address>, /// Actual reverter of the call. pub reverted_by: Option<Address>
, + /// Number of times this revert is expected. + pub count: u64, + /// Actual number of times this revert has been seen. + pub actual_count: u64, } #[derive(Clone, Debug)] @@ -257,7 +261,7 @@ impl Cheatcode for expectEmitAnonymous_3Call { impl Cheatcode for expectRevert_0Call { fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result { let Self {} = self; - expect_revert(ccx.state, None, ccx.ecx.journaled_state.depth(), false, false, None) + expect_revert(ccx.state, None, ccx.ecx.journaled_state.depth(), false, false, None, 1) } } @@ -271,6 +275,7 @@ impl Cheatcode for expectRevert_1Call { false, false, None, + 1, ) } } @@ -285,6 +290,7 @@ impl Cheatcode for expectRevert_2Call { false, false, None, + 1, ) } } @@ -299,6 +305,7 @@ impl Cheatcode for expectRevert_3Call { false, false, Some(*reverter), + 1, ) } } @@ -313,6 +320,7 @@ impl Cheatcode for expectRevert_4Call { false, false, Some(*reverter), + 1, ) } } @@ -327,6 +335,89 @@ impl Cheatcode for expectRevert_5Call { false, false, Some(*reverter), + 1, + ) + } +} + +impl Cheatcode for expectRevert_6Call { + fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result { + let Self { count } = self; + expect_revert(ccx.state, None, ccx.ecx.journaled_state.depth(), false, false, None, *count) + } +} + +impl Cheatcode for expectRevert_7Call { + fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result { + let Self { revertData, count } = self; + expect_revert( + ccx.state, + Some(revertData.as_ref()), + ccx.ecx.journaled_state.depth(), + false, + false, + None, + *count, + ) + } +} + +impl Cheatcode for expectRevert_8Call { + fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result { + let Self { revertData, count } = self; + expect_revert( + ccx.state, + Some(revertData), + ccx.ecx.journaled_state.depth(), + false, + false, + None, + *count, + ) + } +} + +impl Cheatcode for expectRevert_9Call { + fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result { + let Self { reverter, count } = self; + expect_revert( + ccx.state, + None, + ccx.ecx.journaled_state.depth(), + false, + false, + Some(*reverter), + *count, + ) + } +} + +impl Cheatcode for expectRevert_10Call { + fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result { + let Self { revertData, reverter, count } = self; + expect_revert( + ccx.state, + Some(revertData.as_ref()), + ccx.ecx.journaled_state.depth(), + false, + false, + Some(*reverter), + *count, + ) + } +} + +impl Cheatcode for expectRevert_11Call { + fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result { + let Self { revertData, reverter, count } = self; + expect_revert( + ccx.state, + Some(revertData), + ccx.ecx.journaled_state.depth(), + false, + false, + Some(*reverter), + *count, ) } } @@ -341,6 +432,7 @@ impl Cheatcode for expectPartialRevert_0Call { false, true, None, + 1, ) } } @@ -355,13 +447,14 @@ impl Cheatcode for expectPartialRevert_1Call { false, true, Some(*reverter), + 1, ) } } impl Cheatcode for _expectCheatcodeRevert_0Call { fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result { - expect_revert(ccx.state, None, ccx.ecx.journaled_state.depth(), true, false, None) + expect_revert(ccx.state, None, ccx.ecx.journaled_state.depth(), true, false, None, 1) } } @@ -375,6 +468,7 @@ impl Cheatcode for _expectCheatcodeRevert_1Call { true, false, None, + 1, ) } } @@ -389,6 +483,7 @@ impl Cheatcode for _expectCheatcodeRevert_2Call { true, false, None, + 1, ) } } @@ -624,6 +719,7 @@ fn expect_revert( cheatcode: bool, partial_match: bool, reverter: Option
, + count: u64, ) -> Result { ensure!( state.expected_revert.is_none(), @@ -640,6 +736,8 @@ fn expect_revert( partial_match, reverter, reverted_by: None, + count, + actual_count: 0, }); Ok(Default::default()) } @@ -647,7 +745,7 @@ fn expect_revert( pub(crate) fn handle_expect_revert( is_cheatcode: bool, is_create: bool, - expected_revert: &ExpectedRevert, + expected_revert: &mut ExpectedRevert, status: InstructionResult, retdata: Bytes, known_contracts: &Option, @@ -660,72 +758,117 @@ pub(crate) fn handle_expect_revert( } }; - ensure!(!matches!(status, return_ok!()), "next call did not revert as expected"); - - // If expected reverter address is set then check it matches the actual reverter. - if let (Some(expected_reverter), Some(actual_reverter)) = - (expected_revert.reverter, expected_revert.reverted_by) - { - if expected_reverter != actual_reverter { - return Err(fmt_err!( - "Reverter != expected reverter: {} != {}", - actual_reverter, - expected_reverter - )); + let stringify = |data: &[u8]| { + if let Ok(s) = String::abi_decode(data, true) { + return s; } - } - - let expected_reason = expected_revert.reason.as_deref(); - // If None, accept any revert. - let Some(expected_reason) = expected_reason else { - return Ok(success_return()); + if data.is_ascii() { + return std::str::from_utf8(data).unwrap().to_owned(); + } + hex::encode_prefixed(data) }; - if !expected_reason.is_empty() && retdata.is_empty() { - bail!("call reverted as expected, but without data"); - } - - let mut actual_revert: Vec = retdata.into(); + if expected_revert.count == 0 { + if expected_revert.reverter.is_none() && expected_revert.reason.is_none() { + ensure!( + matches!(status, return_ok!()), + "call reverted when it was expected not to revert" + ); + return Ok(success_return()); + } - // Compare only the first 4 bytes if partial match. - if expected_revert.partial_match && actual_revert.get(..4) == expected_reason.get(..4) { - return Ok(success_return()) - } + // Flags to track if the reason and reverter match. + let mut reason_match = expected_revert.reason.as_ref().map(|_| false); + let mut reverter_match = expected_revert.reverter.as_ref().map(|_| false); - // Try decoding as known errors. 
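The new `count`/`actual_count` pair on `ExpectedRevert` drives the re-arming logic in the inspector hunks above: each matched revert bumps `actual_count`, and the expectation is only put back into state while more reverts are still expected. A hypothetical, self-contained sketch of just that bookkeeping (`CountedExpectation` and `on_matched_revert` are illustrative names, not the real cheatcode types):

```rust
// Illustrative sketch only, mirroring the `call_end`/`create_end` hunks above.
struct CountedExpectation {
    count: u64,        // how many reverts the test expects
    actual_count: u64, // how many have been matched so far
}

fn on_matched_revert(slot: &mut Option<CountedExpectation>) {
    if let Some(mut expected) = slot.take() {
        expected.actual_count += 1;
        // Re-arm the expectation only while more reverts are still expected.
        if expected.actual_count < expected.count {
            *slot = Some(expected);
        }
    }
}

fn main() {
    let mut slot = Some(CountedExpectation { count: 3, actual_count: 0 });
    for _ in 0..3 {
        on_matched_revert(&mut slot);
    }
    assert!(slot.is_none(), "expectation is consumed after the third matched revert");
}
```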
- if matches!( - actual_revert.get(..4).map(|s| s.try_into().unwrap()), - Some(Vm::CheatcodeError::SELECTOR | alloy_sol_types::Revert::SELECTOR) - ) { - if let Ok(decoded) = Vec::::abi_decode(&actual_revert[4..], false) { - actual_revert = decoded; + // Reverter check + if let (Some(expected_reverter), Some(actual_reverter)) = + (expected_revert.reverter, expected_revert.reverted_by) + { + if expected_reverter == actual_reverter { + reverter_match = Some(true); + } } - } - if actual_revert == expected_reason || - (is_cheatcode && memchr::memmem::find(&actual_revert, expected_reason).is_some()) - { - Ok(success_return()) + // Reason check + let expected_reason = expected_revert.reason.as_deref(); + if let Some(expected_reason) = expected_reason { + let mut actual_revert: Vec = retdata.into(); + actual_revert = decode_revert(actual_revert); + + if actual_revert == expected_reason { + reason_match = Some(true); + } + }; + + match (reason_match, reverter_match) { + (Some(true), Some(true)) => Err(fmt_err!( + "expected 0 reverts with reason: {}, from address: {}, but got one", + &stringify(expected_reason.unwrap_or_default()), + expected_revert.reverter.unwrap() + )), + (Some(true), None) => Err(fmt_err!( + "expected 0 reverts with reason: {}, but got one", + &stringify(expected_reason.unwrap_or_default()) + )), + (None, Some(true)) => Err(fmt_err!( + "expected 0 reverts from address: {}, but got one", + expected_revert.reverter.unwrap() + )), + _ => Ok(success_return()), + } } else { - let (actual, expected) = if let Some(contracts) = known_contracts { - let decoder = RevertDecoder::new().with_abis(contracts.iter().map(|(_, c)| &c.abi)); - ( - &decoder.decode(actual_revert.as_slice(), Some(status)), - &decoder.decode(expected_reason, Some(status)), - ) + ensure!(!matches!(status, return_ok!()), "next call did not revert as expected"); + + // If expected reverter address is set then check it matches the actual reverter. + if let (Some(expected_reverter), Some(actual_reverter)) = + (expected_revert.reverter, expected_revert.reverted_by) + { + if expected_reverter != actual_reverter { + return Err(fmt_err!( + "Reverter != expected reverter: {} != {}", + actual_reverter, + expected_reverter + )); + } + } + + let expected_reason = expected_revert.reason.as_deref(); + // If None, accept any revert. + let Some(expected_reason) = expected_reason else { + return Ok(success_return()); + }; + + if !expected_reason.is_empty() && retdata.is_empty() { + bail!("call reverted as expected, but without data"); + } + + let mut actual_revert: Vec = retdata.into(); + + // Compare only the first 4 bytes if partial match. + if expected_revert.partial_match && actual_revert.get(..4) == expected_reason.get(..4) { + return Ok(success_return()) + } + + // Try decoding as known errors. 
+ actual_revert = decode_revert(actual_revert); + + if actual_revert == expected_reason || + (is_cheatcode && memchr::memmem::find(&actual_revert, expected_reason).is_some()) + { + Ok(success_return()) } else { - let stringify = |data: &[u8]| { - if let Ok(s) = String::abi_decode(data, true) { - return s; - } - if data.is_ascii() { - return std::str::from_utf8(data).unwrap().to_owned(); - } - hex::encode_prefixed(data) + let (actual, expected) = if let Some(contracts) = known_contracts { + let decoder = RevertDecoder::new().with_abis(contracts.iter().map(|(_, c)| &c.abi)); + ( + &decoder.decode(actual_revert.as_slice(), Some(status)), + &decoder.decode(expected_reason, Some(status)), + ) + } else { + (&stringify(&actual_revert), &stringify(expected_reason)) }; - (&stringify(&actual_revert), &stringify(expected_reason)) - }; - Err(fmt_err!("Error != expected error: {} != {}", actual, expected,)) + Err(fmt_err!("Error != expected error: {} != {}", actual, expected,)) + } } } @@ -736,3 +879,15 @@ fn expect_safe_memory(state: &mut Cheatcodes, start: u64, end: u64, depth: u64) offsets.push(start..end); Ok(Default::default()) } + +fn decode_revert(revert: Vec) -> Vec { + if matches!( + revert.get(..4).map(|s| s.try_into().unwrap()), + Some(Vm::CheatcodeError::SELECTOR | alloy_sol_types::Revert::SELECTOR) + ) { + if let Ok(decoded) = Vec::::abi_decode(&revert[4..], false) { + return decoded; + } + } + revert +} diff --git a/crates/chisel/Cargo.toml b/crates/chisel/Cargo.toml index 65b3c9748..167329d45 100644 --- a/crates/chisel/Cargo.toml +++ b/crates/chisel/Cargo.toml @@ -53,11 +53,12 @@ eyre.workspace = true regex.workspace = true reqwest.workspace = true revm.workspace = true -rustyline = "14" +rustyline = "15" semver.workspace = true serde_json.workspace = true serde.workspace = true solang-parser.workspace = true +solar-parse.workspace = true strum = { workspace = true, features = ["derive"] } time = { version = "0.3", features = ["formatting"] } tokio = { workspace = true, features = ["full"] } diff --git a/crates/chisel/bin/main.rs b/crates/chisel/bin/main.rs index acc2c6f17..ca3fc1ff5 100644 --- a/crates/chisel/bin/main.rs +++ b/crates/chisel/bin/main.rs @@ -35,7 +35,7 @@ extern crate foundry_common; static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; // Loads project's figment and merges the build cli arguments into it -foundry_config::merge_impl_figment_convert!(Chisel, opts, evm_opts); +foundry_config::merge_impl_figment_convert!(Chisel, opts, evm_args); const VERSION_MESSAGE: &str = concat!( env!("CARGO_PKG_VERSION"), @@ -76,7 +76,7 @@ pub struct Chisel { pub opts: CoreBuildArgs, #[command(flatten)] - pub evm_opts: EvmArgs, + pub evm_args: EvmArgs, } /// Chisel binary subcommands diff --git a/crates/chisel/src/dispatcher.rs b/crates/chisel/src/dispatcher.rs index d69de3bf5..2a6a2fc3f 100644 --- a/crates/chisel/src/dispatcher.rs +++ b/crates/chisel/src/dispatcher.rs @@ -38,14 +38,16 @@ use strum::IntoEnumIterator; use tracing::debug; use yansi::Paint; -/// Prompt arrow character -pub static PROMPT_ARROW: char = '➜'; -static DEFAULT_PROMPT: &str = "➜ "; +/// Prompt arrow character. +pub const PROMPT_ARROW: char = '➜'; +/// Prompt arrow string. 
+pub const PROMPT_ARROW_STR: &str = "➜"; +const DEFAULT_PROMPT: &str = "➜ "; /// Command leader character -pub static COMMAND_LEADER: char = '!'; +pub const COMMAND_LEADER: char = '!'; /// Chisel character -pub static CHISEL_CHAR: &str = "⚒️"; +pub const CHISEL_CHAR: &str = "⚒️"; /// Matches Solidity comments static COMMENT_RE: LazyLock = @@ -320,7 +322,7 @@ impl ChiselDispatcher { }, ChiselCommand::Source => match self.format_source() { Ok(formatted_source) => DispatchResult::CommandSuccess(Some( - SolidityHelper::highlight(&formatted_source).into_owned(), + SolidityHelper::new().highlight(&formatted_source).into_owned(), )), Err(_) => { DispatchResult::CommandFailed(String::from("Failed to format session source")) diff --git a/crates/chisel/src/executor.rs b/crates/chisel/src/executor.rs index 87d69b9ba..d128dac4f 100644 --- a/crates/chisel/src/executor.rs +++ b/crates/chisel/src/executor.rs @@ -7,7 +7,7 @@ use crate::prelude::{ }; use alloy_dyn_abi::{DynSolType, DynSolValue}; use alloy_json_abi::EventParam; -use alloy_primitives::{hex, Address, U256}; +use alloy_primitives::{hex, Address, B256, U256}; use core::fmt::Debug; use eyre::{Result, WrapErr}; use foundry_compilers::Artifact; @@ -344,7 +344,7 @@ impl SessionSource { ) }) .gas_limit(self.config.evm_opts.gas_limit()) - .spec(self.config.foundry_config.evm_spec_id()) + .spec_id(self.config.foundry_config.evm_spec_id()) .legacy_assertions(self.config.foundry_config.legacy_assertions) .build(env, backend); @@ -382,7 +382,7 @@ fn format_token(token: DynSolValue) -> String { .collect::() ) .cyan(), - format!("{i:#x}").cyan(), + hex::encode_prefixed(B256::from(i)).cyan(), i.cyan() ) } @@ -400,7 +400,7 @@ fn format_token(token: DynSolValue) -> String { .collect::() ) .cyan(), - format!("{i:#x}").cyan(), + hex::encode_prefixed(B256::from(i)).cyan(), i.cyan() ) } @@ -507,7 +507,7 @@ fn format_event_definition(event_definition: &pt::EventDefinition) -> Result Self { + Self::new() + } } impl SolidityHelper { /// Create a new SolidityHelper. pub fn new() -> Self { - Self::default() + Self { + errored: false, + do_paint: yansi::is_enabled(), + sess: Session::builder().with_silent_emitter(None).build(), + globals: SessionGlobals::new(), + } + } + + /// Returns whether the helper is in an errored state. + pub fn errored(&self) -> bool { + self.errored } /// Set the errored field. 
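The `format_token` change in `crates/chisel/src/executor.rs` above replaces `format!("{i:#x}")` with `hex::encode_prefixed(B256::from(i))`, so integer values print as a full left-padded 32-byte word instead of minimal-width hex. A minimal sketch of the observable difference, assuming only `alloy_primitives` as a dependency:

```rust
use alloy_primitives::{hex, B256, U256};

fn main() {
    let i = U256::from(1u64);
    // Old rendering: shortest hex form.
    assert_eq!(format!("{i:#x}"), "0x1");
    // New rendering: the value as a full 32-byte word, zero-padded on the left.
    assert_eq!(hex::encode_prefixed(B256::from(i)), format!("0x{}1", "0".repeat(63)));
}
```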
@@ -55,54 +68,9 @@ impl SolidityHelper { self } - /// Get styles for a solidity source string - pub fn get_styles(input: &str) -> Vec { - let mut comments = Vec::with_capacity(DEFAULT_COMMENTS); - let mut errors = Vec::with_capacity(5); - let mut out = Lexer::new(input, 0, &mut comments, &mut errors) - .map(|(start, token, end)| (start, token.style(), end)) - .collect::>(); - - // highlight comments too - let comments_iter = comments.into_iter().map(|comment| { - let loc = match comment { - pt::Comment::Line(loc, _) | - pt::Comment::Block(loc, _) | - pt::Comment::DocLine(loc, _) | - pt::Comment::DocBlock(loc, _) => loc, - }; - (loc.start(), Style::new().dim(), loc.end()) - }); - out.extend(comments_iter); - - out - } - - /// Get contiguous styles for a solidity source string - pub fn get_contiguous_styles(input: &str) -> Vec { - let mut styles = Self::get_styles(input); - styles.sort_unstable_by_key(|(start, _, _)| *start); - - let len = input.len(); - // len / 4 is just a random average of whitespaces in the input - let mut out = Vec::with_capacity(styles.len() + len / 4 + 1); - let mut index = 0; - for (start, style, end) in styles { - if index < start { - out.push((index, Style::default(), start)); - } - out.push((start, style, end)); - index = end; - } - if index < len { - out.push((index, Style::default(), len)); - } - out - } - - /// Highlights a solidity source string - pub fn highlight(input: &str) -> Cow<'_, str> { - if !yansi::is_enabled() { + /// Highlights a Solidity source string. + pub fn highlight<'a>(&self, input: &'a str) -> Cow<'a, str> { + if !self.do_paint() { return Cow::Borrowed(input) } @@ -133,52 +101,53 @@ impl SolidityHelper { Cow::Owned(out) } else { - let styles = Self::get_contiguous_styles(input); - let len = styles.len(); - if len == 0 { - Cow::Borrowed(input) - } else { - let mut out = String::with_capacity(input.len() + MAX_ANSI_LEN * len); - for (start, style, end) in styles { - Self::paint_unchecked(&input[start..end], style, &mut out); + let mut out = String::with_capacity(input.len() * 2); + self.with_contiguous_styles(input, |style, range| { + Self::paint_unchecked(&input[range], style, &mut out); + }); + Cow::Owned(out) + } + } + + /// Returns a list of styles and the ranges they should be applied to. + /// + /// Covers the entire source string, including any whitespace. + fn with_contiguous_styles(&self, input: &str, mut f: impl FnMut(Style, Range)) { + self.enter(|sess| { + let len = input.len(); + let mut index = 0; + for token in Lexer::new(sess, input) { + let range = token.span.lo().to_usize()..token.span.hi().to_usize(); + let style = token_style(&token); + if index < range.start { + f(Style::default(), index..range.start); } - Cow::Owned(out) + index = range.end; + f(style, range); } - } + if index < len { + f(Style::default(), index..len); + } + }); } /// Validate that a source snippet is closed (i.e., all braces and parenthesis are matched). 
- fn validate_closed(input: &str) -> ValidationResult { - let mut bracket_depth = 0usize; - let mut paren_depth = 0usize; - let mut brace_depth = 0usize; - let mut comments = Vec::with_capacity(DEFAULT_COMMENTS); - // returns on any encountered error, so allocate for just one - let mut errors = Vec::with_capacity(1); - for (_, token, _) in Lexer::new(input, 0, &mut comments, &mut errors) { - match token { - Token::OpenBracket => { - bracket_depth += 1; - } - Token::OpenCurlyBrace => { - brace_depth += 1; + fn validate_closed(&self, input: &str) -> ValidationResult { + let mut depth = [0usize; 3]; + self.enter(|sess| { + for token in Lexer::new(sess, input) { + match token.kind { + TokenKind::OpenDelim(delim) => { + depth[delim as usize] += 1; + } + TokenKind::CloseDelim(delim) => { + depth[delim as usize] = depth[delim as usize].saturating_sub(1); + } + _ => {} } - Token::OpenParenthesis => { - paren_depth += 1; - } - Token::CloseBracket => { - bracket_depth = bracket_depth.saturating_sub(1); - } - Token::CloseCurlyBrace => { - brace_depth = brace_depth.saturating_sub(1); - } - Token::CloseParenthesis => { - paren_depth = paren_depth.saturating_sub(1); - } - _ => {} } - } - if (bracket_depth | brace_depth | paren_depth) == 0 { + }); + if depth == [0; 3] { ValidationResult::Valid(None) } else { ValidationResult::Incomplete @@ -186,8 +155,7 @@ impl SolidityHelper { } /// Formats `input` with `style` into `out`, without checking `style.wrapping` or - /// `yansi::is_enabled` - #[inline] + /// `self.do_paint`. fn paint_unchecked(string: &str, style: Style, out: &mut String) { if style == Style::default() { out.push_str(string); @@ -198,20 +166,29 @@ impl SolidityHelper { } } - #[inline] fn paint_unchecked_owned(string: &str, style: Style) -> String { let mut out = String::with_capacity(MAX_ANSI_LEN + string.len()); Self::paint_unchecked(string, style, &mut out); out } + + /// Returns whether to color the output. + fn do_paint(&self) -> bool { + self.do_paint + } + + /// Enters the session. 
+ fn enter(&self, f: impl FnOnce(&Session)) { + self.globals.set(|| self.sess.enter(|| f(&self.sess))); + } } impl Highlighter for SolidityHelper { fn highlight<'l>(&self, line: &'l str, _pos: usize) -> Cow<'l, str> { - Self::highlight(line) + self.highlight(line) } - fn highlight_char(&self, line: &str, pos: usize, _forced: bool) -> bool { + fn highlight_char(&self, line: &str, pos: usize, _kind: CmdKind) -> bool { pos == line.len() } @@ -220,7 +197,7 @@ impl Highlighter for SolidityHelper { prompt: &'p str, _default: bool, ) -> Cow<'b, str> { - if !yansi::is_enabled() { + if !self.do_paint() { return Cow::Borrowed(prompt) } @@ -241,14 +218,7 @@ impl Highlighter for SolidityHelper { if let Some(i) = out.find(PROMPT_ARROW) { let style = if self.errored { Color::Red.foreground() } else { Color::Green.foreground() }; - - let mut arrow = String::with_capacity(MAX_ANSI_LEN + 4); - - let _ = style.fmt_prefix(&mut arrow); - arrow.push(PROMPT_ARROW); - let _ = style.fmt_suffix(&mut arrow); - - out.replace_range(i..=i + 2, &arrow); + out.replace_range(i..=i + 2, &Self::paint_unchecked_owned(PROMPT_ARROW_STR, style)); } Cow::Owned(out) @@ -257,7 +227,7 @@ impl Highlighter for SolidityHelper { impl Validator for SolidityHelper { fn validate(&self, ctx: &mut ValidationContext<'_>) -> rustyline::Result { - Ok(Self::validate_closed(ctx.input())) + Ok(self.validate_closed(ctx.input())) } } @@ -271,44 +241,32 @@ impl Hinter for SolidityHelper { impl Helper for SolidityHelper {} -/// Trait that assigns a color to a Token kind -pub trait TokenStyle { - /// Returns the style with which the token should be decorated with. - fn style(&self) -> Style; -} +#[allow(non_upper_case_globals)] +#[deny(unreachable_patterns)] +fn token_style(token: &Token) -> Style { + use solar_parse::{ + interface::kw::*, + token::{TokenKind::*, TokenLitKind::*}, + }; -/// [TokenStyle] implementation for [Token] -impl TokenStyle for Token<'_> { - fn style(&self) -> Style { - use Token::*; - match self { - StringLiteral(_, _) => Color::Green.foreground(), - - AddressLiteral(_) | - HexLiteral(_) | - Number(_, _) | - RationalNumber(_, _, _) | - HexNumber(_) | - True | - False => Color::Yellow.foreground(), + match token.kind { + Literal(Str | HexStr | UnicodeStr, _) => Color::Green.foreground(), + Literal(..) => Color::Yellow.foreground(), + Ident( Memory | Storage | Calldata | Public | Private | Internal | External | Constant | Pure | View | Payable | Anonymous | Indexed | Abstract | Virtual | Override | - Modifier | Immutable | Unchecked => Color::Cyan.foreground(), + Modifier | Immutable | Unchecked, + ) => Color::Cyan.foreground(), - Contract | Library | Interface | Function | Pragma | Import | Struct | Event | - Enum | Type | Constructor | As | Is | Using | New | Delete | Do | Continue | - Break | Throw | Emit | Return | Returns | Revert | For | While | If | Else | Try | - Catch | Assembly | Let | Leave | Switch | Case | Default | YulArrow | Arrow => { - Color::Magenta.foreground() - } + Ident(s) if s.is_elementary_type() => Color::Blue.foreground(), + Ident(Mapping) => Color::Blue.foreground(), - Uint(_) | Int(_) | Bytes(_) | Byte | DynamicBytes | Bool | Address | String | - Mapping => Color::Blue.foreground(), + Ident(s) if s.is_used_keyword() || s.is_yul_keyword() => Color::Magenta.foreground(), + Arrow | FatArrow => Color::Magenta.foreground(), - Identifier(_) => Style::default(), + Comment(..) 
=> Color::Primary.dim(), - _ => Style::default(), - } + _ => Color::Primary.foreground(), } } diff --git a/crates/cli/src/opts/build/core.rs b/crates/cli/src/opts/build/core.rs index 55e4bb2f3..884fe8598 100644 --- a/crates/cli/src/opts/build/core.rs +++ b/crates/cli/src/opts/build/core.rs @@ -16,8 +16,7 @@ use foundry_config::{ Figment, Metadata, Profile, Provider, }, filter::SkipBuildFilter, - providers::remappings::Remappings, - Config, + Config, Remappings, }; use serde::Serialize; use std::path::PathBuf; @@ -211,7 +210,7 @@ impl<'a> From<&'a CoreBuildArgs> for Config { // if `--config-path` is set we need to adjust the config's root path to the actual root // path for the project, otherwise it will the parent dir of the `--config-path` if args.project_paths.config_path.is_some() { - config.root = args.project_paths.project_root().into(); + config.root = args.project_paths.project_root(); } config } diff --git a/crates/cli/src/opts/ethereum.rs b/crates/cli/src/opts/ethereum.rs index b858d998f..8d2601be1 100644 --- a/crates/cli/src/opts/ethereum.rs +++ b/crates/cli/src/opts/ethereum.rs @@ -1,4 +1,5 @@ use crate::opts::ChainValueParser; +use alloy_chains::ChainKind; use clap::Parser; use eyre::Result; use foundry_config::{ @@ -48,6 +49,10 @@ pub struct RpcOpts { /// Default value: 45 #[arg(long, env = "ETH_RPC_TIMEOUT")] pub rpc_timeout: Option, + + /// Specify custom headers for RPC requests. + #[arg(long, alias = "headers", env = "ETH_RPC_HEADERS", value_delimiter(','))] + pub rpc_headers: Option>, } impl_figment_convert_cast!(RpcOpts); @@ -95,6 +100,9 @@ impl RpcOpts { if let Some(rpc_timeout) = self.rpc_timeout { dict.insert("eth_rpc_timeout".into(), rpc_timeout.into()); } + if let Some(headers) = &self.rpc_headers { + dict.insert("eth_rpc_headers".into(), headers.clone().into()); + } dict } } @@ -147,7 +155,11 @@ impl EtherscanOpts { dict.insert("etherscan_api_key".into(), key.into()); } if let Some(chain) = self.chain { - dict.insert("chain_id".into(), chain.to_string().into()); + if let ChainKind::Id(id) = chain.kind() { + dict.insert("chain_id".into(), (*id).into()); + } else { + dict.insert("chain_id".into(), chain.to_string().into()); + } } dict } diff --git a/crates/cli/src/opts/global.rs b/crates/cli/src/opts/global.rs index ad715f241..74ed15a65 100644 --- a/crates/cli/src/opts/global.rs +++ b/crates/cli/src/opts/global.rs @@ -15,7 +15,7 @@ pub struct GlobalOpts { /// - 2 (-vv): Print logs for all tests. /// - 3 (-vvv): Print execution traces for failing tests. /// - 4 (-vvvv): Print execution traces for all tests, and setup traces for failing tests. - /// - 5 (-vvvvv): Print execution and setup traces for all tests. + /// - 5 (-vvvvv): Print execution and setup traces for all tests, including storage changes. #[arg(help_heading = "Display options", global = true, short, long, verbatim_doc_comment, conflicts_with = "quiet", action = ArgAction::Count)] verbosity: Verbosity, @@ -50,11 +50,6 @@ impl GlobalOpts { Ok(()) } - /// Initialize the global thread pool. - pub fn force_init_thread_pool(&self) -> eyre::Result<()> { - init_thread_pool(self.threads.unwrap_or(0)) - } - /// Create a new shell instance. pub fn shell(&self) -> Shell { let mode = match self.quiet { @@ -69,6 +64,11 @@ impl GlobalOpts { Shell::new_with(format, mode, color, self.verbosity) } + + /// Initialize the global thread pool. + pub fn force_init_thread_pool(&self) -> eyre::Result<()> { + init_thread_pool(self.threads.unwrap_or(0)) + } } /// Initialize the global thread pool. 
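The rewritten `validate_closed` in the `crates/chisel/src/solidity_helper.rs` hunk above collapses the three separate depth counters into a `[usize; 3]` indexed by delimiter kind and treats the snippet as complete only when every counter returns to zero. A standalone, character-based sketch of that balance check (the real version walks solar's `Lexer` tokens instead, so delimiters inside string literals and comments do not skew the count):

```rust
/// Illustrative only: one depth counter per delimiter kind, decremented with
/// `saturating_sub` so stray closing delimiters never underflow.
fn is_closed(input: &str) -> bool {
    let mut depth = [0usize; 3]; // ( ), [ ], { }
    for c in input.chars() {
        match c {
            '(' => depth[0] += 1,
            ')' => depth[0] = depth[0].saturating_sub(1),
            '[' => depth[1] += 1,
            ']' => depth[1] = depth[1].saturating_sub(1),
            '{' => depth[2] += 1,
            '}' => depth[2] = depth[2].saturating_sub(1),
            _ => {}
        }
    }
    depth == [0; 3]
}

fn main() {
    assert!(is_closed("function f() public { uint256 x = 1; }"));
    assert!(!is_closed("function f() public {"));
}
```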
diff --git a/crates/cli/src/utils/cmd.rs b/crates/cli/src/utils/cmd.rs index c2d1ed41e..07c42ad45 100644 --- a/crates/cli/src/utils/cmd.rs +++ b/crates/cli/src/utils/cmd.rs @@ -1,7 +1,7 @@ use alloy_json_abi::JsonAbi; use alloy_primitives::Address; use eyre::{Result, WrapErr}; -use foundry_common::{fs, TestFunctionExt}; +use foundry_common::{compile::ProjectCompiler, fs, shell, ContractsByArtifact, TestFunctionExt}; use foundry_compilers::{ artifacts::{CompactBytecode, Settings}, cache::{CacheEntry, CompilerCache}, @@ -15,11 +15,10 @@ use foundry_evm::{ executors::{DeployResult, EvmError, RawCallResult}, opts::EvmOpts, traces::{ - debug::DebugTraceIdentifier, + debug::{ContractSources, DebugTraceIdentifier}, decode_trace_arena, - identifier::{EtherscanIdentifier, SignaturesIdentifier}, - render_trace_arena_with_bytecodes, CallTraceDecoder, CallTraceDecoderBuilder, TraceKind, - Traces, + identifier::{CachedSignatures, SignaturesIdentifier, TraceIdentifiers}, + render_trace_arena_inner, CallTraceDecoder, CallTraceDecoderBuilder, TraceKind, Traces, }, }; use std::{ @@ -198,27 +197,24 @@ pub fn init_progress(len: u64, label: &str) -> indicatif::ProgressBar { /// True if the network calculates gas costs differently. pub fn has_different_gas_calc(chain_id: u64) -> bool { if let Some(chain) = Chain::from(chain_id).named() { - return matches!( - chain, - NamedChain::Acala | - NamedChain::AcalaMandalaTestnet | - NamedChain::AcalaTestnet | - NamedChain::Arbitrum | - NamedChain::ArbitrumGoerli | - NamedChain::ArbitrumSepolia | - NamedChain::ArbitrumTestnet | - NamedChain::Etherlink | - NamedChain::EtherlinkTestnet | - NamedChain::Karura | - NamedChain::KaruraTestnet | - NamedChain::Mantle | - NamedChain::MantleSepolia | - NamedChain::MantleTestnet | - NamedChain::Moonbase | - NamedChain::Moonbeam | - NamedChain::MoonbeamDev | - NamedChain::Moonriver - ); + return chain.is_arbitrum() || + matches!( + chain, + NamedChain::Acala | + NamedChain::AcalaMandalaTestnet | + NamedChain::AcalaTestnet | + NamedChain::Etherlink | + NamedChain::EtherlinkTestnet | + NamedChain::Karura | + NamedChain::KaruraTestnet | + NamedChain::Mantle | + NamedChain::MantleSepolia | + NamedChain::MantleTestnet | + NamedChain::Moonbase | + NamedChain::Moonbeam | + NamedChain::MoonbeamDev | + NamedChain::Moonriver + ); } false } @@ -226,15 +222,14 @@ pub fn has_different_gas_calc(chain_id: u64) -> bool { /// True if it supports broadcasting in batches. 
pub fn has_batch_support(chain_id: u64) -> bool { if let Some(chain) = Chain::from(chain_id).named() { - return !matches!( + if matches!( chain, - NamedChain::Arbitrum | - NamedChain::ArbitrumTestnet | - NamedChain::ArbitrumGoerli | - NamedChain::ArbitrumSepolia | NamedChain::ZkSync | NamedChain::ZkSyncTestnet - ); + ) { + return false + }; + return !chain.is_arbitrum(); } true } @@ -423,10 +418,25 @@ pub async fn handle_traces( config: &Config, chain: Option, labels: Vec, + with_local_artifacts: bool, debug: bool, decode_internal: bool, - verbose: bool, ) -> Result<()> { + let (known_contracts, mut sources) = if with_local_artifacts { + let _ = sh_println!("Compiling project to generate artifacts"); + let project = config.project()?; + let compiler = ProjectCompiler::new(); + let output = compiler.compile(&project)?; + ( + Some(ContractsByArtifact::new( + output.artifact_ids().map(|(id, artifact)| (id, artifact.clone().into())), + )), + ContractSources::from_project_output(&output, project.root(), None)?, + ) + } else { + (None, ContractSources::default()) + }; + let labels = labels.iter().filter_map(|label_str| { let mut iter = label_str.split(':'); @@ -438,45 +448,44 @@ pub async fn handle_traces( None }); let config_labels = config.labels.clone().into_iter(); - let mut decoder = CallTraceDecoderBuilder::new() + + let mut builder = CallTraceDecoderBuilder::new() .with_labels(labels.chain(config_labels)) .with_signature_identifier(SignaturesIdentifier::new( Config::foundry_cache_dir(), config.offline, - )?) - .build(); + )?); + let mut identifier = TraceIdentifiers::new().with_etherscan(config, chain)?; + if let Some(contracts) = &known_contracts { + builder = builder.with_known_contracts(contracts); + identifier = identifier.with_local(contracts); + } - let mut etherscan_identifier = EtherscanIdentifier::new(config, chain)?; - if let Some(etherscan_identifier) = &mut etherscan_identifier { - for (_, trace) in result.traces.as_deref_mut().unwrap_or_default() { - decoder.identify(trace, etherscan_identifier); - } + let mut decoder = builder.build(); + + for (_, trace) in result.traces.as_deref_mut().unwrap_or_default() { + decoder.identify(trace, &mut identifier); } - if decode_internal { - let sources = if let Some(etherscan_identifier) = ðerscan_identifier { - etherscan_identifier.get_compiled_contracts().await? - } else { - Default::default() - }; + if decode_internal || debug { + if let Some(ref etherscan_identifier) = identifier.etherscan { + sources.merge(etherscan_identifier.get_compiled_contracts().await?); + } + + if debug { + let mut debugger = Debugger::builder() + .traces(result.traces.expect("missing traces")) + .decoder(&decoder) + .sources(sources) + .build(); + debugger.try_run_tui()?; + return Ok(()) + } + decoder.debug_identifier = Some(DebugTraceIdentifier::new(sources)); } - if debug { - let sources = if let Some(etherscan_identifier) = etherscan_identifier { - etherscan_identifier.get_compiled_contracts().await? 
- } else { - Default::default() - }; - let mut debugger = Debugger::builder() - .traces(result.traces.expect("missing traces")) - .decoder(&decoder) - .sources(sources) - .build(); - debugger.try_run_tui()?; - } else { - print_traces(&mut result, &decoder, verbose).await?; - } + print_traces(&mut result, &decoder, shell::verbosity() > 0, shell::verbosity() > 4).await?; Ok(()) } @@ -485,22 +494,65 @@ pub async fn print_traces( result: &mut TraceResult, decoder: &CallTraceDecoder, verbose: bool, + state_changes: bool, ) -> Result<()> { let traces = result.traces.as_mut().expect("No traces found"); - sh_println!("Traces:")?; + if !shell::is_json() { + sh_println!("Traces:")?; + } + for (_, arena) in traces { decode_trace_arena(arena, decoder).await?; - sh_println!("{}", render_trace_arena_with_bytecodes(arena, verbose))?; + sh_println!("{}", render_trace_arena_inner(arena, verbose, state_changes))?; } - sh_println!()?; + if shell::is_json() { + return Ok(()); + } + + sh_println!()?; if result.success { sh_println!("{}", "Transaction successfully executed.".green())?; } else { sh_err!("Transaction failed.")?; } - sh_println!("Gas used: {}", result.gas_used)?; + + Ok(()) +} + +/// Traverse the artifacts in the project to generate local signatures and merge them into the cache +/// file. +pub fn cache_local_signatures(output: &ProjectCompileOutput, cache_path: PathBuf) -> Result<()> { + let path = cache_path.join("signatures"); + let mut cached_signatures = CachedSignatures::load(cache_path); + output.artifacts().for_each(|(_, artifact)| { + if let Some(abi) = &artifact.abi { + for func in abi.functions() { + cached_signatures.functions.insert(func.selector().to_string(), func.signature()); + } + for event in abi.events() { + cached_signatures + .events + .insert(event.selector().to_string(), event.full_signature()); + } + for error in abi.errors() { + cached_signatures.errors.insert(error.selector().to_string(), error.signature()); + } + // External libraries doesn't have functions included in abi, but `methodIdentifiers`. 
+ if let Some(method_identifiers) = &artifact.method_identifiers { + method_identifiers.iter().for_each(|(signature, selector)| { + cached_signatures + .functions + .entry(format!("0x{selector}")) + .or_insert(signature.to_string()); + }); + } + } + }); + + fs::write_json_file(&path, &cached_signatures)?; +>>>>>>> 59f354c179f4e7f6d7292acb3d068815c79286d1 Ok(()) } diff --git a/crates/cli/src/utils/mod.rs b/crates/cli/src/utils/mod.rs index a66923de9..555a88ae8 100644 --- a/crates/cli/src/utils/mod.rs +++ b/crates/cli/src/utils/mod.rs @@ -117,6 +117,10 @@ pub fn get_provider_builder(config: &Config) -> Result { builder = builder.timeout(Duration::from_secs(rpc_timeout)); } + if let Some(rpc_headers) = config.eth_rpc_headers.clone() { + builder = builder.headers(rpc_headers); + } + Ok(builder) } @@ -290,7 +294,7 @@ impl<'a> Git<'a> { #[inline] pub fn from_config(config: &'a Config) -> Self { - Self::new(config.root.0.as_path()) + Self::new(config.root.as_path()) } pub fn root_of(relative_to: &Path) -> Result { @@ -447,8 +451,8 @@ impl<'a> Git<'a> { self.cmd().args(["status", "--porcelain"]).exec().map(|out| out.stdout.is_empty()) } - pub fn has_branch(self, branch: impl AsRef) -> Result { - self.cmd() + pub fn has_branch(self, branch: impl AsRef, at: &Path) -> Result { + self.cmd_at(at) .args(["branch", "--list", "--no-color"]) .arg(branch) .get_stdout_lossy() @@ -570,6 +574,12 @@ ignore them in the `.gitignore` file, or run this command again with the `--no-c cmd } + pub fn cmd_at(self, path: &Path) -> Command { + let mut cmd = Self::cmd_no_root(); + cmd.current_dir(path); + cmd + } + pub fn cmd_no_root() -> Command { let mut cmd = Command::new("git"); cmd.stdout(Stdio::piped()).stderr(Stdio::piped()); diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 12b952fda..9652d9c03 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -44,6 +44,8 @@ alloy-transport-ipc.workspace = true alloy-transport-ws.workspace = true alloy-transport.workspace = true alloy-consensus = { workspace = true, features = ["k256"] } +alloy-network.workspace = true + alloy-zksync.workspace = true tower.workspace = true diff --git a/crates/common/fmt/src/eof.rs b/crates/common/fmt/src/eof.rs index 639e175b4..1ae9de70b 100644 --- a/crates/common/fmt/src/eof.rs +++ b/crates/common/fmt/src/eof.rs @@ -1,4 +1,4 @@ -use comfy_table::{ContentArrangement, Table}; +use comfy_table::{modifiers::UTF8_ROUND_CORNERS, ContentArrangement, Table}; use revm_primitives::{ eof::{EofBody, EofHeader}, Eof, @@ -24,6 +24,7 @@ pub fn pretty_eof(eof: &Eof) -> Result { let mut result = String::new(); let mut table = Table::new(); + table.apply_modifier(UTF8_ROUND_CORNERS); table.add_row(vec!["type_size", &types_size.to_string()]); table.add_row(vec!["num_code_sections", &code_sizes.len().to_string()]); if !code_sizes.is_empty() { @@ -39,6 +40,7 @@ pub fn pretty_eof(eof: &Eof) -> Result { if !code_section.is_empty() { let mut table = Table::new(); + table.apply_modifier(UTF8_ROUND_CORNERS); table.set_content_arrangement(ContentArrangement::Dynamic); table.set_header(vec!["", "Inputs", "Outputs", "Max stack height", "Code"]); for (idx, (code, type_section)) in code_section.iter().zip(types_section).enumerate() { @@ -56,6 +58,7 @@ pub fn pretty_eof(eof: &Eof) -> Result { if !container_section.is_empty() { let mut table = Table::new(); + table.apply_modifier(UTF8_ROUND_CORNERS); table.set_content_arrangement(ContentArrangement::Dynamic); for (idx, container) in container_section.iter().enumerate() { 
table.add_row(vec![&idx.to_string(), &container.to_string()]); @@ -66,6 +69,7 @@ pub fn pretty_eof(eof: &Eof) -> Result { if !data_section.is_empty() { let mut table = Table::new(); + table.apply_modifier(UTF8_ROUND_CORNERS); table.set_content_arrangement(ContentArrangement::Dynamic); table.add_row(vec![&data_section.to_string()]); write!(result, "\n\nData section:\n{table}")?; diff --git a/crates/common/fmt/src/ui.rs b/crates/common/fmt/src/ui.rs index a82d2cdc3..9962a8583 100644 --- a/crates/common/fmt/src/ui.rs +++ b/crates/common/fmt/src/ui.rs @@ -1,14 +1,15 @@ //! Helper trait and functions to format Ethereum types. use alloy_consensus::{ - AnyReceiptEnvelope, Eip658Value, Receipt, ReceiptWithBloom, Transaction as TxTrait, TxEnvelope, - TxType, + Eip658Value, Receipt, ReceiptWithBloom, Transaction as TxTrait, TxEnvelope, TxType, Typed2718, +}; +use alloy_network::{ + AnyHeader, AnyReceiptEnvelope, AnyRpcBlock, AnyTransactionReceipt, AnyTxEnvelope, + ReceiptResponse, }; -use alloy_network::{AnyHeader, AnyRpcBlock, AnyTxEnvelope, ReceiptResponse}; use alloy_primitives::{hex, Address, Bloom, Bytes, FixedBytes, Uint, I256, U256, U64, U8}; use alloy_rpc_types::{ - AccessListItem, AnyTransactionReceipt, Block, BlockTransactions, Header, Log, Transaction, - TransactionReceipt, + AccessListItem, Block, BlockTransactions, Header, Log, Transaction, TransactionReceipt, }; use alloy_serde::{OtherFields, WithOtherFields}; use serde::Deserialize; @@ -900,6 +901,7 @@ fn pretty_block_basics(block: &Block>) excess_blob_gas, parent_beacon_block_root, requests_hash, + target_blobs_per_block, }, }, uncles: _, @@ -931,7 +933,8 @@ withdrawalsRoot {} totalDifficulty {} blobGasUsed {} excessBlobGas {} -requestsHash {}", +requestsHash {} +targetBlobsPerBlock {}", base_fee_per_gas.pretty(), difficulty.pretty(), extra_data.pretty(), @@ -959,6 +962,7 @@ requestsHash {}", blob_gas_used.pretty(), excess_blob_gas.pretty(), requests_hash.pretty(), + target_blobs_per_block.pretty(), ) } diff --git a/crates/common/src/abi.rs b/crates/common/src/abi.rs index de9b36219..fa9f24171 100644 --- a/crates/common/src/abi.rs +++ b/crates/common/src/abi.rs @@ -1,7 +1,7 @@ //! ABI related helper functions. use alloy_dyn_abi::{DynSolType, DynSolValue, FunctionExt, JsonAbiExt}; -use alloy_json_abi::{Event, Function, Param}; +use alloy_json_abi::{Error, Event, Function, Param}; use alloy_primitives::{hex, Address, LogData}; use eyre::{Context, ContextCompat, Result}; use foundry_block_explorers::{contract::ContractMetadata, errors::EtherscanError, Client}; @@ -85,6 +85,11 @@ pub fn get_event(sig: &str) -> Result { Event::parse(sig).wrap_err("could not parse event signature") } +/// Given an error signature string, it tries to parse it as a `Error` +pub fn get_error(sig: &str) -> Result { + Error::parse(sig).wrap_err("could not parse event signature") +} + /// Given an event without indexed parameters and a rawlog, it tries to return the event with the /// proper indexed parameters. Otherwise, it returns the original event. 
pub fn get_indexed_event(mut event: Event, raw_log: &LogData) -> Event { diff --git a/crates/common/src/compile.rs b/crates/common/src/compile.rs index 81e45f67f..dfd0234c0 100644 --- a/crates/common/src/compile.rs +++ b/crates/common/src/compile.rs @@ -6,11 +6,11 @@ use crate::{ term::SpinnerReporter, TestFunctionExt, }; -use comfy_table::{presets::ASCII_MARKDOWN, Attribute, Cell, CellAlignment, Color, Table}; +use comfy_table::{modifiers::UTF8_ROUND_CORNERS, Cell, Color, Table}; use eyre::Result; use foundry_block_explorers::contract::Metadata; use foundry_compilers::{ - artifacts::{remappings::Remapping, BytecodeObject, Source}, + artifacts::{remappings::Remapping, BytecodeObject, Contract, Source}, compilers::{ solc::{Solc, SolcCompiler}, Compiler, @@ -146,7 +146,10 @@ impl ProjectCompiler { } /// Compiles the project. - pub fn compile(mut self, project: &Project) -> Result> { + pub fn compile>( + mut self, + project: &Project, + ) -> Result> { // TODO: Avoid process::exit if !project.paths.has_input_files() && self.files.is_empty() { sh_println!("Nothing to compile")?; @@ -180,7 +183,10 @@ impl ProjectCompiler { /// ProjectCompiler::new().compile_with(|| Ok(prj.compile()?)).unwrap(); /// ``` #[instrument(target = "forge::compile", skip_all)] - fn compile_with(self, f: F) -> Result> + fn compile_with, F>( + self, + f: F, + ) -> Result> where F: FnOnce() -> Result>, { @@ -219,7 +225,10 @@ impl ProjectCompiler { } /// If configured, this will print sizes or names - fn handle_output(&self, output: &ProjectCompileOutput) { + fn handle_output>( + &self, + output: &ProjectCompileOutput, + ) { let print_names = self.print_names.unwrap_or(false); let print_sizes = self.print_sizes.unwrap_or(false); @@ -597,9 +606,8 @@ impl SizeReport { impl Display for SizeReport { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { match self.report_kind { - ReportKind::Markdown => { - let table = self.format_table_output(); - writeln!(f, "{table}")?; + ReportKind::Text => { + writeln!(f, "\n{}", self.format_table_output())?; } ReportKind::JSON => { writeln!(f, "{}", self.format_json_output())?; @@ -634,13 +642,14 @@ impl SizeReport { fn format_table_output(&self) -> Table { let mut table = Table::new(); - table.load_preset(ASCII_MARKDOWN); - table.set_header([ - Cell::new("Contract").add_attribute(Attribute::Bold).fg(Color::Blue), - Cell::new("Runtime Size (B)").add_attribute(Attribute::Bold).fg(Color::Blue), - Cell::new("Initcode Size (B)").add_attribute(Attribute::Bold).fg(Color::Blue), - Cell::new("Runtime Margin (B)").add_attribute(Attribute::Bold).fg(Color::Blue), - Cell::new("Initcode Margin (B)").add_attribute(Attribute::Bold).fg(Color::Blue), + table.apply_modifier(UTF8_ROUND_CORNERS); + + table.set_header(vec![ + Cell::new("Contract"), + Cell::new("Runtime Size (B)"), + Cell::new("Initcode Size (B)"), + Cell::new("Runtime Margin (B)"), + Cell::new("Initcode Margin (B)"), ]); // Filters out dev contracts (Test or Script) @@ -690,19 +699,11 @@ impl SizeReport { let locale = &Locale::en; table.add_row([ - Cell::new(name).fg(Color::Blue), - Cell::new(contract.runtime_size.to_formatted_string(locale)) - .set_alignment(CellAlignment::Right) - .fg(runtime_color), - Cell::new(contract.init_size.to_formatted_string(locale)) - .set_alignment(CellAlignment::Right) - .fg(init_color), - Cell::new(runtime_margin.to_formatted_string(locale)) - .set_alignment(CellAlignment::Right) - .fg(runtime_color), - Cell::new(init_margin.to_formatted_string(locale)) - 
.set_alignment(CellAlignment::Right) - .fg(init_color), + Cell::new(name), + Cell::new(contract.runtime_size.to_formatted_string(locale)).fg(runtime_color), + Cell::new(contract.init_size.to_formatted_string(locale)).fg(init_color), + Cell::new(runtime_margin.to_formatted_string(locale)).fg(runtime_color), + Cell::new(init_margin.to_formatted_string(locale)).fg(init_color), ]); } @@ -753,7 +754,7 @@ pub struct ContractInfo { /// If `verify` and it's a standalone script, throw error. Only allowed for projects. /// /// **Note:** this expects the `target_path` to be absolute -pub fn compile_target( +pub fn compile_target>( target_path: &Path, project: &Project, quiet: bool, diff --git a/crates/common/src/constants.rs b/crates/common/src/constants.rs index 0ba0514c2..4ff3eb8d7 100644 --- a/crates/common/src/constants.rs +++ b/crates/common/src/constants.rs @@ -40,6 +40,9 @@ pub const OPTIMISM_SYSTEM_ADDRESS: Address = address!("deaddeaddeaddeaddeaddeadd /// Transaction identifier of System transaction types pub const SYSTEM_TRANSACTION_TYPE: u8 = 126; +/// Default user agent set as the header for requests that don't specify one. +pub const DEFAULT_USER_AGENT: &str = concat!("foundry/", env!("CARGO_PKG_VERSION")); + /// Returns whether the sender is a known L2 system sender that is the first tx in every block. /// /// Transactions from these senders usually don't have a any fee information. diff --git a/crates/common/src/errors/mod.rs b/crates/common/src/errors/mod.rs index cfd9a307e..c8b2c6bcc 100644 --- a/crates/common/src/errors/mod.rs +++ b/crates/common/src/errors/mod.rs @@ -5,3 +5,38 @@ pub use fs::FsPathError; mod artifacts; pub use artifacts::*; + +/// Displays a chain of errors in a single line. +pub fn display_chain(error: &eyre::Report) -> String { + let mut causes = all_sources(error); + // Deduplicate the common pattern `msg1: msg2; msg2` -> `msg1: msg2`. + causes.dedup_by(|b, a| a.contains(b.as_str())); + causes.join("; ") +} + +fn all_sources(err: &eyre::Report) -> Vec { + err.chain().map(|cause| cause.to_string().trim().to_string()).collect() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn dedups_contained() { + #[derive(thiserror::Error, Debug)] + #[error("my error: {0}")] + struct A(#[from] B); + + #[derive(thiserror::Error, Debug)] + #[error("{0}")] + struct B(String); + + let ee = eyre::Report::from(A(B("hello".into()))); + assert_eq!(ee.chain().count(), 2, "{ee:?}"); + let full = all_sources(&ee).join("; "); + assert_eq!(full, "my error: hello; hello"); + let chained = display_chain(&ee); + assert_eq!(chained, "my error: hello"); + } +} diff --git a/crates/common/src/evm.rs b/crates/common/src/evm.rs index 3eca0800e..dac5e6d9a 100644 --- a/crates/common/src/evm.rs +++ b/crates/common/src/evm.rs @@ -103,6 +103,11 @@ pub struct EvmArgs { #[serde(skip)] pub always_use_create_2_factory: bool, + /// The CREATE2 deployer address to use, this will override the one in the config. + #[arg(long, value_name = "ADDRESS")] + #[serde(skip_serializing_if = "Option::is_none")] + pub create2_deployer: Option
, + /// Sets the number of assumed available compute units per second for this provider /// /// default value: 330 @@ -135,10 +140,10 @@ pub struct EvmArgs { #[serde(skip)] pub isolate: bool, - /// Whether to enable Alphanet features. - #[arg(long, alias = "odyssey")] + /// Whether to enable Odyssey features. + #[arg(long, alias = "alphanet")] #[serde(skip)] - pub alphanet: bool, + pub odyssey: bool, } // Make this set of options a `figment::Provider` so that it can be merged into the `Config` @@ -165,8 +170,8 @@ impl Provider for EvmArgs { dict.insert("isolate".to_string(), self.isolate.into()); } - if self.alphanet { - dict.insert("alphanet".to_string(), self.alphanet.into()); + if self.odyssey { + dict.insert("odyssey".to_string(), self.odyssey.into()); } if self.always_use_create_2_factory { @@ -196,11 +201,6 @@ impl Provider for EvmArgs { #[derive(Clone, Debug, Default, Serialize, Parser)] #[command(next_help_heading = "Executor environment config")] pub struct EnvArgs { - /// The block gas limit. - #[arg(long, value_name = "GAS_LIMIT")] - #[serde(skip_serializing_if = "Option::is_none")] - pub gas_limit: Option, - /// EIP-170: Contract code size limit in bytes. Useful to increase this because of tests. By /// default, it is 0x6000 (~25kb). #[arg(long, value_name = "CODE_SIZE")] @@ -253,7 +253,7 @@ pub struct EnvArgs { pub block_prevrandao: Option, /// The block gas limit. - #[arg(long, value_name = "GAS_LIMIT")] + #[arg(long, visible_alias = "gas-limit", value_name = "GAS_LIMIT")] #[serde(skip_serializing_if = "Option::is_none")] pub block_gas_limit: Option, diff --git a/crates/common/src/provider/runtime_transport.rs b/crates/common/src/provider/runtime_transport.rs index a95969be5..563cec313 100644 --- a/crates/common/src/provider/runtime_transport.rs +++ b/crates/common/src/provider/runtime_transport.rs @@ -1,7 +1,7 @@ //! Runtime transport that connects on first request, which can take either of an HTTP, //! WebSocket, or IPC transport and supports retries based on CUPS logic. -use crate::REQUEST_TIMEOUT; +use crate::{DEFAULT_USER_AGENT, REQUEST_TIMEOUT}; use alloy_json_rpc::{RequestPacket, ResponsePacket}; use alloy_pubsub::{PubSubConnect, PubSubFrontend}; use alloy_rpc_types::engine::{Claims, JwtSecret}; @@ -176,6 +176,14 @@ impl RuntimeTransport { ); } + if !headers.iter().any(|(k, _v)| k.as_str().starts_with("User-Agent:")) { + headers.insert( + reqwest::header::USER_AGENT, + HeaderValue::from_str(DEFAULT_USER_AGENT) + .expect("User-Agent should be valid string"), + ); + } + client_builder = client_builder.default_headers(headers); let client = diff --git a/crates/common/src/reports.rs b/crates/common/src/reports.rs index adbdc11bf..0fdf4502e 100644 --- a/crates/common/src/reports.rs +++ b/crates/common/src/reports.rs @@ -5,7 +5,7 @@ use crate::shell; #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] pub enum ReportKind { #[default] - Markdown, + Text, JSON, } @@ -14,6 +14,6 @@ pub fn report_kind() -> ReportKind { if shell::is_json() { ReportKind::JSON } else { - ReportKind::Markdown + ReportKind::Text } } diff --git a/crates/common/src/retry.rs b/crates/common/src/retry.rs index 59ba2055f..b79e095ce 100644 --- a/crates/common/src/retry.rs +++ b/crates/common/src/retry.rs @@ -16,23 +16,28 @@ pub enum RetryError { #[derive(Clone, Debug)] pub struct Retry { retries: u32, - delay: Option, + delay: Duration, } impl Retry { /// Creates a new `Retry` instance. 
- pub fn new(retries: u32, delay: Option) -> Self { + pub fn new(retries: u32, delay: Duration) -> Self { Self { retries, delay } } + /// Creates a new `Retry` instance with no delay between retries. + pub fn new_no_delay(retries: u32) -> Self { + Self::new(retries, Duration::ZERO) + } + /// Runs the given closure in a loop, retrying if it fails up to the specified number of times. pub fn run Result, T>(mut self, mut callback: F) -> Result { loop { match callback() { Err(e) if self.retries > 0 => { self.handle_err(e); - if let Some(delay) = self.delay { - std::thread::sleep(delay); + if !self.delay.is_zero() { + std::thread::sleep(self.delay); } } res => return res, @@ -51,8 +56,8 @@ impl Retry { match callback().await { Err(e) if self.retries > 0 => { self.handle_err(e); - if let Some(delay) = self.delay { - tokio::time::sleep(delay).await; + if !self.delay.is_zero() { + tokio::time::sleep(self.delay).await; } } res => return res, @@ -71,8 +76,8 @@ impl Retry { match callback().await { Err(RetryError::Retry(e)) if self.retries > 0 => { self.handle_err(e); - if let Some(delay) = self.delay { - tokio::time::sleep(delay).await; + if !self.delay.is_zero() { + tokio::time::sleep(self.delay).await; } } Err(RetryError::Retry(e) | RetryError::Break(e)) => return Err(e), @@ -82,7 +87,17 @@ impl Retry { } fn handle_err(&mut self, err: Error) { + debug_assert!(self.retries > 0); self.retries -= 1; - let _ = sh_warn!("{} ({} tries remaining)", err.root_cause(), self.retries); + let _ = sh_warn!( + "{msg}{delay} ({retries} tries remaining)", + msg = crate::errors::display_chain(&err), + delay = if self.delay.is_zero() { + String::new() + } else { + format!("; waiting {} seconds before trying again", self.delay.as_secs()) + }, + retries = self.retries, + ); } } diff --git a/crates/common/src/selectors.rs b/crates/common/src/selectors.rs index cd4e2ffd0..cb59e1f32 100644 --- a/crates/common/src/selectors.rs +++ b/crates/common/src/selectors.rs @@ -140,7 +140,7 @@ impl OpenChainClient { .ok_or_else(|| eyre::eyre!("No signature found")) } - /// Decodes the given function or event selectors using OpenChain + /// Decodes the given function, error or event selectors using OpenChain. pub async fn decode_selectors( &self, selector_type: SelectorType, @@ -164,8 +164,8 @@ impl OpenChainClient { self.ensure_not_spurious()?; let expected_len = match selector_type { - SelectorType::Function => 10, // 0x + hex(4bytes) - SelectorType::Event => 66, // 0x + hex(32bytes) + SelectorType::Function | SelectorType::Error => 10, // 0x + hex(4bytes) + SelectorType::Event => 66, // 0x + hex(32bytes) }; if let Some(s) = selectors.iter().find(|s| s.len() != expected_len) { eyre::bail!( @@ -193,7 +193,7 @@ impl OpenChainClient { let url = format!( "{SELECTOR_LOOKUP_URL}?{ltype}={selectors_str}", ltype = match selector_type { - SelectorType::Function => "function", + SelectorType::Function | SelectorType::Error => "function", SelectorType::Event => "event", }, selectors_str = selectors.join(",") @@ -212,7 +212,7 @@ impl OpenChainClient { } let decoded = match selector_type { - SelectorType::Function => api_response.result.function, + SelectorType::Function | SelectorType::Error => api_response.result.function, SelectorType::Event => api_response.result.event, }; @@ -391,6 +391,8 @@ pub enum SelectorType { Function, /// An event selector. Event, + /// An custom error selector. + Error, } /// Decodes the given function or event selector using OpenChain. 
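The `retry.rs` hunk above changes the delay from `Option<Duration>` to a plain `Duration`, with `Duration::ZERO` (or the new `new_no_delay` constructor) meaning retry immediately. A usage sketch based on the signatures in that hunk; it assumes `Retry` is in scope (for example via `use foundry_common::retry::Retry;`) and eyre-based results as in that module, and `flaky` is a made-up stand-in for a fallible operation:

```rust
use std::time::Duration;

fn flaky(attempts: &mut u32) -> eyre::Result<&'static str> {
    *attempts += 1;
    if *attempts < 3 {
        eyre::bail!("transient failure");
    }
    Ok("done")
}

fn main() -> eyre::Result<()> {
    // Wait two seconds between attempts.
    let mut attempts = 0;
    let out = Retry::new(5, Duration::from_secs(2)).run(|| flaky(&mut attempts))?;
    assert_eq!(out, "done");

    // Or retry back-to-back with no delay.
    let mut attempts = 0;
    assert_eq!(Retry::new_no_delay(5).run(|| flaky(&mut attempts))?, "done");
    Ok(())
}
```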
diff --git a/crates/common/src/transactions.rs b/crates/common/src/transactions.rs index b319da0d8..b725fc068 100644 --- a/crates/common/src/transactions.rs +++ b/crates/common/src/transactions.rs @@ -2,12 +2,13 @@ use alloy_consensus::{Transaction, TxEnvelope}; use alloy_eips::eip7702::SignedAuthorization; +use alloy_network::AnyTransactionReceipt; use alloy_primitives::{Address, TxKind, U256}; use alloy_provider::{ network::{AnyNetwork, ReceiptResponse, TransactionBuilder}, Provider, }; -use alloy_rpc_types::{AnyTransactionReceipt, BlockId, TransactionRequest}; +use alloy_rpc_types::{BlockId, TransactionRequest}; use alloy_serde::WithOtherFields; use alloy_transport::Transport; use eyre::Result; @@ -179,6 +180,10 @@ impl TransactionMaybeSigned { Ok(Self::Signed { tx, from }) } + pub fn is_unsigned(&self) -> bool { + matches!(self, Self::Unsigned(_)) + } + pub fn as_unsigned_mut(&mut self) -> Option<&mut WithOtherFields> { match self { Self::Unsigned(tx) => Some(tx), diff --git a/crates/config/Cargo.toml b/crates/config/Cargo.toml index 951531762..0abe4d2b2 100644 --- a/crates/config/Cargo.toml +++ b/crates/config/Cargo.toml @@ -27,11 +27,12 @@ dirs-next = "2" dunce.workspace = true eyre.workspace = true figment = { workspace = true, features = ["toml", "env"] } -globset = "0.4" glob = "0.3" +globset = "0.4" Inflector = "0.11" -number_prefix = "0.4" +itertools.workspace = true mesc.workspace = true +number_prefix = "0.4" regex.workspace = true reqwest.workspace = true semver = { workspace = true, features = ["serde"] } diff --git a/crates/config/src/fuzz.rs b/crates/config/src/fuzz.rs index 94410c21e..1bbffe531 100644 --- a/crates/config/src/fuzz.rs +++ b/crates/config/src/fuzz.rs @@ -1,9 +1,5 @@ //! Configuration for fuzz testing. -use crate::inline::{ - parse_config_bool, parse_config_u32, InlineConfigParser, InlineConfigParserError, - INLINE_CONFIG_FUZZ_KEY, -}; use alloy_primitives::U256; use serde::{Deserialize, Serialize}; use std::path::PathBuf; @@ -34,6 +30,8 @@ pub struct FuzzConfig { pub no_zksync_reserved_addresses: bool, /// show `console.log` in fuzz test, defaults to `false` pub show_logs: bool, + /// Optional timeout (in seconds) for each property test + pub timeout: Option, } impl Default for FuzzConfig { @@ -48,6 +46,7 @@ impl Default for FuzzConfig { failure_persist_file: None, no_zksync_reserved_addresses: false, show_logs: false, + timeout: None, } } } @@ -56,54 +55,14 @@ impl FuzzConfig { /// Creates fuzz configuration to write failures in `{PROJECT_ROOT}/cache/fuzz` dir. 
pub fn new(cache_dir: PathBuf) -> Self { Self { - runs: 256, - max_test_rejects: 65536, - seed: None, - dictionary: FuzzDictionaryConfig::default(), - gas_report_samples: 256, failure_persist_dir: Some(cache_dir), failure_persist_file: Some("failures".to_string()), no_zksync_reserved_addresses: false, - show_logs: false, + ..Default::default() } } } -impl InlineConfigParser for FuzzConfig { - fn config_key() -> String { - INLINE_CONFIG_FUZZ_KEY.into() - } - - fn try_merge(&self, configs: &[String]) -> Result, InlineConfigParserError> { - let overrides: Vec<(String, String)> = Self::get_config_overrides(configs); - - if overrides.is_empty() { - return Ok(None) - } - - let mut conf_clone = self.clone(); - - for pair in overrides { - let key = pair.0; - let value = pair.1; - match key.as_str() { - "runs" => conf_clone.runs = parse_config_u32(key, value)?, - "max-test-rejects" => conf_clone.max_test_rejects = parse_config_u32(key, value)?, - "dictionary-weight" => { - conf_clone.dictionary.dictionary_weight = parse_config_u32(key, value)? - } - "failure-persist-file" => conf_clone.failure_persist_file = Some(value), - "no-zksync-reserved-addresses" => { - conf_clone.no_zksync_reserved_addresses = parse_config_bool(key, value)? - } - "show-logs" => conf_clone.show_logs = parse_config_bool(key, value)?, - _ => Err(InlineConfigParserError::InvalidConfigProperty(key))?, - } - } - Ok(Some(conf_clone)) - } -} - /// Contains for fuzz testing #[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct FuzzDictionaryConfig { @@ -139,68 +98,3 @@ impl Default for FuzzDictionaryConfig { } } } - -#[cfg(test)] -mod tests { - use crate::{inline::InlineConfigParser, FuzzConfig}; - - #[test] - fn unrecognized_property() { - let configs = &["forge-config: default.fuzz.unknownprop = 200".to_string()]; - let base_config = FuzzConfig::default(); - if let Err(e) = base_config.try_merge(configs) { - assert_eq!(e.to_string(), "'unknownprop' is an invalid config property"); - } else { - unreachable!() - } - } - - #[test] - fn successful_merge() { - let configs = &[ - "forge-config: default.fuzz.runs = 42424242".to_string(), - "forge-config: default.fuzz.dictionary-weight = 42".to_string(), - "forge-config: default.fuzz.failure-persist-file = fuzz-failure".to_string(), - ]; - let base_config = FuzzConfig::default(); - let merged: FuzzConfig = base_config.try_merge(configs).expect("No errors").unwrap(); - assert_eq!(merged.runs, 42424242); - assert_eq!(merged.dictionary.dictionary_weight, 42); - assert_eq!(merged.failure_persist_file, Some("fuzz-failure".to_string())); - } - - #[test] - fn merge_is_none() { - let empty_config = &[]; - let base_config = FuzzConfig::default(); - let merged = base_config.try_merge(empty_config).expect("No errors"); - assert!(merged.is_none()); - } - - #[test] - fn merge_is_none_unrelated_property() { - let unrelated_configs = &["forge-config: default.invariant.runs = 2".to_string()]; - let base_config = FuzzConfig::default(); - let merged = base_config.try_merge(unrelated_configs).expect("No errors"); - assert!(merged.is_none()); - } - - #[test] - fn override_detection() { - let configs = &[ - "forge-config: default.fuzz.runs = 42424242".to_string(), - "forge-config: ci.fuzz.runs = 666666".to_string(), - "forge-config: default.invariant.runs = 2".to_string(), - "forge-config: default.fuzz.dictionary-weight = 42".to_string(), - ]; - let variables = FuzzConfig::get_config_overrides(configs); - assert_eq!( - variables, - vec![ - ("runs".into(), "42424242".into()), - 
("runs".into(), "666666".into()), - ("dictionary-weight".into(), "42".into()) - ] - ); - } -} diff --git a/crates/config/src/inline/conf_parser.rs b/crates/config/src/inline/conf_parser.rs deleted file mode 100644 index e69449654..000000000 --- a/crates/config/src/inline/conf_parser.rs +++ /dev/null @@ -1,169 +0,0 @@ -use super::{remove_whitespaces, InlineConfigParserError}; -use crate::{inline::INLINE_CONFIG_PREFIX, InlineConfigError, NatSpec}; -use regex::Regex; - -/// This trait is intended to parse configurations from -/// structured text. Foundry users can annotate Solidity test functions, -/// providing special configs just for the execution of a specific test. -/// -/// An example: -/// -/// ```solidity -/// contract MyTest is Test { -/// /// forge-config: default.fuzz.runs = 100 -/// /// forge-config: ci.fuzz.runs = 500 -/// function test_SimpleFuzzTest(uint256 x) public {...} -/// -/// /// forge-config: default.fuzz.runs = 500 -/// /// forge-config: ci.fuzz.runs = 10000 -/// function test_ImportantFuzzTest(uint256 x) public {...} -/// } -/// ``` -pub trait InlineConfigParser -where - Self: Clone + Default + Sized + 'static, -{ - /// Returns a config key that is common to all valid configuration lines - /// for the current impl. This helps to extract correct values out of a text. - /// - /// An example key would be `fuzz` of `invariant`. - fn config_key() -> String; - - /// Tries to override `self` properties with values specified in the `configs` parameter. - /// - /// Returns - /// - `Some(Self)` in case some configurations are merged into self. - /// - `None` in case there are no configurations that can be applied to self. - /// - `Err(InlineConfigParserError)` in case of wrong configuration. - fn try_merge(&self, configs: &[String]) -> Result, InlineConfigParserError>; - - /// Validates and merges the natspec configs for current profile into the current config. - fn merge(&self, natspec: &NatSpec) -> Result, InlineConfigError> { - let config_key = Self::config_key(); - - let configs = natspec - .current_profile_configs() - .filter(|l| l.contains(&config_key)) - .collect::>(); - - self.try_merge(&configs).map_err(|e| { - let line = natspec.debug_context(); - InlineConfigError { line, source: e } - }) - } - - /// Given a list of config lines, returns all available pairs (key, value) matching the current - /// config key. - /// - /// # Examples - /// - /// ```ignore - /// assert_eq!( - /// get_config_overrides(&[ - /// "forge-config: default.invariant.runs = 500", - /// "forge-config: default.invariant.depth = 500", - /// "forge-config: ci.invariant.depth = 500", - /// "forge-config: ci.fuzz.runs = 10", - /// ]), - /// [("runs", "500"), ("depth", "500"), ("depth", "500")] - /// ); - /// ``` - fn get_config_overrides(config_lines: &[String]) -> Vec<(String, String)> { - let mut result: Vec<(String, String)> = vec![]; - let config_key = Self::config_key(); - let profile = ".*"; - let prefix = format!("^{INLINE_CONFIG_PREFIX}:{profile}{config_key}\\."); - let re = Regex::new(&prefix).unwrap(); - - config_lines - .iter() - .map(|l| remove_whitespaces(l)) - .filter(|l| re.is_match(l)) - .map(|l| re.replace(&l, "").to_string()) - .for_each(|line| { - let key_value = line.split('=').collect::>(); // i.e. "['runs', '500']" - if let Some(key) = key_value.first() { - if let Some(value) = key_value.last() { - result.push((key.to_string(), value.to_string())); - } - } - }); - - result - } -} - -/// Checks if all configuration lines specified in `natspec` use a valid profile. -/// -/// i.e. 
Given available profiles -/// ```rust -/// let _profiles = vec!["ci", "default"]; -/// ``` -/// A configuration like `forge-config: ciii.invariant.depth = 1` would result -/// in an error. -pub fn validate_profiles(natspec: &NatSpec, profiles: &[String]) -> Result<(), InlineConfigError> { - for config in natspec.config_lines() { - if !profiles.iter().any(|p| config.starts_with(&format!("{INLINE_CONFIG_PREFIX}:{p}."))) { - let err_line: String = natspec.debug_context(); - let profiles = format!("{profiles:?}"); - Err(InlineConfigError { - source: InlineConfigParserError::InvalidProfile(config, profiles), - line: err_line, - })? - } - } - Ok(()) -} - -/// Tries to parse a `u32` from `value`. The `key` argument is used to give details -/// in the case of an error. -pub fn parse_config_u32(key: String, value: String) -> Result { - value.parse().map_err(|_| InlineConfigParserError::ParseInt(key, value)) -} - -/// Tries to parse a `bool` from `value`. The `key` argument is used to give details -/// in the case of an error. -pub fn parse_config_bool(key: String, value: String) -> Result { - value.parse().map_err(|_| InlineConfigParserError::ParseBool(key, value)) -} - -#[cfg(test)] -mod tests { - use crate::{inline::conf_parser::validate_profiles, NatSpec}; - - #[test] - fn can_reject_invalid_profiles() { - let profiles = ["ci".to_string(), "default".to_string()]; - let natspec = NatSpec { - contract: Default::default(), - function: Default::default(), - line: Default::default(), - docs: r" - forge-config: ciii.invariant.depth = 1 - forge-config: default.invariant.depth = 1 - " - .into(), - }; - - let result = validate_profiles(&natspec, &profiles); - assert!(result.is_err()); - } - - #[test] - fn can_accept_valid_profiles() { - let profiles = ["ci".to_string(), "default".to_string()]; - let natspec = NatSpec { - contract: Default::default(), - function: Default::default(), - line: Default::default(), - docs: r" - forge-config: ci.invariant.depth = 1 - forge-config: default.invariant.depth = 1 - " - .into(), - }; - - let result = validate_profiles(&natspec, &profiles); - assert!(result.is_ok()); - } -} diff --git a/crates/config/src/inline/error.rs b/crates/config/src/inline/error.rs deleted file mode 100644 index ddcb6a61b..000000000 --- a/crates/config/src/inline/error.rs +++ /dev/null @@ -1,44 +0,0 @@ -/// Errors returned by the [`InlineConfigParser`](crate::InlineConfigParser) trait. -#[derive(Clone, Debug, PartialEq, Eq, thiserror::Error)] -pub enum InlineConfigParserError { - /// An invalid configuration property has been provided. - /// The property cannot be mapped to the configuration object - #[error("'{0}' is an invalid config property")] - InvalidConfigProperty(String), - /// An invalid profile has been provided - #[error("'{0}' specifies an invalid profile. Available profiles are: {1}")] - InvalidProfile(String, String), - /// An error occurred while trying to parse an integer configuration value - #[error("Invalid config value for key '{0}'. Unable to parse '{1}' into an integer value")] - ParseInt(String, String), - /// An error occurred while trying to parse a boolean configuration value - #[error("Invalid config value for key '{0}'. Unable to parse '{1}' into a boolean value")] - ParseBool(String, String), -} - -/// Wrapper error struct that catches config parsing errors, enriching them with context information -/// reporting the misconfigured line. 
-#[derive(Debug, thiserror::Error)] -#[error("Inline config error detected at {line}")] -pub struct InlineConfigError { - /// Specifies the misconfigured line. This is something of the form - /// `dir/TestContract.t.sol:FuzzContract:10:12:111` - pub line: String, - /// The inner error - pub source: InlineConfigParserError, -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_format_inline_config_errors() { - let source = InlineConfigParserError::ParseBool("key".into(), "invalid-bool-value".into()); - let line = "dir/TestContract.t.sol:FuzzContract".to_string(); - let error = InlineConfigError { line: line.clone(), source }; - - let expected = format!("Inline config error detected at {line}"); - assert_eq!(error.to_string(), expected); - } -} diff --git a/crates/config/src/inline/mod.rs b/crates/config/src/inline/mod.rs index 8b5616a21..fa67b2426 100644 --- a/crates/config/src/inline/mod.rs +++ b/crates/config/src/inline/mod.rs @@ -1,61 +1,186 @@ use crate::Config; use alloy_primitives::map::HashMap; -use std::sync::LazyLock; -mod conf_parser; -pub use conf_parser::*; - -mod error; -pub use error::*; +use figment::{ + value::{Dict, Map, Value}, + Figment, Profile, Provider, +}; +use foundry_compilers::ProjectCompileOutput; +use itertools::Itertools; mod natspec; pub use natspec::*; -pub const INLINE_CONFIG_FUZZ_KEY: &str = "fuzz"; -pub const INLINE_CONFIG_INVARIANT_KEY: &str = "invariant"; -const INLINE_CONFIG_PREFIX: &str = "forge-config"; +const INLINE_CONFIG_PREFIX: &str = "forge-config:"; + +type DataMap = Map; + +/// Errors returned when parsing inline config. +#[derive(Clone, Debug, PartialEq, Eq, thiserror::Error)] +pub enum InlineConfigErrorKind { + /// Failed to parse inline config as TOML. + #[error(transparent)] + Parse(#[from] toml::de::Error), + /// An invalid profile has been provided. + #[error("invalid profile `{0}`; valid profiles: {1}")] + InvalidProfile(String, String), +} -static INLINE_CONFIG_PREFIX_SELECTED_PROFILE: LazyLock = LazyLock::new(|| { - let selected_profile = Config::selected_profile().to_string(); - format!("{INLINE_CONFIG_PREFIX}:{selected_profile}.") -}); +/// Wrapper error struct that catches config parsing errors, enriching them with context information +/// reporting the misconfigured line. +#[derive(Debug, thiserror::Error)] +#[error("Inline config error at {location}: {kind}")] +pub struct InlineConfigError { + /// The span of the error in the format: + /// `dir/TestContract.t.sol:FuzzContract:10:12:111` + pub location: String, + /// The inner error + pub kind: InlineConfigErrorKind, +} /// Represents per-test configurations, declared inline /// as structured comments in Solidity test files. This allows /// to create configs directly bound to a solidity test. #[derive(Clone, Debug, Default)] -pub struct InlineConfig { - /// Contract-level configurations, used for functions that do not have a specific - /// configuration. - contract_level: HashMap, - /// Maps a (test-contract, test-function) pair - /// to a specific configuration provided by the user. - fn_level: HashMap<(String, String), T>, +pub struct InlineConfig { + /// Contract-level configuration. + contract_level: HashMap, + /// Function-level configuration. + fn_level: HashMap<(String, String), DataMap>, } -impl InlineConfig { - /// Returns an inline configuration, if any, for a test function. - /// Configuration is identified by the pair "contract", "function". 
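The deleted `error.rs` above is replaced by the `InlineConfigErrorKind`/`InlineConfigError` pair introduced in `inline/mod.rs`. A minimal standalone sketch of how the new shape composes, assuming the `thiserror` and `toml` crates already used by this crate (the location string below is made up):

```rust
use thiserror::Error;

// Sketch of the reworked inline-config error, mirroring the shapes in this diff.
#[derive(Debug, Error)]
#[error("Inline config error at {location}: {kind}")]
struct InlineConfigError {
    location: String,
    kind: InlineConfigErrorKind,
}

#[derive(Debug, Error)]
enum InlineConfigErrorKind {
    #[error(transparent)]
    Parse(#[from] toml::de::Error),
    #[error("invalid profile `{0}`; valid profiles: {1}")]
    InvalidProfile(String, String),
}

fn main() {
    // An inline line with a malformed value fails TOML parsing and is wrapped with its location.
    let bad = "default.fuzz.runs = "; // missing value
    let err = toml::from_str::<toml::Value>(bad).unwrap_err();
    let wrapped = InlineConfigError {
        location: "test/Counter.t.sol:42:4:120".to_string(),
        kind: InlineConfigErrorKind::Parse(err),
    };
    println!("{wrapped}");
}
```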
- pub fn get(&self, contract_id: &str, fn_name: &str) -> Option<&T> { - let key = (contract_id.to_string(), fn_name.to_string()); - self.fn_level.get(&key).or_else(|| self.contract_level.get(contract_id)) +impl InlineConfig { + /// Creates a new, empty [`InlineConfig`]. + pub fn new() -> Self { + Self::default() + } + + /// Tries to create a new instance by detecting inline configurations from the project compile + /// output. + pub fn new_parsed(output: &ProjectCompileOutput, config: &Config) -> eyre::Result { + let natspecs: Vec = NatSpec::parse(output, &config.root); + let profiles = &config.profiles; + let mut inline = Self::new(); + for natspec in &natspecs { + inline.insert(natspec)?; + // Validate after parsing as TOML. + natspec.validate_profiles(profiles)?; + } + Ok(inline) + } + + /// Inserts a new [`NatSpec`] into the [`InlineConfig`]. + pub fn insert(&mut self, natspec: &NatSpec) -> Result<(), InlineConfigError> { + let map = if let Some(function) = &natspec.function { + self.fn_level.entry((natspec.contract.clone(), function.clone())).or_default() + } else { + self.contract_level.entry(natspec.contract.clone()).or_default() + }; + let joined = natspec + .config_values() + .map(|s| { + // Replace `-` with `_` for backwards compatibility with the old parser. + if let Some(idx) = s.find('=') { + s[..idx].replace('-', "_") + &s[idx..] + } else { + s.to_string() + } + }) + .format("\n") + .to_string(); + let data = toml::from_str::(&joined).map_err(|e| InlineConfigError { + location: natspec.location_string(), + kind: InlineConfigErrorKind::Parse(e), + })?; + extend_data_map(map, &data); + Ok(()) + } + + /// Returns a [`figment::Provider`] for this [`InlineConfig`] at the given contract and function + /// level. + pub fn provide<'a>(&'a self, contract: &'a str, function: &'a str) -> InlineConfigProvider<'a> { + InlineConfigProvider { inline: self, contract, function } + } + + /// Merges the inline configuration at the given contract and function level with the provided + /// base configuration. + pub fn merge(&self, contract: &str, function: &str, base: &Config) -> Figment { + Figment::from(base).merge(self.provide(contract, function)) + } + + /// Returns `true` if a configuration is present at the given contract level. + pub fn contains_contract(&self, contract: &str) -> bool { + self.get_contract(contract).is_some_and(|map| !map.is_empty()) } - pub fn insert_contract(&mut self, contract_id: impl Into, config: T) { - self.contract_level.insert(contract_id.into(), config); + /// Returns `true` if a configuration is present at the function level. + /// + /// Does not include contract-level configurations. + pub fn contains_function(&self, contract: &str, function: &str) -> bool { + self.get_function(contract, function).is_some_and(|map| !map.is_empty()) } - /// Inserts an inline configuration, for a test function. - /// Configuration is identified by the pair "contract", "function". - pub fn insert_fn(&mut self, contract_id: C, fn_name: F, config: T) - where - C: Into, - F: Into, - { - let key = (contract_id.into(), fn_name.into()); - self.fn_level.insert(key, config); + fn get_contract(&self, contract: &str) -> Option<&DataMap> { + self.contract_level.get(contract) + } + + fn get_function(&self, contract: &str, function: &str) -> Option<&DataMap> { + let key = (contract.to_string(), function.to_string()); + self.fn_level.get(&key) + } +} + +/// [`figment::Provider`] for [`InlineConfig`] at a given contract and function level. +/// +/// Created by [`InlineConfig::provide`]. 
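The key step in the new `insert` above is normalizing dashed keys for backwards compatibility and handing the joined lines to the TOML parser. A self-contained sketch of that transformation, using a plain `toml::Table` instead of figment's `Dict` (function name is illustrative):

```rust
// Minimal sketch of the parsing step `InlineConfig::insert` performs in this diff:
// natspec values are joined, dashed keys are rewritten to underscores, and the result
// is parsed as nested TOML.
fn parse_inline(values: &[&str]) -> Result<toml::Table, toml::de::Error> {
    let joined = values
        .iter()
        .map(|s| match s.find('=') {
            // Only the key part (before `=`) is rewritten.
            Some(idx) => s[..idx].replace('-', "_") + &s[idx..],
            None => s.to_string(),
        })
        .collect::<Vec<_>>()
        .join("\n");
    toml::from_str(&joined)
}

fn main() {
    let table = parse_inline(&[
        "default.fuzz.runs = 100",
        "default.fuzz.max-test-rejects = 7",
        "ci.invariant.depth = 15",
    ])
    .unwrap();
    // Keys end up grouped per profile, e.g. table["default"]["fuzz"]["runs"] == 100.
    println!("{table:#?}");
}
```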
+#[derive(Clone, Debug)] +pub struct InlineConfigProvider<'a> { + inline: &'a InlineConfig, + contract: &'a str, + function: &'a str, +} + +impl Provider for InlineConfigProvider<'_> { + fn metadata(&self) -> figment::Metadata { + figment::Metadata::named("inline config") + } + + fn data(&self) -> figment::Result { + let mut map = DataMap::new(); + if let Some(new) = self.inline.get_contract(self.contract) { + extend_data_map(&mut map, new); + } + if let Some(new) = self.inline.get_function(self.contract, self.function) { + extend_data_map(&mut map, new); + } + Ok(map) } } -pub(crate) fn remove_whitespaces(s: &str) -> String { - s.chars().filter(|c| !c.is_whitespace()).collect() +fn extend_data_map(map: &mut DataMap, new: &DataMap) { + for (profile, data) in new { + extend_dict(map.entry(profile.clone()).or_default(), data); + } +} + +fn extend_dict(dict: &mut Dict, new: &Dict) { + for (k, v) in new { + match dict.entry(k.clone()) { + std::collections::btree_map::Entry::Vacant(entry) => { + entry.insert(v.clone()); + } + std::collections::btree_map::Entry::Occupied(entry) => { + extend_value(entry.into_mut(), v); + } + } + } +} + +fn extend_value(value: &mut Value, new: &Value) { + match (value, new) { + (Value::Dict(tag, dict), Value::Dict(new_tag, new_dict)) => { + *tag = *new_tag; + extend_dict(dict, new_dict); + } + (value, new) => *value = new.clone(), + } } diff --git a/crates/config/src/inline/natspec.rs b/crates/config/src/inline/natspec.rs index 6dd6b696c..5774d9e19 100644 --- a/crates/config/src/inline/natspec.rs +++ b/crates/config/src/inline/natspec.rs @@ -1,8 +1,10 @@ -use super::{remove_whitespaces, INLINE_CONFIG_PREFIX, INLINE_CONFIG_PREFIX_SELECTED_PROFILE}; +use super::{InlineConfigError, InlineConfigErrorKind, INLINE_CONFIG_PREFIX}; +use figment::Profile; use foundry_compilers::{ artifacts::{ast::NodeType, Node}, ProjectCompileOutput, }; +use itertools::Itertools; use serde_json::Value; use solang_parser::{helpers::CodeLocation, pt}; use std::{collections::BTreeMap, path::Path}; @@ -10,15 +12,13 @@ use std::{collections::BTreeMap, path::Path}; /// Convenient struct to hold in-line per-test configurations #[derive(Clone, Debug, PartialEq, Eq)] pub struct NatSpec { - /// The parent contract of the natspec + /// The parent contract of the natspec. pub contract: String, - /// The function annotated with the natspec. None if the natspec is contract-level + /// The function annotated with the natspec. None if the natspec is contract-level. pub function: Option, - /// The line the natspec appears, in the form - /// `row:col:length` i.e. `10:21:122` + /// The line the natspec appears, in the form `row:col:length`, i.e. `10:21:122`. pub line: String, - /// The actual natspec comment, without slashes or block - /// punctuation + /// The actual natspec comment, without slashes or block punctuation. pub docs: String, } @@ -56,29 +56,52 @@ impl NatSpec { natspecs } - /// Returns a string describing the natspec - /// context, for debugging purposes 🐞 - /// i.e. `test/Counter.t.sol:CounterTest:testFuzz_SetNumber` - pub fn debug_context(&self) -> String { - format!("{}:{}", self.contract, self.function.as_deref().unwrap_or_default()) + /// Checks if all configuration lines use a valid profile. + /// + /// i.e. Given available profiles + /// ```rust + /// let _profiles = vec!["ci", "default"]; + /// ``` + /// A configuration like `forge-config: ciii.invariant.depth = 1` would result + /// in an error. 
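To make the precedence implemented by `extend_data_map`/`extend_dict`/`extend_value` above concrete: nested tables merge key by key, and scalars from the newer source win. A toy standalone sketch with a simplified `Value` type (not figment's):

```rust
use std::collections::BTreeMap;

#[derive(Clone, Debug, PartialEq)]
enum Value {
    Int(i64),
    Dict(BTreeMap<String, Value>),
}

// Deep-merge `new` into `dest`, mirroring the entry-based merge in this diff.
fn extend(dest: &mut BTreeMap<String, Value>, new: &BTreeMap<String, Value>) {
    for (k, v) in new {
        match dest.entry(k.clone()) {
            std::collections::btree_map::Entry::Vacant(entry) => {
                entry.insert(v.clone());
            }
            std::collections::btree_map::Entry::Occupied(entry) => match (entry.into_mut(), v) {
                (Value::Dict(old), Value::Dict(newer)) => extend(old, newer),
                (old, newer) => *old = newer.clone(),
            },
        }
    }
}

fn main() {
    let mut contract_level = BTreeMap::from([(
        "fuzz".to_string(),
        Value::Dict(BTreeMap::from([
            ("runs".to_string(), Value::Int(100)),
            ("seed".to_string(), Value::Int(7)),
        ])),
    )]);
    let function_level = BTreeMap::from([(
        "fuzz".to_string(),
        Value::Dict(BTreeMap::from([("runs".to_string(), Value::Int(1_000))])),
    )]);
    extend(&mut contract_level, &function_level);
    // `runs` is overridden by the function-level value, `seed` survives from the contract level.
    let Value::Dict(fuzz) = &contract_level["fuzz"] else { unreachable!() };
    assert_eq!(fuzz["runs"], Value::Int(1_000));
    assert_eq!(fuzz["seed"], Value::Int(7));
}
```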
+ pub fn validate_profiles(&self, profiles: &[Profile]) -> eyre::Result<()> { + for config in self.config_values() { + if !profiles.iter().any(|p| { + config + .strip_prefix(p.as_str().as_str()) + .is_some_and(|rest| rest.trim_start().starts_with('.')) + }) { + Err(InlineConfigError { + location: self.location_string(), + kind: InlineConfigErrorKind::InvalidProfile( + config.to_string(), + profiles.iter().format(", ").to_string(), + ), + })? + } + } + Ok(()) } - /// Returns a list of configuration lines that match the current profile - pub fn current_profile_configs(&self) -> impl Iterator + '_ { - self.config_lines_with_prefix(INLINE_CONFIG_PREFIX_SELECTED_PROFILE.as_str()) + /// Returns the path of the contract. + pub fn path(&self) -> &str { + match self.contract.split_once(':') { + Some((path, _)) => path, + None => self.contract.as_str(), + } } - /// Returns a list of configuration lines that match a specific string prefix - pub fn config_lines_with_prefix<'a>( - &'a self, - prefix: &'a str, - ) -> impl Iterator + 'a { - self.config_lines().filter(move |l| l.starts_with(prefix)) + /// Returns the location of the natspec as a string. + pub fn location_string(&self) -> String { + format!("{}:{}", self.path(), self.line) } - /// Returns a list of all the configuration lines available in the natspec - pub fn config_lines(&self) -> impl Iterator + '_ { - self.docs.lines().filter(|line| line.contains(INLINE_CONFIG_PREFIX)).map(remove_whitespaces) + /// Returns a list of all the configuration values available in the natspec. + pub fn config_values(&self) -> impl Iterator { + self.docs.lines().filter_map(|line| { + line.find(INLINE_CONFIG_PREFIX) + .map(|idx| line[idx + INLINE_CONFIG_PREFIX.len()..].trim()) + }) } } @@ -258,6 +281,42 @@ mod tests { use super::*; use serde_json::json; + #[test] + fn can_reject_invalid_profiles() { + let profiles = ["ci".into(), "default".into()]; + let natspec = NatSpec { + contract: Default::default(), + function: Default::default(), + line: Default::default(), + docs: r" + forge-config: ciii.invariant.depth = 1 + forge-config: default.invariant.depth = 1 + " + .into(), + }; + + let result = natspec.validate_profiles(&profiles); + assert!(result.is_err()); + } + + #[test] + fn can_accept_valid_profiles() { + let profiles = ["ci".into(), "default".into()]; + let natspec = NatSpec { + contract: Default::default(), + function: Default::default(), + line: Default::default(), + docs: r" + forge-config: ci.invariant.depth = 1 + forge-config: default.invariant.depth = 1 + " + .into(), + }; + + let result = natspec.validate_profiles(&profiles); + assert!(result.is_ok()); + } + #[test] fn parse_solang() { let src = " @@ -355,42 +414,13 @@ contract FuzzInlineConf is DSTest { #[test] fn config_lines() { let natspec = natspec(); - let config_lines = natspec.config_lines(); - assert_eq!( - config_lines.collect::>(), - vec![ - "forge-config:default.fuzz.runs=600".to_string(), - "forge-config:ci.fuzz.runs=500".to_string(), - "forge-config:default.invariant.runs=1".to_string() - ] - ) - } - - #[test] - fn current_profile_configs() { - let natspec = natspec(); - let config_lines = natspec.current_profile_configs(); - - assert_eq!( - config_lines.collect::>(), - vec![ - "forge-config:default.fuzz.runs=600".to_string(), - "forge-config:default.invariant.runs=1".to_string() - ] - ); - } - - #[test] - fn config_lines_with_prefix() { - use super::INLINE_CONFIG_PREFIX; - let natspec = natspec(); - let prefix = format!("{INLINE_CONFIG_PREFIX}:default"); - let config_lines = 
natspec.config_lines_with_prefix(&prefix); + let config_lines = natspec.config_values(); assert_eq!( config_lines.collect::>(), - vec![ - "forge-config:default.fuzz.runs=600".to_string(), - "forge-config:default.invariant.runs=1".to_string() + [ + "default.fuzz.runs = 600".to_string(), + "ci.fuzz.runs = 500".to_string(), + "default.invariant.runs = 1".to_string() ] ) } diff --git a/crates/config/src/invariant.rs b/crates/config/src/invariant.rs index a53a38a7d..5a6c02db9 100644 --- a/crates/config/src/invariant.rs +++ b/crates/config/src/invariant.rs @@ -1,12 +1,6 @@ //! Configuration for invariant testing -use crate::{ - fuzz::FuzzDictionaryConfig, - inline::{ - parse_config_bool, parse_config_u32, InlineConfigParser, InlineConfigParserError, - INLINE_CONFIG_INVARIANT_KEY, - }, -}; +use crate::fuzz::FuzzDictionaryConfig; use serde::{Deserialize, Serialize}; use std::path::PathBuf; @@ -36,6 +30,8 @@ pub struct InvariantConfig { pub failure_persist_dir: Option, /// Whether to collect and display fuzzed selectors metrics. pub show_metrics: bool, + /// Optional timeout (in seconds) for each invariant test. + pub timeout: Option, /// When enabled, filters all addresses below 2^16, as they are reserved in zkSync. pub no_zksync_reserved_addresses: bool, } @@ -53,6 +49,7 @@ impl Default for InvariantConfig { gas_report_samples: 256, failure_persist_dir: None, show_metrics: false, + timeout: None, no_zksync_reserved_addresses: false, } } @@ -72,6 +69,7 @@ impl InvariantConfig { gas_report_samples: 256, failure_persist_dir: Some(cache_dir), show_metrics: false, + timeout: None, no_zksync_reserved_addresses: false, } } @@ -84,91 +82,3 @@ impl InvariantConfig { .join(contract_name.split(':').last().unwrap()) } } - -impl InlineConfigParser for InvariantConfig { - fn config_key() -> String { - INLINE_CONFIG_INVARIANT_KEY.into() - } - - fn try_merge(&self, configs: &[String]) -> Result, InlineConfigParserError> { - let overrides: Vec<(String, String)> = Self::get_config_overrides(configs); - - if overrides.is_empty() { - return Ok(None) - } - - let mut conf_clone = self.clone(); - - for pair in overrides { - let key = pair.0; - let value = pair.1; - match key.as_str() { - "runs" => conf_clone.runs = parse_config_u32(key, value)?, - "depth" => conf_clone.depth = parse_config_u32(key, value)?, - "fail-on-revert" => conf_clone.fail_on_revert = parse_config_bool(key, value)?, - "call-override" => conf_clone.call_override = parse_config_bool(key, value)?, - "failure-persist-dir" => { - conf_clone.failure_persist_dir = Some(PathBuf::from(value)) - } - "shrink-run-limit" => conf_clone.shrink_run_limit = parse_config_u32(key, value)?, - "show-metrics" => conf_clone.show_metrics = parse_config_bool(key, value)?, - "no-zksync-reserved-addresses" => { - conf_clone.no_zksync_reserved_addresses = parse_config_bool(key, value)? 
- } - _ => Err(InlineConfigParserError::InvalidConfigProperty(key.to_string()))?, - } - } - Ok(Some(conf_clone)) - } -} - -#[cfg(test)] -mod tests { - use crate::{inline::InlineConfigParser, InvariantConfig}; - - #[test] - fn unrecognized_property() { - let configs = &["forge-config: default.invariant.unknownprop = 200".to_string()]; - let base_config = InvariantConfig::default(); - if let Err(e) = base_config.try_merge(configs) { - assert_eq!(e.to_string(), "'unknownprop' is an invalid config property"); - } else { - unreachable!() - } - } - - #[test] - fn successful_merge() { - let configs = &["forge-config: default.invariant.runs = 42424242".to_string()]; - let base_config = InvariantConfig::default(); - let merged: InvariantConfig = base_config.try_merge(configs).expect("No errors").unwrap(); - assert_eq!(merged.runs, 42424242); - } - - #[test] - fn merge_is_none() { - let empty_config = &[]; - let base_config = InvariantConfig::default(); - let merged = base_config.try_merge(empty_config).expect("No errors"); - assert!(merged.is_none()); - } - - #[test] - fn can_merge_unrelated_properties_into_config() { - let unrelated_configs = &["forge-config: default.fuzz.runs = 2".to_string()]; - let base_config = InvariantConfig::default(); - let merged = base_config.try_merge(unrelated_configs).expect("No errors"); - assert!(merged.is_none()); - } - - #[test] - fn override_detection() { - let configs = &[ - "forge-config: default.fuzz.runs = 42424242".to_string(), - "forge-config: ci.fuzz.runs = 666666".to_string(), - "forge-config: default.invariant.runs = 2".to_string(), - ]; - let variables = InvariantConfig::get_config_overrides(configs); - assert_eq!(variables, vec![("runs".into(), "2".into())]); - } -} diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index e4c3b3b45..0981b058b 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -16,6 +16,7 @@ use figment::{ value::{Dict, Map, Value}, Error, Figment, Metadata, Profile, Provider, }; +use filter::GlobMatcher; use foundry_compilers::{ artifacts::{ output_selection::{ContractOutputSelection, OutputSelection}, @@ -37,7 +38,6 @@ use foundry_compilers::{ ArtifactOutput, ConfigurableArtifacts, Graph, Project, ProjectPathsConfig, RestrictionsWithVersion, VyperLanguage, }; -use inflector::Inflector; use regex::Regex; use revm_primitives::{map::AddressHashMap, FixedBytes, SpecId}; use semver::Version; @@ -98,7 +98,8 @@ pub use alloy_chains::{Chain, NamedChain}; pub use figment; pub mod providers; -use providers::{remappings::RemappingsProvider, FallbackProfileProvider, WarningsProvider}; +pub use providers::Remappings; +use providers::*; mod fuzz; pub use fuzz::{FuzzConfig, FuzzDictionaryConfig}; @@ -107,7 +108,7 @@ mod invariant; pub use invariant::InvariantConfig; mod inline; -pub use inline::{validate_profiles, InlineConfig, InlineConfigError, InlineConfigParser, NatSpec}; +pub use inline::{InlineConfig, InlineConfigError, NatSpec}; pub mod soldeer; use soldeer::{SoldeerConfig, SoldeerDependencyConfig}; @@ -164,6 +165,19 @@ pub struct Config { /// set to the extracting Figment's selected `Profile`. #[serde(skip)] pub profile: Profile, + /// The list of all profiles defined in the config. + /// + /// See `profile`. + #[serde(skip)] + pub profiles: Vec, + + /// The root path where the config detection started from, [`Config::with_root`]. 
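The `timeout` field added to both the fuzz and invariant configs earlier in this diff is only a number of seconds; how a runner consumes it is up to the caller. A hypothetical, std-only sketch (not part of this change) of turning it into a deadline:

```rust
use std::time::{Duration, Instant};

// Illustrative stand-in for the config structs; only the new field matters here.
struct TimeoutConfig {
    timeout: Option<u32>,
}

fn run_campaign(config: &TimeoutConfig, mut run_once: impl FnMut()) {
    let deadline = config.timeout.map(|secs| Instant::now() + Duration::from_secs(secs.into()));
    loop {
        run_once();
        match deadline {
            // Stop once the configured per-test timeout has elapsed.
            Some(deadline) if Instant::now() >= deadline => break,
            // No timeout configured: this sketch stops after a single iteration.
            None => break,
            _ => {}
        }
    }
}

fn main() {
    let config = TimeoutConfig { timeout: None };
    run_campaign(&config, || { /* one fuzz/invariant iteration */ });
}
```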
+ // We're skipping serialization here, so it won't be included in the [`Config::to_string()`] + // representation, but will be deserialized from the `Figment` so that forge commands can + // override it. + #[serde(default = "root_default", skip_serializing)] + pub root: PathBuf, + /// path of the source contracts dir, like `src` or `contracts` pub src: PathBuf, /// path of the test dir @@ -193,8 +207,7 @@ pub struct Config { /// additional solc include paths for `--include-path` pub include_paths: Vec, /// glob patterns to skip - #[serde(with = "from_vec_glob")] - pub skip: Vec, + pub skip: Vec, /// whether to force a `project.clean()` pub force: bool, /// evm version to use @@ -250,6 +263,15 @@ pub struct Config { pub eth_rpc_jwt: Option, /// Timeout that should be used for any rpc calls pub eth_rpc_timeout: Option, + /// Headers that should be used for any rpc calls + /// + /// # Example + /// + /// rpc_headers = ["x-custom-header:value", "x-another-header:another-value"] + /// + /// You can also the ETH_RPC_HEADERS env variable like so: + /// `ETH_RPC_HEADERS="x-custom-header:value x-another-header:another-value"` + pub eth_rpc_headers: Option>, /// etherscan API key, or alias for an `EtherscanConfig` in `etherscan` table pub etherscan_api_key: Option, /// Multiple etherscan api configs and their aliases @@ -442,6 +464,9 @@ pub struct Config { /// CREATE2 salt to use for the library deployment in scripts. pub create2_library_salt: B256, + /// The CREATE2 deployer address to use. + pub create2_deployer: Address, + /// Configuration for Vyper compiler pub vyper: VyperConfig, @@ -451,13 +476,6 @@ pub struct Config { /// Soldeer custom configs pub soldeer: Option, - /// The root path where the config detection started from, [`Config::with_root`]. - // We're skipping serialization here, so it won't be included in the [`Config::to_string()`] - // representation, but will be deserialized from the `Figment` so that forge commands can - // override it. - #[serde(default, skip_serializing)] - pub root: RootPath, - /// Whether failed assertions should revert. /// /// Note that this only applies to native (cheatcode) assertions, invoked on Vm contract. @@ -474,8 +492,9 @@ pub struct Config { #[serde(default, skip_serializing_if = "Option::is_none")] pub eof_version: Option, - /// Whether to enable Alphanet features. - pub alphanet: bool, + /// Whether to enable Odyssey features. + #[serde(alias = "alphanet")] + pub odyssey: bool, /// Timeout for transactions in seconds. pub transaction_timeout: u64, @@ -483,7 +502,7 @@ pub struct Config { /// Use EOF-enabled solc for compilation. pub eof: bool, - /// Warnings gathered when loading the Config. See [`WarningsProvider`] for more information + /// Warnings gathered when loading the Config. See [`WarningsProvider`] for more information. #[serde(rename = "__warnings", default, skip_serializing)] pub warnings: Vec, @@ -511,7 +530,7 @@ pub struct Config { pub zksync: ZkSyncConfig, } -/// Mapping of fallback standalone sections. See [`FallbackProfileProvider`] +/// Mapping of fallback standalone sections. See [`FallbackProfileProvider`]. pub const STANDALONE_FALLBACK_SECTIONS: &[(&str, &str)] = &[("invariant", "fuzz")]; /// Deprecated keys and their replacements. 
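For the new `eth_rpc_headers` option documented above, entries follow a `name:value` convention. A small illustrative helper (not part of the diff) for splitting them:

```rust
// Split documented `rpc_headers` entries ("name:value") into pairs; illustrative only.
fn parse_headers(headers: &[&str]) -> Vec<(String, String)> {
    headers
        .iter()
        .filter_map(|h| h.split_once(':'))
        .map(|(name, value)| (name.trim().to_string(), value.trim().to_string()))
        .collect()
}

fn main() {
    let parsed = parse_headers(&["x-custom-header:value", "x-another-header:another-value"]);
    assert_eq!(parsed[0], ("x-custom-header".to_string(), "value".to_string()));
    println!("{parsed:?}");
}
```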
@@ -521,7 +540,7 @@ pub const DEPRECATIONS: &[(&str, &str)] = &[("cancun", "evm_version = Cancun")]; impl Config { /// The default profile: "default" - pub const DEFAULT_PROFILE: Profile = Profile::const_new("default"); + pub const DEFAULT_PROFILE: Profile = Profile::Default; /// The hardhat profile: "hardhat" pub const HARDHAT_PROFILE: Profile = Profile::const_new("hardhat"); @@ -558,6 +577,10 @@ impl Config { /// Default salt for create2 library deployments pub const DEFAULT_CREATE2_LIBRARY_SALT: FixedBytes<32> = FixedBytes::<32>::ZERO; + /// Default create2 deployer + pub const DEFAULT_CREATE2_DEPLOYER: Address = + address!("4e59b44847b379578588920ca78fbf26c0b4956c"); + /// Docker image with eof-enabled solc binary pub const EOF_SOLC_IMAGE: &'static str = "ghcr.io/paradigmxyz/forge-eof@sha256:46f868ce5264e1190881a3a335d41d7f42d6f26ed20b0c823609c715e38d603f"; @@ -574,7 +597,7 @@ impl Config { /// See [`figment`](Self::figment) for more details. #[track_caller] pub fn load_with_providers(providers: FigmentProviders) -> Self { - Self::default().to_figment(providers).extract().unwrap() + Self::from_provider(Self::default().to_figment(providers)) } /// Returns the current `Config` @@ -625,19 +648,47 @@ impl Config { /// let config = Config::try_from(figment); /// ``` pub fn try_from(provider: T) -> Result { - let figment = Figment::from(provider); + Self::try_from_figment(Figment::from(provider)) + } + + fn try_from_figment(figment: Figment) -> Result { let mut config = figment.extract::().map_err(ExtractConfigError::new)?; config.profile = figment.profile().clone(); + + // The `"profile"` profile contains all the profiles as keys. + let mut add_profile = |profile: &Profile| { + if !config.profiles.contains(profile) { + config.profiles.push(profile.clone()); + } + }; + let figment = figment.select(Self::PROFILE_SECTION); + if let Ok(data) = figment.data() { + if let Some(profiles) = data.get(&Profile::new(Self::PROFILE_SECTION)) { + for profile in profiles.keys() { + add_profile(&Profile::new(profile)); + } + } + } + add_profile(&Self::DEFAULT_PROFILE); + add_profile(&config.profile); + Ok(config) } /// Returns the populated [Figment] using the requested [FigmentProviders] preset. /// - /// This will merge various providers, such as env,toml,remappings into the figment. - pub fn to_figment(self, providers: FigmentProviders) -> Figment { - let mut c = self; + /// This will merge various providers, such as env,toml,remappings into the figment if + /// requested. 
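The new `profiles` list is populated in `try_from_figment` above by walking the keys of the `[profile.*]` section. A standalone sketch of that discovery step using figment directly (the profile names in the TOML are examples):

```rust
use figment::{
    providers::{Format, Toml},
    Figment, Profile, Provider,
};

fn main() {
    let toml = r#"
        [profile.default]
        src = "src"

        [profile.ci]
        fuzz = { runs = 10000 }
    "#;
    // With `nested()`, top-level tables become figment profiles, so the `[profile.*]`
    // tables show up as keys under the `profile` section.
    let figment = Figment::from(Toml::string(toml).nested()).select("profile");

    let mut profiles = vec![Profile::Default];
    if let Ok(data) = figment.data() {
        if let Some(section) = data.get(&Profile::new("profile")) {
            for name in section.keys() {
                let profile = Profile::new(name);
                if !profiles.contains(&profile) {
                    profiles.push(profile);
                }
            }
        }
    }
    // With the TOML above this yields the default profile plus `ci`.
    println!("{profiles:?}");
}
```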
+ pub fn to_figment(&self, providers: FigmentProviders) -> Figment { + // Note that `Figment::from` here is a method on `Figment` rather than the `From` impl below + + if providers.is_none() { + return Figment::from(self); + } + + let root = self.root.as_path(); let profile = Self::selected_profile(); - let mut figment = Figment::default().merge(DappHardhatDirProvider(&c.root.0)); + let mut figment = Figment::default().merge(DappHardhatDirProvider(root)); // merge global foundry.toml file if let Some(global_toml) = Self::foundry_dir_toml().filter(|p| p.exists()) { @@ -650,7 +701,7 @@ impl Config { // merge local foundry.toml file figment = Self::merge_toml_provider( figment, - TomlFileProvider::new(Some("FOUNDRY_CONFIG"), c.root.0.join(Self::FILE_NAME)).cached(), + TomlFileProvider::new(Some("FOUNDRY_CONFIG"), root.join(Self::FILE_NAME)).cached(), profile.clone(), ); @@ -697,17 +748,17 @@ impl Config { lib_paths: figment .extract_inner::>("libs") .map(Cow::Owned) - .unwrap_or_else(|_| Cow::Borrowed(&c.libs)), - root: &c.root.0, + .unwrap_or_else(|_| Cow::Borrowed(&self.libs)), + root, remappings: figment.extract_inner::>("remappings"), }; figment = figment.merge(remappings); } // normalize defaults - figment = c.normalize_defaults(figment); + figment = self.normalize_defaults(figment); - Figment::from(c).merge(figment).select(profile) + Figment::from(self).merge(figment).select(profile) } /// The config supports relative paths and tracks the root path separately see @@ -716,7 +767,7 @@ impl Config { /// This joins all relative paths with the current root and attempts to make them canonic #[must_use] pub fn canonic(self) -> Self { - let root = self.root.0.clone(); + let root = self.root.clone(); self.canonic_at(root) } @@ -909,6 +960,9 @@ impl Config { ) -> Result>, SolcError> { let mut map = BTreeMap::new(); + if self.compilation_restrictions.is_empty() { + return Ok(BTreeMap::new()); + } let graph = Graph::::resolve(paths)?; let (sources, _) = graph.into_sources(); @@ -959,7 +1013,7 @@ impl Config { .set_no_artifacts(no_artifacts); if !self.skip.is_empty() { - let filter = SkipBuildFilters::new(self.skip.clone(), self.root.0.clone()); + let filter = SkipBuildFilters::new(self.skip.clone(), self.root.clone()); builder = builder.sparse_output(filter); } @@ -973,7 +1027,7 @@ impl Config { } /// Cleans the project. 
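With the new `FigmentProviders::None` preset handled at the top of `to_figment` above, the figment can be built without touching the environment or any TOML files. A usage sketch, assuming `Config` and `FigmentProviders` are re-exported as shown in this diff:

```rust
use foundry_config::{Config, FigmentProviders};

// The `None` preset skips env/TOML/remappings providers and just wraps the in-memory
// config, which is useful when no file-system or env lookups are wanted.
fn cheap_figment(config: &Config) -> figment::Figment {
    config.to_figment(FigmentProviders::None)
}
```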
- pub fn cleanup( + pub fn cleanup>( &self, project: &Project, ) -> Result<(), SolcError> { @@ -1013,7 +1067,7 @@ impl Config { fn ensure_solc(&self) -> Result, SolcError> { if self.eof { let (tx, rx) = mpsc::channel(); - let root = self.root.0.clone(); + let root = self.root.clone(); std::thread::spawn(move || { tx.send( Solc::new_with_args( @@ -1045,7 +1099,7 @@ impl Config { rx.recv().expect("sender dropped") } Err(RecvTimeoutError::Disconnected) => panic!("sender dropped"), - } + }; } if let Some(ref solc) = self.solc { let solc = match solc { @@ -1080,7 +1134,7 @@ impl Config { /// Returns the [SpecId] derived from the configured [EvmVersion] #[inline] pub fn evm_spec_id(&self) -> SpecId { - evm_spec_id(&self.evm_version, self.alphanet) + evm_spec_id(self.evm_version, self.odyssey) } /// Returns whether the compiler version should be auto-detected @@ -1123,7 +1177,7 @@ impl Config { .artifacts(&self.out) .libs(self.libs.iter()) .remappings(self.get_all_remappings()) - .allowed_path(&self.root.0) + .allowed_path(&self.root) .allowed_paths(&self.libs) .allowed_paths(&self.allow_paths) .include_paths(&self.include_paths); @@ -1132,7 +1186,7 @@ impl Config { builder = builder.build_infos(build_info_path); } - builder.build_with_root(&self.root.0) + builder.build_with_root(&self.root) } /// Returns configuration for a compiler to use when setting up a [Project]. @@ -1246,11 +1300,11 @@ impl Config { ) -> Option, UnresolvedEnvVarError>> { let mut endpoints = self.rpc_endpoints.clone().resolved(); if let Some(endpoint) = endpoints.remove(maybe_alias) { - return Some(endpoint.map(Cow::Owned)) + return Some(endpoint.map(Cow::Owned)); } if let Ok(Some(endpoint)) = mesc::get_endpoint_by_query(maybe_alias, Some("foundry")) { - return Some(Ok(Cow::Owned(endpoint.url))) + return Some(Ok(Cow::Owned(endpoint.url))); } None @@ -1384,7 +1438,7 @@ impl Config { /// Returns the remapping for the project's _test_ directory, but only if it exists pub fn get_test_dir_remapping(&self) -> Option { - if self.root.0.join(&self.test).exists() { + if self.root.join(&self.test).exists() { get_dir_remapping(&self.test) } else { None @@ -1393,7 +1447,7 @@ impl Config { /// Returns the remapping for the project's _script_ directory, but only if it exists pub fn get_script_dir_remapping(&self) -> Option { - if self.root.0.join(&self.script).exists() { + if self.root.join(&self.script).exists() { get_dir_remapping(&self.script) } else { None @@ -1571,7 +1625,7 @@ impl Config { let paths = ProjectPathsConfig::builder().build_with_root::<()>(root); let artifacts: PathBuf = paths.artifacts.file_name().unwrap().into(); Self { - root: paths.root.into(), + root: paths.root, src: paths.sources.file_name().unwrap().into(), out: artifacts.clone(), libs: paths.libraries.into_iter().map(|lib| lib.file_name().unwrap().into()).collect(), @@ -1663,7 +1717,7 @@ impl Config { pub fn update_libs(&self) -> eyre::Result<()> { self.update(|doc| { let profile = self.profile.as_str().as_str(); - let root = &self.root.0; + let root = &self.root; let libs: toml_edit::Value = self .libs .iter() @@ -1720,7 +1774,7 @@ impl Config { /// Returns the path to the `foundry.toml` of this `Config`. pub fn get_config_path(&self) -> PathBuf { - self.root.0.join(Self::FILE_NAME) + self.root.join(Self::FILE_NAME) } /// Sets the non-inlinable libraries inside a `foundry.toml` file but only if it exists the @@ -1744,6 +1798,19 @@ impl Config { /// /// If the `FOUNDRY_PROFILE` env variable is not set, this returns the `DEFAULT_PROFILE`. 
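The next hunk caches `Config::selected_profile` per process. A standalone, std-only sketch of the same pattern, with a plain `String` standing in for `Profile`:

```rust
use std::sync::OnceLock;

// Resolve the env-derived value once per process outside of tests, but re-read it every
// time under `cfg(test)` because tests mutate the environment.
fn selected_profile() -> String {
    fn force() -> String {
        std::env::var("FOUNDRY_PROFILE").unwrap_or_else(|_| "default".to_string())
    }
    #[cfg(test)]
    {
        force()
    }
    #[cfg(not(test))]
    {
        static CACHE: OnceLock<String> = OnceLock::new();
        CACHE.get_or_init(force).clone()
    }
}

fn main() {
    println!("selected profile: {}", selected_profile());
}
```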
pub fn selected_profile() -> Profile { + // Can't cache in tests because the env var can change. + #[cfg(test)] + { + Self::force_selected_profile() + } + #[cfg(not(test))] + { + static CACHE: std::sync::OnceLock = std::sync::OnceLock::new(); + CACHE.get_or_init(Self::force_selected_profile).clone() + } + } + + fn force_selected_profile() -> Profile { Profile::from_env_or("FOUNDRY_PROFILE", Self::DEFAULT_PROFILE) } @@ -2009,7 +2076,7 @@ impl Config { let provider = toml_provider.strict_select(profiles); // apply any key fixes - let provider = BackwardsCompatTomlProvider(ForcedSnakeCaseData(provider)); + let provider = &BackwardsCompatTomlProvider(ForcedSnakeCaseData(provider)); // merge the default profile as a base if profile != Self::DEFAULT_PROFILE { @@ -2039,19 +2106,20 @@ impl Config { /// This normalizes the default `evm_version` if a `solc` was provided in the config. /// /// See also - fn normalize_defaults(&mut self, figment: Figment) -> Figment { + fn normalize_defaults(&self, mut figment: Figment) -> Figment { + // TODO: add a warning if evm_version is provided but incompatible + if figment.contains("evm_version") { + return figment; + } + + // Normalize `evm_version` based on the provided solc version. if let Ok(solc) = figment.extract_inner::("solc") { - // check if evm_version is set - // TODO: add a warning if evm_version is provided but incompatible - if figment.find_value("evm_version").is_err() { - if let Some(version) = solc - .try_version() - .ok() - .and_then(|version| self.evm_version.normalize_version_solc(&version)) - { - // normalize evm_version based on the provided solc version - self.evm_version = version; - } + if let Some(version) = solc + .try_version() + .ok() + .and_then(|version| self.evm_version.normalize_version_solc(&version)) + { + figment = figment.merge(("evm_version", version)); } } @@ -2061,36 +2129,53 @@ impl Config { impl From for Figment { fn from(c: Config) -> Self { + (&c).into() + } +} +impl From<&Config> for Figment { + fn from(c: &Config) -> Self { c.to_figment(FigmentProviders::All) } } -/// Determines what providers should be used when loading the [Figment] for a [Config] +/// Determines what providers should be used when loading the [`Figment`] for a [`Config`]. #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] pub enum FigmentProviders { - /// Include all providers + /// Include all providers. #[default] All, - /// Only include necessary providers that are useful for cast commands + /// Only include necessary providers that are useful for cast commands. /// - /// This will exclude more expensive providers such as remappings + /// This will exclude more expensive providers such as remappings. Cast, - /// Only include necessary providers that are useful for anvil + /// Only include necessary providers that are useful for anvil. /// - /// This will exclude more expensive providers such as remappings + /// This will exclude more expensive providers such as remappings. Anvil, + /// Don't include any providers. + None, } impl FigmentProviders { - /// Returns true if all providers should be included + /// Returns true if all providers should be included. pub const fn is_all(&self) -> bool { matches!(self, Self::All) } - /// Returns true if this is the cast preset + /// Returns true if this is the cast preset. pub const fn is_cast(&self) -> bool { matches!(self, Self::Cast) } + + /// Returns true if this is the anvil preset. 
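The rewritten `normalize_defaults` above no longer mutates `self`; it merges the derived value into the figment instead. A minimal sketch of that `figment.merge((key, value))` pattern, with the EVM version hard-coded for illustration instead of being derived from the configured solc:

```rust
use figment::Figment;

// If the user did not set `evm_version` but one can be derived, merge it as a single
// key so later extraction sees it.
fn normalize(mut figment: Figment) -> Figment {
    if figment.contains("evm_version") {
        return figment;
    }
    let derived = "paris"; // stand-in for the value derived from the solc version
    figment = figment.merge(("evm_version", derived));
    figment
}

fn main() {
    let figment = normalize(Figment::new());
    let version: String = figment.extract_inner("evm_version").unwrap();
    assert_eq!(version, "paris");
}
```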
+ pub const fn is_anvil(&self) -> bool { + matches!(self, Self::Anvil) + } + + /// Returns true if no providers should be included. + pub const fn is_none(&self) -> bool { + matches!(self, Self::None) + } } /// Wrapper type for `regex::Regex` that implements `PartialEq` @@ -2153,53 +2238,6 @@ pub(crate) mod from_opt_glob { } } -/// Ser/de `globset::Glob` explicitly to handle `Option` properly -pub(crate) mod from_vec_glob { - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - - pub fn serialize(value: &[globset::Glob], serializer: S) -> Result - where - S: Serializer, - { - let value = value.iter().map(|g| g.glob()).collect::>(); - value.serialize(serializer) - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - let s: Vec = Vec::deserialize(deserializer)?; - s.into_iter() - .map(|s| globset::Glob::new(&s)) - .collect::, _>>() - .map_err(serde::de::Error::custom) - } -} - -/// A helper wrapper around the root path used during Config detection -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -#[serde(transparent)] -pub struct RootPath(pub PathBuf); - -impl Default for RootPath { - fn default() -> Self { - ".".into() - } -} - -impl> From
<P>
for RootPath { - fn from(p: P) -> Self { - Self(p.into()) - } -} - -impl AsRef for RootPath { - fn as_ref(&self) -> &Path { - &self.0 - } -} - /// Parses a config profile /// /// All `Profile` date is ignored by serde, however the `Config::to_string_pretty` includes it and @@ -2248,12 +2286,10 @@ impl Default for Config { fn default() -> Self { Self { profile: Self::DEFAULT_PROFILE, + profiles: vec![Self::DEFAULT_PROFILE], fs_permissions: FsPermissions::new([PathPermission::read("out")]), - #[cfg(not(feature = "isolate-by-default"))] - isolate: false, - #[cfg(feature = "isolate-by-default")] - isolate: true, - root: Default::default(), + isolate: cfg!(feature = "isolate-by-default"), + root: root_default(), src: "src".into(), test: "test".into(), script: "script".into(), @@ -2317,6 +2353,7 @@ impl Default for Config { eth_rpc_url: None, eth_rpc_jwt: None, eth_rpc_timeout: None, + eth_rpc_headers: None, etherscan_api_key: None, verbosity: 0, remappings: vec![], @@ -2350,6 +2387,7 @@ impl Default for Config { labels: Default::default(), unchecked_cheatcode_artifacts: false, create2_library_salt: Self::DEFAULT_CREATE2_LIBRARY_SALT, + create2_deployer: Self::DEFAULT_CREATE2_DEPLOYER, skip: vec![], dependencies: Default::default(), soldeer: Default::default(), @@ -2358,7 +2396,7 @@ impl Default for Config { warnings: vec![], extra_args: vec![], eof_version: None, - alphanet: false, + odyssey: false, transaction_timeout: 120, additional_compiler_profiles: Default::default(), compilation_restrictions: Default::default(), @@ -2369,11 +2407,12 @@ impl Default for Config { } } -/// Wrapper for the config's `gas_limit` value necessary because toml-rs can't handle larger number because integers are stored signed: +/// Wrapper for the config's `gas_limit` value necessary because toml-rs can't handle larger number +/// because integers are stored signed: /// /// Due to this limitation this type will be serialized/deserialized as String if it's larger than /// `i64` -#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize)] +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Deserialize)] pub struct GasLimit(#[serde(deserialize_with = "crate::deserialize_u64_or_max")] pub u64); impl From for GasLimit { @@ -2438,518 +2477,6 @@ impl> From for SolcReq { } } -/// A convenience provider to retrieve a toml file. 
-/// This will return an error if the env var is set but the file does not exist -struct TomlFileProvider { - pub env_var: Option<&'static str>, - pub default: PathBuf, - pub cache: Option, Error>>, -} - -impl TomlFileProvider { - fn new(env_var: Option<&'static str>, default: impl Into) -> Self { - Self { env_var, default: default.into(), cache: None } - } - - fn env_val(&self) -> Option { - self.env_var.and_then(Env::var) - } - - fn file(&self) -> PathBuf { - self.env_val().map(PathBuf::from).unwrap_or_else(|| self.default.clone()) - } - - fn is_missing(&self) -> bool { - if let Some(file) = self.env_val() { - let path = Path::new(&file); - if !path.exists() { - return true; - } - } - false - } - - pub fn cached(mut self) -> Self { - self.cache = Some(self.read()); - self - } - - fn read(&self) -> Result, Error> { - use serde::de::Error as _; - if let Some(file) = self.env_val() { - let path = Path::new(&file); - if !path.exists() { - return Err(Error::custom(format!( - "Config file `{}` set in env var `{}` does not exist", - file, - self.env_var.unwrap() - ))); - } - Toml::file(file) - } else { - Toml::file(&self.default) - } - .nested() - .data() - } -} - -impl Provider for TomlFileProvider { - fn metadata(&self) -> Metadata { - if self.is_missing() { - Metadata::named("TOML file provider") - } else { - Toml::file(self.file()).nested().metadata() - } - } - - fn data(&self) -> Result, Error> { - if let Some(cache) = self.cache.as_ref() { - cache.clone() - } else { - self.read() - } - } -} - -/// A Provider that ensures all keys are snake case if they're not standalone sections, See -/// `Config::STANDALONE_SECTIONS` -struct ForcedSnakeCaseData
<P>(P); -impl<P: Provider> Provider for ForcedSnakeCaseData<P>
{ - fn metadata(&self) -> Metadata { - self.0.metadata() - } - - fn data(&self) -> Result, Error> { - let mut map = Map::new(); - for (profile, dict) in self.0.data()? { - if Config::STANDALONE_SECTIONS.contains(&profile.as_ref()) { - // don't force snake case for keys in standalone sections - map.insert(profile, dict); - continue; - } - map.insert(profile, dict.into_iter().map(|(k, v)| (k.to_snake_case(), v)).collect()); - } - Ok(map) - } -} - -/// A Provider that handles breaking changes in toml files -struct BackwardsCompatTomlProvider
<P>(P); -impl<P: Provider> Provider for BackwardsCompatTomlProvider<P>
{ - fn metadata(&self) -> Metadata { - self.0.metadata() - } - - fn data(&self) -> Result, Error> { - let mut map = Map::new(); - let solc_env = std::env::var("FOUNDRY_SOLC_VERSION") - .or_else(|_| std::env::var("DAPP_SOLC_VERSION")) - .map(Value::from) - .ok(); - for (profile, mut dict) in self.0.data()? { - if let Some(v) = solc_env.clone() { - // ENV var takes precedence over config file - dict.insert("solc".to_string(), v); - } else if let Some(v) = dict.remove("solc_version") { - // only insert older variant if not already included - if !dict.contains_key("solc") { - dict.insert("solc".to_string(), v); - } - } - - if let Some(v) = dict.remove("odyssey") { - dict.insert("alphanet".to_string(), v); - } - map.insert(profile, dict); - } - Ok(map) - } -} - -/// A provider that sets the `src` and `output` path depending on their existence. -struct DappHardhatDirProvider<'a>(&'a Path); - -impl Provider for DappHardhatDirProvider<'_> { - fn metadata(&self) -> Metadata { - Metadata::named("Dapp Hardhat dir compat") - } - - fn data(&self) -> Result, Error> { - let mut dict = Dict::new(); - dict.insert( - "src".to_string(), - ProjectPathsConfig::find_source_dir(self.0) - .file_name() - .unwrap() - .to_string_lossy() - .to_string() - .into(), - ); - dict.insert( - "out".to_string(), - ProjectPathsConfig::find_artifacts_dir(self.0) - .file_name() - .unwrap() - .to_string_lossy() - .to_string() - .into(), - ); - - // detect libs folders: - // if `lib` _and_ `node_modules` exists: include both - // if only `node_modules` exists: include `node_modules` - // include `lib` otherwise - let mut libs = vec![]; - let node_modules = self.0.join("node_modules"); - let lib = self.0.join("lib"); - if node_modules.exists() { - if lib.exists() { - libs.push(lib.file_name().unwrap().to_string_lossy().to_string()); - } - libs.push(node_modules.file_name().unwrap().to_string_lossy().to_string()); - } else { - libs.push(lib.file_name().unwrap().to_string_lossy().to_string()); - } - - dict.insert("libs".to_string(), libs.into()); - - Ok(Map::from([(Config::selected_profile(), dict)])) - } -} - -/// A provider that checks for DAPP_ env vars that are named differently than FOUNDRY_ -struct DappEnvCompatProvider; - -impl Provider for DappEnvCompatProvider { - fn metadata(&self) -> Metadata { - Metadata::named("Dapp env compat") - } - - fn data(&self) -> Result, Error> { - use serde::de::Error as _; - use std::env; - - let mut dict = Dict::new(); - if let Ok(val) = env::var("DAPP_TEST_NUMBER") { - dict.insert( - "block_number".to_string(), - val.parse::().map_err(figment::Error::custom)?.into(), - ); - } - if let Ok(val) = env::var("DAPP_TEST_ADDRESS") { - dict.insert("sender".to_string(), val.into()); - } - if let Ok(val) = env::var("DAPP_FORK_BLOCK") { - dict.insert( - "fork_block_number".to_string(), - val.parse::().map_err(figment::Error::custom)?.into(), - ); - } else if let Ok(val) = env::var("DAPP_TEST_NUMBER") { - dict.insert( - "fork_block_number".to_string(), - val.parse::().map_err(figment::Error::custom)?.into(), - ); - } - if let Ok(val) = env::var("DAPP_TEST_TIMESTAMP") { - dict.insert( - "block_timestamp".to_string(), - val.parse::().map_err(figment::Error::custom)?.into(), - ); - } - if let Ok(val) = env::var("DAPP_BUILD_OPTIMIZE_RUNS") { - dict.insert( - "optimizer_runs".to_string(), - val.parse::().map_err(figment::Error::custom)?.into(), - ); - } - if let Ok(val) = env::var("DAPP_BUILD_OPTIMIZE") { - // Activate Solidity optimizer (0 or 1) - let val = val.parse::().map_err(figment::Error::custom)?; - 
if val > 1 { - return Err( - format!("Invalid $DAPP_BUILD_OPTIMIZE value `{val}`, expected 0 or 1").into() - ); - } - dict.insert("optimizer".to_string(), (val == 1).into()); - } - - // libraries in env vars either as `[..]` or single string separated by comma - if let Ok(val) = env::var("DAPP_LIBRARIES").or_else(|_| env::var("FOUNDRY_LIBRARIES")) { - dict.insert("libraries".to_string(), utils::to_array_value(&val)?); - } - - let mut fuzz_dict = Dict::new(); - if let Ok(val) = env::var("DAPP_TEST_FUZZ_RUNS") { - fuzz_dict.insert( - "runs".to_string(), - val.parse::().map_err(figment::Error::custom)?.into(), - ); - } - dict.insert("fuzz".to_string(), fuzz_dict.into()); - - let mut invariant_dict = Dict::new(); - if let Ok(val) = env::var("DAPP_TEST_DEPTH") { - invariant_dict.insert( - "depth".to_string(), - val.parse::().map_err(figment::Error::custom)?.into(), - ); - } - dict.insert("invariant".to_string(), invariant_dict.into()); - - Ok(Map::from([(Config::selected_profile(), dict)])) - } -} - -/// Renames a profile from `from` to `to`. -/// -/// For example given: -/// -/// ```toml -/// [from] -/// key = "value" -/// ``` -/// -/// RenameProfileProvider will output -/// -/// ```toml -/// [to] -/// key = "value" -/// ``` -struct RenameProfileProvider
<P>
{ - provider: P, - from: Profile, - to: Profile, -} - -impl

<P> RenameProfileProvider<P>
{ - pub fn new(provider: P, from: impl Into, to: impl Into) -> Self { - Self { provider, from: from.into(), to: to.into() } - } -} - -impl Provider for RenameProfileProvider
<P>
{ - fn metadata(&self) -> Metadata { - self.provider.metadata() - } - fn data(&self) -> Result, Error> { - let mut data = self.provider.data()?; - if let Some(data) = data.remove(&self.from) { - return Ok(Map::from([(self.to.clone(), data)])); - } - Ok(Default::default()) - } - fn profile(&self) -> Option { - Some(self.to.clone()) - } -} - -/// Unwraps a profile reducing the key depth -/// -/// For example given: -/// -/// ```toml -/// [wrapping_key.profile] -/// key = "value" -/// ``` -/// -/// UnwrapProfileProvider will output: -/// -/// ```toml -/// [profile] -/// key = "value" -/// ``` -struct UnwrapProfileProvider
<P>
{ - provider: P, - wrapping_key: Profile, - profile: Profile, -} - -impl

<P> UnwrapProfileProvider<P>
{ - pub fn new(provider: P, wrapping_key: impl Into, profile: impl Into) -> Self { - Self { provider, wrapping_key: wrapping_key.into(), profile: profile.into() } - } -} - -impl Provider for UnwrapProfileProvider
<P>
{ - fn metadata(&self) -> Metadata { - self.provider.metadata() - } - fn data(&self) -> Result, Error> { - self.provider.data().and_then(|mut data| { - if let Some(profiles) = data.remove(&self.wrapping_key) { - for (profile_str, profile_val) in profiles { - let profile = Profile::new(&profile_str); - if profile != self.profile { - continue; - } - match profile_val { - Value::Dict(_, dict) => return Ok(profile.collect(dict)), - bad_val => { - let mut err = Error::from(figment::error::Kind::InvalidType( - bad_val.to_actual(), - "dict".into(), - )); - err.metadata = Some(self.provider.metadata()); - err.profile = Some(self.profile.clone()); - return Err(err); - } - } - } - } - Ok(Default::default()) - }) - } - fn profile(&self) -> Option { - Some(self.profile.clone()) - } -} - -/// Wraps a profile in another profile -/// -/// For example given: -/// -/// ```toml -/// [profile] -/// key = "value" -/// ``` -/// -/// WrapProfileProvider will output: -/// -/// ```toml -/// [wrapping_key.profile] -/// key = "value" -/// ``` -struct WrapProfileProvider
<P>
{ - provider: P, - wrapping_key: Profile, - profile: Profile, -} - -impl

<P> WrapProfileProvider<P>
{ - pub fn new(provider: P, wrapping_key: impl Into, profile: impl Into) -> Self { - Self { provider, wrapping_key: wrapping_key.into(), profile: profile.into() } - } -} - -impl Provider for WrapProfileProvider
<P>
{ - fn metadata(&self) -> Metadata { - self.provider.metadata() - } - fn data(&self) -> Result, Error> { - if let Some(inner) = self.provider.data()?.remove(&self.profile) { - let value = Value::from(inner); - let dict = [(self.profile.to_string().to_snake_case(), value)].into_iter().collect(); - Ok(self.wrapping_key.collect(dict)) - } else { - Ok(Default::default()) - } - } - fn profile(&self) -> Option { - Some(self.profile.clone()) - } -} - -/// Extracts the profile from the `profile` key and using the original key as backup, merging -/// values where necessary -/// -/// For example given: -/// -/// ```toml -/// [profile.cool] -/// key = "value" -/// -/// [cool] -/// key2 = "value2" -/// ``` -/// -/// OptionalStrictProfileProvider will output: -/// -/// ```toml -/// [cool] -/// key = "value" -/// key2 = "value2" -/// ``` -/// -/// And emit a deprecation warning -struct OptionalStrictProfileProvider
<P>
{ - provider: P, - profiles: Vec, -} - -impl

<P> OptionalStrictProfileProvider<P>
{ - pub const PROFILE_PROFILE: Profile = Profile::const_new("profile"); - - pub fn new(provider: P, profiles: impl IntoIterator>) -> Self { - Self { provider, profiles: profiles.into_iter().map(|profile| profile.into()).collect() } - } -} - -impl Provider for OptionalStrictProfileProvider
<P>
{ - fn metadata(&self) -> Metadata { - self.provider.metadata() - } - fn data(&self) -> Result, Error> { - let mut figment = Figment::from(&self.provider); - for profile in &self.profiles { - figment = figment.merge(UnwrapProfileProvider::new( - &self.provider, - Self::PROFILE_PROFILE, - profile.clone(), - )); - } - figment.data().map_err(|err| { - // figment does tag metadata and tries to map metadata to an error, since we use a new - // figment in this provider this new figment does not know about the metadata of the - // provider and can't map the metadata to the error. Therefore we return the root error - // if this error originated in the provider's data. - if let Err(root_err) = self.provider.data() { - return root_err; - } - err - }) - } - fn profile(&self) -> Option { - self.profiles.last().cloned() - } -} - -trait ProviderExt: Provider { - fn rename( - &self, - from: impl Into, - to: impl Into, - ) -> RenameProfileProvider<&Self> { - RenameProfileProvider::new(self, from, to) - } - - fn wrap( - &self, - wrapping_key: impl Into, - profile: impl Into, - ) -> WrapProfileProvider<&Self> { - WrapProfileProvider::new(self, wrapping_key, profile) - } - - fn strict_select( - &self, - profiles: impl IntoIterator>, - ) -> OptionalStrictProfileProvider<&Self> { - OptionalStrictProfileProvider::new(self, profiles) - } - - fn fallback( - &self, - profile: impl Into, - fallback: impl Into, - ) -> FallbackProfileProvider<&Self> { - FallbackProfileProvider::new(self, profile, fallback) - } -} -impl ProviderExt for P {} - /// A subset of the foundry `Config` /// used to initialize a `foundry.toml` file /// @@ -3020,6 +2547,10 @@ fn canonic(path: impl Into) -> PathBuf { foundry_compilers::utils::canonicalize(&path).unwrap_or(path) } +fn root_default() -> PathBuf { + ".".into() +} + #[cfg(test)] mod tests { use super::*; @@ -3099,9 +2630,39 @@ mod tests { #[test] fn test_figment_is_default() { figment::Jail::expect_with(|_| { - let mut default: Config = Config::figment().extract().unwrap(); - default.profile = Config::default().profile; - assert_eq!(default, Config::default()); + let mut default: Config = Config::figment().extract()?; + let default2 = Config::default(); + default.profile = default2.profile.clone(); + default.profiles = default2.profiles.clone(); + assert_eq!(default, default2); + Ok(()) + }); + } + + #[test] + fn figment_profiles() { + figment::Jail::expect_with(|jail| { + jail.create_file( + "foundry.toml", + r" + [foo.baz] + libs = ['node_modules', 'lib'] + + [profile.default] + libs = ['node_modules', 'lib'] + + [profile.ci] + libs = ['node_modules', 'lib'] + + [profile.local] + libs = ['node_modules', 'lib'] + ", + )?; + + let config = crate::Config::load(); + let expected: &[figment::Profile] = &["ci".into(), "default".into(), "local".into()]; + assert_eq!(config.profiles, expected); + Ok(()) }); } @@ -3210,7 +2771,6 @@ mod tests { jail.set_env("FOUNDRY_PROFILE", "custom"); let config = Config::load(); - assert_eq!(config.src, PathBuf::from("customsrc")); assert_eq!(config.test, PathBuf::from("defaulttest")); assert_eq!(config.libs, vec![PathBuf::from("lib"), PathBuf::from("node_modules")]); diff --git a/crates/config/src/providers/ext.rs b/crates/config/src/providers/ext.rs new file mode 100644 index 000000000..58f418469 --- /dev/null +++ b/crates/config/src/providers/ext.rs @@ -0,0 +1,562 @@ +use crate::{utils, Config}; +use figment::{ + providers::{Env, Format, Toml}, + value::{Dict, Map, Value}, + Error, Figment, Metadata, Profile, Provider, +}; +use 
foundry_compilers::ProjectPathsConfig; +use inflector::Inflector; +use std::path::{Path, PathBuf}; + +pub(crate) trait ProviderExt: Provider + Sized { + fn rename( + self, + from: impl Into, + to: impl Into, + ) -> RenameProfileProvider { + RenameProfileProvider::new(self, from, to) + } + + fn wrap( + self, + wrapping_key: impl Into, + profile: impl Into, + ) -> WrapProfileProvider { + WrapProfileProvider::new(self, wrapping_key, profile) + } + + fn strict_select( + self, + profiles: impl IntoIterator>, + ) -> OptionalStrictProfileProvider { + OptionalStrictProfileProvider::new(self, profiles) + } + + fn fallback( + self, + profile: impl Into, + fallback: impl Into, + ) -> FallbackProfileProvider { + FallbackProfileProvider::new(self, profile, fallback) + } +} + +impl ProviderExt for P {} + +/// A convenience provider to retrieve a toml file. +/// This will return an error if the env var is set but the file does not exist +pub(crate) struct TomlFileProvider { + pub env_var: Option<&'static str>, + pub default: PathBuf, + pub cache: Option, Error>>, +} + +impl TomlFileProvider { + pub(crate) fn new(env_var: Option<&'static str>, default: impl Into) -> Self { + Self { env_var, default: default.into(), cache: None } + } + + fn env_val(&self) -> Option { + self.env_var.and_then(Env::var) + } + + fn file(&self) -> PathBuf { + self.env_val().map(PathBuf::from).unwrap_or_else(|| self.default.clone()) + } + + fn is_missing(&self) -> bool { + if let Some(file) = self.env_val() { + let path = Path::new(&file); + if !path.exists() { + return true; + } + } + false + } + + pub(crate) fn cached(mut self) -> Self { + self.cache = Some(self.read()); + self + } + + fn read(&self) -> Result, Error> { + use serde::de::Error as _; + if let Some(file) = self.env_val() { + let path = Path::new(&file); + if !path.exists() { + return Err(Error::custom(format!( + "Config file `{}` set in env var `{}` does not exist", + file, + self.env_var.unwrap() + ))); + } + Toml::file(file) + } else { + Toml::file(&self.default) + } + .nested() + .data() + } +} + +impl Provider for TomlFileProvider { + fn metadata(&self) -> Metadata { + if self.is_missing() { + Metadata::named("TOML file provider") + } else { + Toml::file(self.file()).nested().metadata() + } + } + + fn data(&self) -> Result, Error> { + if let Some(cache) = self.cache.as_ref() { + cache.clone() + } else { + self.read() + } + } +} + +/// A Provider that ensures all keys are snake case if they're not standalone sections, See +/// `Config::STANDALONE_SECTIONS` +pub(crate) struct ForcedSnakeCaseData
<P>
(pub(crate) P); +impl<P: Provider> Provider for ForcedSnakeCaseData
<P>
{ + fn metadata(&self) -> Metadata { + self.0.metadata() + } + + fn data(&self) -> Result, Error> { + let mut map = Map::new(); + for (profile, dict) in self.0.data()? { + if Config::STANDALONE_SECTIONS.contains(&profile.as_ref()) { + // don't force snake case for keys in standalone sections + map.insert(profile, dict); + continue; + } + map.insert(profile, dict.into_iter().map(|(k, v)| (k.to_snake_case(), v)).collect()); + } + Ok(map) + } +} + +/// A Provider that handles breaking changes in toml files +pub(crate) struct BackwardsCompatTomlProvider
<P>
(pub(crate) P); +impl<P: Provider> Provider for BackwardsCompatTomlProvider
<P>
{ + fn metadata(&self) -> Metadata { + self.0.metadata() + } + + fn data(&self) -> Result, Error> { + let mut map = Map::new(); + let solc_env = std::env::var("FOUNDRY_SOLC_VERSION") + .or_else(|_| std::env::var("DAPP_SOLC_VERSION")) + .map(Value::from) + .ok(); + for (profile, mut dict) in self.0.data()? { + if let Some(v) = solc_env.clone() { + // ENV var takes precedence over config file + dict.insert("solc".to_string(), v); + } else if let Some(v) = dict.remove("solc_version") { + // only insert older variant if not already included + if !dict.contains_key("solc") { + dict.insert("solc".to_string(), v); + } + } + + if let Some(v) = dict.remove("odyssey") { + dict.insert("odyssey".to_string(), v); + } + map.insert(profile, dict); + } + Ok(map) + } +} + +/// A provider that sets the `src` and `output` path depending on their existence. +pub(crate) struct DappHardhatDirProvider<'a>(pub(crate) &'a Path); + +impl Provider for DappHardhatDirProvider<'_> { + fn metadata(&self) -> Metadata { + Metadata::named("Dapp Hardhat dir compat") + } + + fn data(&self) -> Result, Error> { + let mut dict = Dict::new(); + dict.insert( + "src".to_string(), + ProjectPathsConfig::find_source_dir(self.0) + .file_name() + .unwrap() + .to_string_lossy() + .to_string() + .into(), + ); + dict.insert( + "out".to_string(), + ProjectPathsConfig::find_artifacts_dir(self.0) + .file_name() + .unwrap() + .to_string_lossy() + .to_string() + .into(), + ); + + // detect libs folders: + // if `lib` _and_ `node_modules` exists: include both + // if only `node_modules` exists: include `node_modules` + // include `lib` otherwise + let mut libs = vec![]; + let node_modules = self.0.join("node_modules"); + let lib = self.0.join("lib"); + if node_modules.exists() { + if lib.exists() { + libs.push(lib.file_name().unwrap().to_string_lossy().to_string()); + } + libs.push(node_modules.file_name().unwrap().to_string_lossy().to_string()); + } else { + libs.push(lib.file_name().unwrap().to_string_lossy().to_string()); + } + + dict.insert("libs".to_string(), libs.into()); + + Ok(Map::from([(Config::selected_profile(), dict)])) + } +} + +/// A provider that checks for DAPP_ env vars that are named differently than FOUNDRY_ +pub(crate) struct DappEnvCompatProvider; + +impl Provider for DappEnvCompatProvider { + fn metadata(&self) -> Metadata { + Metadata::named("Dapp env compat") + } + + fn data(&self) -> Result, Error> { + use serde::de::Error as _; + use std::env; + + let mut dict = Dict::new(); + if let Ok(val) = env::var("DAPP_TEST_NUMBER") { + dict.insert( + "block_number".to_string(), + val.parse::().map_err(figment::Error::custom)?.into(), + ); + } + if let Ok(val) = env::var("DAPP_TEST_ADDRESS") { + dict.insert("sender".to_string(), val.into()); + } + if let Ok(val) = env::var("DAPP_FORK_BLOCK") { + dict.insert( + "fork_block_number".to_string(), + val.parse::().map_err(figment::Error::custom)?.into(), + ); + } else if let Ok(val) = env::var("DAPP_TEST_NUMBER") { + dict.insert( + "fork_block_number".to_string(), + val.parse::().map_err(figment::Error::custom)?.into(), + ); + } + if let Ok(val) = env::var("DAPP_TEST_TIMESTAMP") { + dict.insert( + "block_timestamp".to_string(), + val.parse::().map_err(figment::Error::custom)?.into(), + ); + } + if let Ok(val) = env::var("DAPP_BUILD_OPTIMIZE_RUNS") { + dict.insert( + "optimizer_runs".to_string(), + val.parse::().map_err(figment::Error::custom)?.into(), + ); + } + if let Ok(val) = env::var("DAPP_BUILD_OPTIMIZE") { + // Activate Solidity optimizer (0 or 1) + let val = 
val.parse::().map_err(figment::Error::custom)?; + if val > 1 { + return Err( + format!("Invalid $DAPP_BUILD_OPTIMIZE value `{val}`, expected 0 or 1").into() + ); + } + dict.insert("optimizer".to_string(), (val == 1).into()); + } + + // libraries in env vars either as `[..]` or single string separated by comma + if let Ok(val) = env::var("DAPP_LIBRARIES").or_else(|_| env::var("FOUNDRY_LIBRARIES")) { + dict.insert("libraries".to_string(), utils::to_array_value(&val)?); + } + + let mut fuzz_dict = Dict::new(); + if let Ok(val) = env::var("DAPP_TEST_FUZZ_RUNS") { + fuzz_dict.insert( + "runs".to_string(), + val.parse::().map_err(figment::Error::custom)?.into(), + ); + } + dict.insert("fuzz".to_string(), fuzz_dict.into()); + + let mut invariant_dict = Dict::new(); + if let Ok(val) = env::var("DAPP_TEST_DEPTH") { + invariant_dict.insert( + "depth".to_string(), + val.parse::().map_err(figment::Error::custom)?.into(), + ); + } + dict.insert("invariant".to_string(), invariant_dict.into()); + + Ok(Map::from([(Config::selected_profile(), dict)])) + } +} + +/// Renames a profile from `from` to `to`. +/// +/// For example given: +/// +/// ```toml +/// [from] +/// key = "value" +/// ``` +/// +/// RenameProfileProvider will output +/// +/// ```toml +/// [to] +/// key = "value" +/// ``` +pub(crate) struct RenameProfileProvider
<P>
{ + provider: P, + from: Profile, + to: Profile, +} + +impl
<P>
RenameProfileProvider
<P>
{ + pub(crate) fn new(provider: P, from: impl Into, to: impl Into) -> Self { + Self { provider, from: from.into(), to: to.into() } + } +} + +impl Provider for RenameProfileProvider
<P>
{ + fn metadata(&self) -> Metadata { + self.provider.metadata() + } + fn data(&self) -> Result, Error> { + let mut data = self.provider.data()?; + if let Some(data) = data.remove(&self.from) { + return Ok(Map::from([(self.to.clone(), data)])); + } + Ok(Default::default()) + } + fn profile(&self) -> Option { + Some(self.to.clone()) + } +} + +/// Unwraps a profile reducing the key depth +/// +/// For example given: +/// +/// ```toml +/// [wrapping_key.profile] +/// key = "value" +/// ``` +/// +/// UnwrapProfileProvider will output: +/// +/// ```toml +/// [profile] +/// key = "value" +/// ``` +struct UnwrapProfileProvider
<P>
{ + provider: P, + wrapping_key: Profile, + profile: Profile, +} + +impl
<P>
UnwrapProfileProvider
<P>
{ + pub fn new(provider: P, wrapping_key: impl Into, profile: impl Into) -> Self { + Self { provider, wrapping_key: wrapping_key.into(), profile: profile.into() } + } +} + +impl Provider for UnwrapProfileProvider
<P>
{ + fn metadata(&self) -> Metadata { + self.provider.metadata() + } + fn data(&self) -> Result, Error> { + self.provider.data().and_then(|mut data| { + if let Some(profiles) = data.remove(&self.wrapping_key) { + for (profile_str, profile_val) in profiles { + let profile = Profile::new(&profile_str); + if profile != self.profile { + continue; + } + match profile_val { + Value::Dict(_, dict) => return Ok(profile.collect(dict)), + bad_val => { + let mut err = Error::from(figment::error::Kind::InvalidType( + bad_val.to_actual(), + "dict".into(), + )); + err.metadata = Some(self.provider.metadata()); + err.profile = Some(self.profile.clone()); + return Err(err); + } + } + } + } + Ok(Default::default()) + }) + } + fn profile(&self) -> Option { + Some(self.profile.clone()) + } +} + +/// Wraps a profile in another profile +/// +/// For example given: +/// +/// ```toml +/// [profile] +/// key = "value" +/// ``` +/// +/// WrapProfileProvider will output: +/// +/// ```toml +/// [wrapping_key.profile] +/// key = "value" +/// ``` +pub(crate) struct WrapProfileProvider
<P>
{ + provider: P, + wrapping_key: Profile, + profile: Profile, +} + +impl
<P>
WrapProfileProvider
<P>
{ + pub fn new(provider: P, wrapping_key: impl Into, profile: impl Into) -> Self { + Self { provider, wrapping_key: wrapping_key.into(), profile: profile.into() } + } +} + +impl Provider for WrapProfileProvider
<P>
{ + fn metadata(&self) -> Metadata { + self.provider.metadata() + } + fn data(&self) -> Result, Error> { + if let Some(inner) = self.provider.data()?.remove(&self.profile) { + let value = Value::from(inner); + let dict = [(self.profile.to_string().to_snake_case(), value)].into_iter().collect(); + Ok(self.wrapping_key.collect(dict)) + } else { + Ok(Default::default()) + } + } + fn profile(&self) -> Option { + Some(self.profile.clone()) + } +} + +/// Extracts the profile from the `profile` key and using the original key as backup, merging +/// values where necessary +/// +/// For example given: +/// +/// ```toml +/// [profile.cool] +/// key = "value" +/// +/// [cool] +/// key2 = "value2" +/// ``` +/// +/// OptionalStrictProfileProvider will output: +/// +/// ```toml +/// [cool] +/// key = "value" +/// key2 = "value2" +/// ``` +/// +/// And emit a deprecation warning +pub(crate) struct OptionalStrictProfileProvider
<P>
{ + provider: P, + profiles: Vec<Profile>, +} + +impl
<P>
OptionalStrictProfileProvider
<P>
{ + pub const PROFILE_PROFILE: Profile = Profile::const_new("profile"); + + pub fn new(provider: P, profiles: impl IntoIterator>) -> Self { + Self { provider, profiles: profiles.into_iter().map(|profile| profile.into()).collect() } + } +} + +impl Provider for OptionalStrictProfileProvider
<P>
{ + fn metadata(&self) -> Metadata { + self.provider.metadata() + } + fn data(&self) -> Result, Error> { + let mut figment = Figment::from(&self.provider); + for profile in &self.profiles { + figment = figment.merge(UnwrapProfileProvider::new( + &self.provider, + Self::PROFILE_PROFILE, + profile.clone(), + )); + } + figment.data().map_err(|err| { + // figment does tag metadata and tries to map metadata to an error, since we use a new + // figment in this provider this new figment does not know about the metadata of the + // provider and can't map the metadata to the error. Therefore we return the root error + // if this error originated in the provider's data. + if let Err(root_err) = self.provider.data() { + return root_err; + } + err + }) + } + fn profile(&self) -> Option { + self.profiles.last().cloned() + } +} + +/// Extracts the profile from the `profile` key and sets unset values according to the fallback +/// provider +pub struct FallbackProfileProvider
<P>
{ + provider: P, + profile: Profile, + fallback: Profile, +} + +impl
<P>
FallbackProfileProvider
<P>
{ + /// Creates a new fallback profile provider. + pub fn new(provider: P, profile: impl Into, fallback: impl Into) -> Self { + Self { provider, profile: profile.into(), fallback: fallback.into() } + } +} + +impl Provider for FallbackProfileProvider
<P>
{ + fn metadata(&self) -> Metadata { + self.provider.metadata() + } + + fn data(&self) -> Result, Error> { + let data = self.provider.data()?; + if let Some(fallback) = data.get(&self.fallback) { + let mut inner = data.get(&self.profile).cloned().unwrap_or_default(); + for (k, v) in fallback.iter() { + if !inner.contains_key(k) { + inner.insert(k.to_owned(), v.clone()); + } + } + Ok(self.profile.collect(inner)) + } else { + Ok(data) + } + } + + fn profile(&self) -> Option { + Some(self.profile.clone()) + } +} diff --git a/crates/config/src/providers/mod.rs b/crates/config/src/providers/mod.rs index 1f9f5c88e..9fec7d290 100644 --- a/crates/config/src/providers/mod.rs +++ b/crates/config/src/providers/mod.rs @@ -1,157 +1,10 @@ //! Config providers. -use crate::{Config, Warning, DEPRECATIONS}; -use figment::{ - value::{Dict, Map, Value}, - Error, Figment, Metadata, Profile, Provider, -}; -use std::collections::BTreeMap; +mod ext; +pub use ext::*; -/// Remappings provider -pub mod remappings; +mod remappings; +pub use remappings::*; -/// Generate warnings for unknown sections and deprecated keys -pub struct WarningsProvider
<P>
{ - provider: P, - profile: Profile, - old_warnings: Result<Vec<Warning>, Error>, -} - -impl
<P>
WarningsProvider
<P>
{ - const WARNINGS_KEY: &'static str = "__warnings"; - - /// Creates a new warnings provider. - pub fn new( - provider: P, - profile: impl Into, - old_warnings: Result, Error>, - ) -> Self { - Self { provider, profile: profile.into(), old_warnings } - } - - /// Creates a new figment warnings provider. - pub fn for_figment(provider: P, figment: &Figment) -> Self { - let old_warnings = { - let warnings_res = figment.extract_inner(Self::WARNINGS_KEY); - if warnings_res.as_ref().err().map(|err| err.missing()).unwrap_or(false) { - Ok(vec![]) - } else { - warnings_res - } - }; - Self::new(provider, figment.profile().clone(), old_warnings) - } -} - -impl WarningsProvider
<P>
{ - /// Collects all warnings. - pub fn collect_warnings(&self) -> Result, Error> { - let data = self.provider.data().unwrap_or_default(); - - let mut out = self.old_warnings.clone()?; - - // Add warning for unknown sections. - out.extend( - data.keys() - .filter(|k| { - **k != Config::PROFILE_SECTION && - !Config::STANDALONE_SECTIONS.iter().any(|s| s == k) - }) - .map(|unknown_section| { - let source = self.provider.metadata().source.map(|s| s.to_string()); - Warning::UnknownSection { unknown_section: unknown_section.clone(), source } - }), - ); - - // Add warning for deprecated keys. - let deprecated_key_warning = |key| { - DEPRECATIONS.iter().find_map(|(deprecated_key, new_value)| { - if key == *deprecated_key { - Some(Warning::DeprecatedKey { - old: deprecated_key.to_string(), - new: new_value.to_string(), - }) - } else { - None - } - }) - }; - let profiles = data - .iter() - .filter(|(profile, _)| **profile == Config::PROFILE_SECTION) - .map(|(_, dict)| dict); - out.extend(profiles.clone().flat_map(BTreeMap::keys).filter_map(deprecated_key_warning)); - out.extend( - profiles - .filter_map(|dict| dict.get(self.profile.as_str().as_str())) - .filter_map(Value::as_dict) - .flat_map(BTreeMap::keys) - .filter_map(deprecated_key_warning), - ); - - Ok(out) - } -} - -impl Provider for WarningsProvider
<P>
{ - fn metadata(&self) -> Metadata { - if let Some(source) = self.provider.metadata().source { - Metadata::from("Warnings", source) - } else { - Metadata::named("Warnings") - } - } - - fn data(&self) -> Result, Error> { - Ok(Map::from([( - self.profile.clone(), - Dict::from([( - Self::WARNINGS_KEY.to_string(), - Value::serialize(self.collect_warnings()?)?, - )]), - )])) - } - - fn profile(&self) -> Option { - Some(self.profile.clone()) - } -} - -/// Extracts the profile from the `profile` key and sets unset values according to the fallback -/// provider -pub struct FallbackProfileProvider
<P>
{ - provider: P, - profile: Profile, - fallback: Profile, -} - -impl
<P>
FallbackProfileProvider
<P>
{ - /// Creates a new fallback profile provider. - pub fn new(provider: P, profile: impl Into, fallback: impl Into) -> Self { - Self { provider, profile: profile.into(), fallback: fallback.into() } - } -} - -impl Provider for FallbackProfileProvider
<P>
{ - fn metadata(&self) -> Metadata { - self.provider.metadata() - } - - fn data(&self) -> Result, Error> { - if let Some(fallback) = self.provider.data()?.get(&self.fallback) { - let mut inner = self.provider.data()?.remove(&self.profile).unwrap_or_default(); - for (k, v) in fallback.iter() { - if !inner.contains_key(k) { - inner.insert(k.to_owned(), v.clone()); - } - } - Ok(self.profile.collect(inner)) - } else { - self.provider.data() - } - } - - fn profile(&self) -> Option { - Some(self.profile.clone()) - } -} +mod warnings; +pub use warnings::*; diff --git a/crates/config/src/providers/remappings.rs b/crates/config/src/providers/remappings.rs index bb4c969e6..d96c15038 100644 --- a/crates/config/src/providers/remappings.rs +++ b/crates/config/src/providers/remappings.rs @@ -39,7 +39,12 @@ impl Remappings { pub fn with_figment(mut self, figment: &Figment) -> Self { let mut add_project_remapping = |path: &str| { if let Ok(path) = figment.find_value(path) { - if let Some(remapping) = path.into_string().and_then(get_dir_remapping) { + if let Some(path) = path.into_string() { + let remapping = Remapping { + context: None, + name: format!("{path}/"), + path: format!("{path}/"), + }; self.project_paths.push(remapping); } } @@ -114,7 +119,7 @@ pub struct RemappingsProvider<'a> { pub lib_paths: Cow<'a, Vec>, /// the root path used to turn an absolute `Remapping`, as we're getting it from /// `Remapping::find_many` into a relative one. - pub root: &'a PathBuf, + pub root: &'a Path, /// This contains either: /// - previously set remappings /// - a `MissingField` error, which means previous provider didn't set the "remappings" field diff --git a/crates/config/src/providers/warnings.rs b/crates/config/src/providers/warnings.rs new file mode 100644 index 000000000..944225be1 --- /dev/null +++ b/crates/config/src/providers/warnings.rs @@ -0,0 +1,109 @@ +use crate::{Config, Warning, DEPRECATIONS}; +use figment::{ + value::{Dict, Map, Value}, + Error, Figment, Metadata, Profile, Provider, +}; +use std::collections::BTreeMap; + +/// Generate warnings for unknown sections and deprecated keys +pub struct WarningsProvider
<P>
{ + provider: P, + profile: Profile, + old_warnings: Result<Vec<Warning>, Error>, +} + +impl<P: Provider> WarningsProvider
<P>
{ + const WARNINGS_KEY: &'static str = "__warnings"; + + /// Creates a new warnings provider. + pub fn new( + provider: P, + profile: impl Into, + old_warnings: Result, Error>, + ) -> Self { + Self { provider, profile: profile.into(), old_warnings } + } + + /// Creates a new figment warnings provider. + pub fn for_figment(provider: P, figment: &Figment) -> Self { + let old_warnings = { + let warnings_res = figment.extract_inner(Self::WARNINGS_KEY); + if warnings_res.as_ref().err().map(|err| err.missing()).unwrap_or(false) { + Ok(vec![]) + } else { + warnings_res + } + }; + Self::new(provider, figment.profile().clone(), old_warnings) + } + + /// Collects all warnings. + pub fn collect_warnings(&self) -> Result, Error> { + let data = self.provider.data().unwrap_or_default(); + + let mut out = self.old_warnings.clone()?; + + // Add warning for unknown sections. + out.extend( + data.keys() + .filter(|k| { + **k != Config::PROFILE_SECTION && + !Config::STANDALONE_SECTIONS.iter().any(|s| s == k) + }) + .map(|unknown_section| { + let source = self.provider.metadata().source.map(|s| s.to_string()); + Warning::UnknownSection { unknown_section: unknown_section.clone(), source } + }), + ); + + // Add warning for deprecated keys. + let deprecated_key_warning = |key| { + DEPRECATIONS.iter().find_map(|(deprecated_key, new_value)| { + if key == *deprecated_key { + Some(Warning::DeprecatedKey { + old: deprecated_key.to_string(), + new: new_value.to_string(), + }) + } else { + None + } + }) + }; + let profiles = data + .iter() + .filter(|(profile, _)| **profile == Config::PROFILE_SECTION) + .map(|(_, dict)| dict); + out.extend(profiles.clone().flat_map(BTreeMap::keys).filter_map(deprecated_key_warning)); + out.extend( + profiles + .filter_map(|dict| dict.get(self.profile.as_str().as_str())) + .filter_map(Value::as_dict) + .flat_map(BTreeMap::keys) + .filter_map(deprecated_key_warning), + ); + + Ok(out) + } +} + +impl Provider for WarningsProvider
<P>
{ + fn metadata(&self) -> Metadata { + if let Some(source) = self.provider.metadata().source { + Metadata::from("Warnings", source) + } else { + Metadata::named("Warnings") + } + } + + fn data(&self) -> Result, Error> { + let warnings = self.collect_warnings()?; + Ok(Map::from([( + self.profile.clone(), + Dict::from([(Self::WARNINGS_KEY.to_string(), Value::serialize(warnings)?)]), + )])) + } + + fn profile(&self) -> Option { + Some(self.profile.clone()) + } +} diff --git a/crates/config/src/utils.rs b/crates/config/src/utils.rs index 2117834f4..43b9b7468 100644 --- a/crates/config/src/utils.rs +++ b/crates/config/src/utils.rs @@ -14,7 +14,6 @@ use std::{ path::{Path, PathBuf}, str::FromStr, }; -use toml_edit::{DocumentMut, Item}; /// Loads the config for the current project workspace pub fn load_config() -> Config { @@ -186,45 +185,6 @@ pub(crate) fn get_dir_remapping(dir: impl AsRef) -> Option { } } -/// Returns all available `profile` keys in a given `.toml` file -/// -/// i.e. The toml below would return would return `["default", "ci", "local"]` -/// ```toml -/// [profile.default] -/// ... -/// [profile.ci] -/// ... -/// [profile.local] -/// ``` -pub fn get_available_profiles(toml_path: impl AsRef) -> eyre::Result> { - let mut result = vec![Config::DEFAULT_PROFILE.to_string()]; - - if !toml_path.as_ref().exists() { - return Ok(result) - } - - let doc = read_toml(toml_path)?; - - if let Some(Item::Table(profiles)) = doc.as_table().get(Config::PROFILE_SECTION) { - for (profile, _) in profiles { - let p = profile.to_string(); - if !result.contains(&p) { - result.push(p); - } - } - } - - Ok(result) -} - -/// Returns a [`toml_edit::Document`] loaded from the provided `path`. -/// Can raise an error in case of I/O or parsing errors. -fn read_toml(path: impl AsRef) -> eyre::Result { - let path = path.as_ref().to_owned(); - let doc: DocumentMut = std::fs::read_to_string(path)?.parse()?; - Ok(doc) -} - /// Deserialize stringified percent. The value must be between 0 and 100 inclusive. 
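With `get_available_profiles` removed, profile discovery now lives in the figment-based config itself (the new `profiles` field exercised by the `figment_profiles` test above). As a minimal sketch of the underlying mechanism, here is a plain-figment example, independent of Foundry's internal provider types; the file contents and values are illustrative only:

```rust
use figment::{
    providers::{Format, Toml},
    Figment, Profile,
};

fn main() {
    figment::Jail::expect_with(|jail| {
        jail.create_file(
            "foundry.toml",
            r#"
                [profile.default]
                optimizer_runs = 200

                [profile.ci]
                optimizer_runs = 999
            "#,
        )?;

        // With `.nested()`, each top-level table becomes a figment profile, so the
        // `[profile.*]` tables end up nested under a single "profile" profile.
        // Foundry's unwrap/wrap providers lift these back out into real profiles.
        let figment =
            Figment::from(Toml::file("foundry.toml").nested()).select(Profile::new("profile"));

        let runs: u64 = figment.extract_inner("ci.optimizer_runs")?;
        assert_eq!(runs, 999);
        Ok(())
    });
}
```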
pub(crate) fn deserialize_stringified_percent<'de, D>(deserializer: D) -> Result where @@ -299,8 +259,8 @@ impl FromStr for Numeric { /// Returns the [SpecId] derived from [EvmVersion] #[inline] -pub fn evm_spec_id(evm_version: &EvmVersion, alphanet: bool) -> SpecId { - if alphanet { +pub fn evm_spec_id(evm_version: EvmVersion, odyssey: bool) -> SpecId { + if odyssey { return SpecId::OSAKA; } match evm_version { @@ -319,41 +279,3 @@ pub fn evm_spec_id(evm_version: &EvmVersion, alphanet: bool) -> SpecId { EvmVersion::Prague => SpecId::OSAKA, // Osaka enables EOF } } - -#[cfg(test)] -mod tests { - use crate::get_available_profiles; - use std::path::Path; - - #[test] - fn get_profiles_from_toml() { - figment::Jail::expect_with(|jail| { - jail.create_file( - "foundry.toml", - r" - [foo.baz] - libs = ['node_modules', 'lib'] - - [profile.default] - libs = ['node_modules', 'lib'] - - [profile.ci] - libs = ['node_modules', 'lib'] - - [profile.local] - libs = ['node_modules', 'lib'] - ", - )?; - - let path = Path::new("./foundry.toml"); - let profiles = get_available_profiles(path).unwrap(); - - assert_eq!( - profiles, - vec!["default".to_string(), "ci".to_string(), "local".to_string()] - ); - - Ok(()) - }); - } -} diff --git a/crates/debugger/Cargo.toml b/crates/debugger/Cargo.toml index 4fb417db5..4cf86e20a 100644 --- a/crates/debugger/Cargo.toml +++ b/crates/debugger/Cargo.toml @@ -23,7 +23,7 @@ alloy-primitives.workspace = true crossterm = "0.28" eyre.workspace = true -ratatui = { version = "0.28", default-features = false, features = [ +ratatui = { version = "0.29", default-features = false, features = [ "crossterm", ] } revm.workspace = true diff --git a/crates/evm/core/src/backend/cow.rs b/crates/evm/core/src/backend/cow.rs index 4b7798ec7..f7876569d 100644 --- a/crates/evm/core/src/backend/cow.rs +++ b/crates/evm/core/src/backend/cow.rs @@ -100,7 +100,7 @@ impl<'a> CowBackend<'a> { self.spec_id = env.handler_cfg.spec_id; let mut evm = crate::utils::new_evm_with_inspector(self, env.clone(), inspector); - let res = evm.transact().wrap_err("backend: failed while inspecting")?; + let res = evm.transact().wrap_err("EVM error")?; env.env = evm.context.evm.inner.env; diff --git a/crates/evm/core/src/backend/mod.rs b/crates/evm/core/src/backend/mod.rs index 2edd0ec86..66c555f9a 100644 --- a/crates/evm/core/src/backend/mod.rs +++ b/crates/evm/core/src/backend/mod.rs @@ -10,7 +10,7 @@ use crate::{ use alloy_consensus::Transaction as TransactionTrait; use alloy_genesis::GenesisAccount; use alloy_network::{AnyRpcBlock, AnyTxEnvelope, TransactionResponse}; -use alloy_primitives::{keccak256, map::HashMap, uint, Address, B256, U256}; +use alloy_primitives::{keccak256, map::HashMap, uint, Address, TxKind, B256, U256}; use alloy_rpc_types::{BlockNumberOrTag, Transaction, TransactionRequest}; use eyre::Context; use foundry_common::{is_known_system_sender, SYSTEM_TRANSACTION_TYPE}; @@ -829,7 +829,7 @@ impl Backend { self.initialize(env); let mut evm = crate::utils::new_evm_with_inspector(self, env.clone(), inspector); - let res = evm.transact().wrap_err("backend: failed while inspecting")?; + let res = evm.transact().wrap_err("EVM error")?; env.env = evm.context.evm.inner.env; @@ -2218,12 +2218,6 @@ fn commit_transaction( persistent_accounts: &HashSet
<Address>
, inspector: &mut dyn InspectorExt, ) -> eyre::Result<()> { - // TODO: Remove after https://github.com/foundry-rs/foundry/pull/9131 - // if the tx has the blob_versioned_hashes field, we assume it's a Cancun block - if tx.blob_versioned_hashes().is_some() { - env.handler_cfg.spec_id = SpecId::CANCUN; - } - configure_tx_env(&mut env.env, tx); let now = Instant::now(); diff --git a/crates/evm/core/src/constants.rs b/crates/evm/core/src/constants.rs index daff4bf0a..c4555ae86 100644 --- a/crates/evm/core/src/constants.rs +++ b/crates/evm/core/src/constants.rs @@ -32,6 +32,9 @@ pub const MAGIC_ASSUME: &[u8] = b"FOUNDRY::ASSUME"; /// Magic return value returned by the `skip` cheatcode. Optionally appended with a reason. pub const MAGIC_SKIP: &[u8] = b"FOUNDRY::SKIP"; +/// Test timeout return value. +pub const TEST_TIMEOUT: &str = "FOUNDRY::TEST_TIMEOUT"; + /// The address that deploys the default CREATE2 deployer contract. pub const DEFAULT_CREATE2_DEPLOYER_DEPLOYER: Address = address!("3fAB184622Dc19b6109349B94811493BF2a45362"); @@ -42,6 +45,11 @@ pub const DEFAULT_CREATE2_DEPLOYER: Address = address!("4e59b44847b379578588920c pub const DEFAULT_CREATE2_DEPLOYER_CODE: &[u8] = &hex!("604580600e600039806000f350fe7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe03601600081602082378035828234f58015156039578182fd5b8082525050506014600cf3"); /// The runtime code of the default CREATE2 deployer. pub const DEFAULT_CREATE2_DEPLOYER_RUNTIME_CODE: &[u8] = &hex!("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe03601600081602082378035828234f58015156039578182fd5b8082525050506014600cf3"); +/// The hash of the default CREATE2 deployer code. +/// +/// This is calculated as `keccak256([`DEFAULT_CREATE2_DEPLOYER_RUNTIME_CODE`])`. +pub const DEFAULT_CREATE2_DEPLOYER_CODEHASH: B256 = + b256!("2fa86add0aed31f33a762c9d88e807c475bd51d0f52bd0955754b2608f7e4989"); #[cfg(test)] mod tests { diff --git a/crates/evm/core/src/ic.rs b/crates/evm/core/src/ic.rs index 2711f8933..fcabf2a18 100644 --- a/crates/evm/core/src/ic.rs +++ b/crates/evm/core/src/ic.rs @@ -1,4 +1,5 @@ use alloy_primitives::map::HashMap; +use eyre::Result; use revm::interpreter::{ opcode::{PUSH0, PUSH1, PUSH32}, OpCode, @@ -100,7 +101,7 @@ pub struct Instruction<'a> { } /// Decodes raw opcode bytes into [`Instruction`]s. -pub fn decode_instructions(code: &[u8]) -> Vec> { +pub fn decode_instructions(code: &[u8]) -> Result>> { let mut pc = 0; let mut steps = Vec::new(); @@ -108,10 +109,14 @@ pub fn decode_instructions(code: &[u8]) -> Vec> { let op = OpCode::new(code[pc]); let immediate_size = op.map(|op| immediate_size(op, &code[pc + 1..])).unwrap_or(0) as usize; + if pc + 1 + immediate_size > code.len() { + eyre::bail!("incomplete sequence of bytecode"); + } + steps.push(Instruction { op, pc, immediate: &code[pc + 1..pc + 1 + immediate_size] }); pc += 1 + immediate_size; } - steps + Ok(steps) } diff --git a/crates/evm/core/src/lib.rs b/crates/evm/core/src/lib.rs index 34d43b152..a602ab97a 100644 --- a/crates/evm/core/src/lib.rs +++ b/crates/evm/core/src/lib.rs @@ -5,6 +5,8 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +use crate::constants::DEFAULT_CREATE2_DEPLOYER; +use alloy_primitives::Address; use auto_impl::auto_impl; use backend::DatabaseExt; use foundry_zksync_core::Call; @@ -51,11 +53,16 @@ pub trait InspectorExt: for<'a> Inspector<&'a mut dyn DatabaseExt> { /// Simulates `console.log` invocation. 
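`decode_instructions` is now fallible, so a truncated PUSH immediate surfaces as an error instead of an out-of-bounds slice. A small usage sketch; the import path assumes the `pub use crate::ic::*;` re-export in `utils.rs` and may need adjusting:

```rust
use foundry_evm_core::utils::decode_instructions;

fn main() {
    // PUSH1 0x60, PUSH1 0x40: two complete instructions decode fine.
    let steps = decode_instructions(&[0x60, 0x60, 0x60, 0x40]).unwrap();
    assert_eq!(steps.len(), 2);

    // PUSH32 followed by a single immediate byte is incomplete bytecode:
    // with the bounds check above this is now a regular error.
    assert!(decode_instructions(&[0x7f, 0x01]).is_err());
}
```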
fn console_log(&mut self, _input: String) {} - /// Returns `true` if the current network is Alphanet. - fn is_alphanet(&self) -> bool { + /// Returns `true` if the current network is Odyssey. + fn is_odyssey(&self) -> bool { false } + /// Returns the CREATE2 deployer address. + fn create2_deployer(&self) -> Address { + DEFAULT_CREATE2_DEPLOYER + } + /// Appends provided zksync traces. fn trace_zksync( &mut self, diff --git a/crates/evm/core/src/opts.rs b/crates/evm/core/src/opts.rs index 9849fd1ce..ad4304f6a 100644 --- a/crates/evm/core/src/opts.rs +++ b/crates/evm/core/src/opts.rs @@ -1,15 +1,16 @@ use super::fork::environment; -use crate::fork::CreateFork; +use crate::{constants::DEFAULT_CREATE2_DEPLOYER, fork::CreateFork}; use alloy_primitives::{Address, B256, U256}; use alloy_provider::{network::AnyRpcBlock, Provider}; use eyre::WrapErr; use foundry_common::{provider::ProviderBuilder, ALCHEMY_FREE_TIER_CUPS}; -use foundry_config::{Chain, Config}; +use foundry_config::{Chain, Config, GasLimit}; use revm::primitives::{BlockEnv, CfgEnv, TxEnv}; use serde::{Deserialize, Deserializer, Serialize}; +use std::fmt::Write; use url::Url; -#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize)] pub struct EvmOpts { /// The EVM environment configuration. #[serde(flatten)] @@ -64,8 +65,36 @@ pub struct EvmOpts { /// Whether to disable block gas limit checks. pub disable_block_gas_limit: bool, - /// whether to enable Alphanet features. - pub alphanet: bool, + /// whether to enable Odyssey features. + pub odyssey: bool, + + /// The CREATE2 deployer's address. + pub create2_deployer: Address, +} + +impl Default for EvmOpts { + fn default() -> Self { + Self { + env: Env::default(), + fork_url: None, + fork_block_number: None, + fork_retries: None, + fork_retry_backoff: None, + compute_units_per_second: None, + no_rpc_rate_limit: false, + no_storage_caching: false, + initial_balance: U256::default(), + sender: Address::default(), + ffi: false, + always_use_create_2_factory: false, + verbosity: 0, + memory_limit: 0, + isolate: false, + disable_block_gas_limit: false, + odyssey: false, + create2_deployer: DEFAULT_CREATE2_DEPLOYER, + } + } } impl EvmOpts { @@ -85,9 +114,8 @@ impl EvmOpts { /// And the block that was used to configure the environment. pub async fn fork_evm_env( &self, - fork_url: impl AsRef, + fork_url: &str, ) -> eyre::Result<(revm::primitives::Env, AnyRpcBlock)> { - let fork_url = fork_url.as_ref(); let provider = ProviderBuilder::new(fork_url) .compute_units_per_second(self.get_compute_units_per_second()) .build()?; @@ -102,13 +130,13 @@ impl EvmOpts { ) .await .wrap_err_with(|| { - let mut err_msg = "Could not instantiate forked environment".to_string(); + let mut msg = "Could not instantiate forked environment".to_string(); if let Ok(url) = Url::parse(fork_url) { if let Some(provider) = url.host() { - err_msg.push_str(&format!(" with provider {provider}")); + write!(msg, " with provider {provider}").unwrap(); } } - err_msg + msg }) } @@ -166,7 +194,7 @@ impl EvmOpts { /// Returns the gas limit to use pub fn gas_limit(&self) -> u64 { - self.env.block_gas_limit.unwrap_or(self.env.gas_limit) + self.env.block_gas_limit.unwrap_or(self.env.gas_limit).0 } /// Returns the configured chain id, which will be @@ -225,8 +253,7 @@ impl EvmOpts { #[derive(Clone, Debug, Default, Serialize, Deserialize)] pub struct Env { /// The block gas limit. 
- #[serde(deserialize_with = "string_or_number")] - pub gas_limit: u64, + pub gas_limit: GasLimit, /// The `CHAINID` opcode value. pub chain_id: Option, @@ -260,47 +287,10 @@ pub struct Env { pub block_prevrandao: B256, /// the block.gaslimit value during EVM execution - #[serde( - default, - skip_serializing_if = "Option::is_none", - deserialize_with = "string_or_number_opt" - )] - pub block_gas_limit: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub block_gas_limit: Option, /// EIP-170: Contract code size limit in bytes. Useful to increase this because of tests. #[serde(default, skip_serializing_if = "Option::is_none")] pub code_size_limit: Option, } - -#[derive(Deserialize)] -#[serde(untagged)] -enum Gas { - Number(u64), - Text(String), -} - -fn string_or_number<'de, D>(deserializer: D) -> Result -where - D: Deserializer<'de>, -{ - use serde::de::Error; - match Gas::deserialize(deserializer)? { - Gas::Number(num) => Ok(num), - Gas::Text(s) => s.parse().map_err(D::Error::custom), - } -} - -fn string_or_number_opt<'de, D>(deserializer: D) -> Result, D::Error> -where - D: Deserializer<'de>, -{ - use serde::de::Error; - - match Option::::deserialize(deserializer)? { - Some(gas) => match gas { - Gas::Number(num) => Ok(Some(num)), - Gas::Text(s) => s.parse().map(Some).map_err(D::Error::custom), - }, - _ => Ok(None), - } -} diff --git a/crates/evm/core/src/precompiles.rs b/crates/evm/core/src/precompiles.rs index 2544258d3..ceaf6d004 100644 --- a/crates/evm/core/src/precompiles.rs +++ b/crates/evm/core/src/precompiles.rs @@ -46,13 +46,13 @@ pub const PRECOMPILES: &[Address] = &[ EC_PAIRING, BLAKE_2F, POINT_EVALUATION, - ALPHANET_P256_ADDRESS, + ODYSSEY_P256_ADDRESS, ]; -/// [EIP-7212](https://eips.ethereum.org/EIPS/eip-7212) secp256r1 precompile address on Alphanet. +/// [EIP-7212](https://eips.ethereum.org/EIPS/eip-7212) secp256r1 precompile address on Odyssey. /// -/// -pub const ALPHANET_P256_ADDRESS: Address = address!("0000000000000000000000000000000000000014"); +/// +pub const ODYSSEY_P256_ADDRESS: Address = address!("0000000000000000000000000000000000000014"); /// Wrapper around revm P256 precompile, matching EIP-7212 spec. /// @@ -69,5 +69,5 @@ pub fn p256_verify(input: &Bytes, gas_limit: u64) -> PrecompileResult { } /// [EIP-7212](https://eips.ethereum.org/EIPS/eip-7212#specification) secp256r1 precompile. 
-pub const ALPHANET_P256: PrecompileWithAddress = - PrecompileWithAddress(ALPHANET_P256_ADDRESS, Precompile::Standard(p256_verify)); +pub const ODYSSEY_P256: PrecompileWithAddress = + PrecompileWithAddress(ODYSSEY_P256_ADDRESS, Precompile::Standard(p256_verify)); diff --git a/crates/evm/core/src/utils.rs b/crates/evm/core/src/utils.rs index 67f364252..889702d11 100644 --- a/crates/evm/core/src/utils.rs +++ b/crates/evm/core/src/utils.rs @@ -1,12 +1,12 @@ pub use crate::ic::*; use crate::{ - backend::DatabaseExt, constants::DEFAULT_CREATE2_DEPLOYER, precompiles::ALPHANET_P256, + backend::DatabaseExt, constants::DEFAULT_CREATE2_DEPLOYER_CODEHASH, precompiles::ODYSSEY_P256, InspectorExt, }; use alloy_consensus::BlockHeader; use alloy_json_abi::{Function, JsonAbi}; use alloy_network::AnyTxEnvelope; -use alloy_primitives::{Address, Selector, TxKind, U256}; +use alloy_primitives::{Address, Selector, TxKind, B256, U256}; use alloy_provider::{network::BlockResponse, Network}; use alloy_rpc_types::{Transaction, TransactionRequest}; use foundry_config::NamedChain; @@ -35,11 +35,12 @@ pub fn apply_chain_and_block_specific_env_changes( env: &mut revm::primitives::Env, block: &N::BlockResponse, ) { + use NamedChain::*; if let Ok(chain) = NamedChain::try_from(env.cfg.chain_id) { let block_number = block.header().number(); match chain { - NamedChain::Mainnet => { + Mainnet => { // after merge difficulty is supplanted with prevrandao EIP-4399 if block_number >= 15_537_351u64 { env.block.difficulty = env.block.prevrandao.unwrap_or_default().into(); @@ -47,10 +48,13 @@ pub fn apply_chain_and_block_specific_env_changes( return; } - NamedChain::Arbitrum | - NamedChain::ArbitrumGoerli | - NamedChain::ArbitrumNova | - NamedChain::ArbitrumTestnet => { + Moonbeam | Moonbase | Moonriver | MoonbeamDev => { + if env.block.prevrandao.is_none() { + // + env.block.prevrandao = Some(B256::random()); + } + } + c if c.is_arbitrum() => { // on arbitrum `block.number` is the L1 block which is included in the // `l1BlockNumber` field if let Some(l1_block_number) = block @@ -150,12 +154,16 @@ pub fn gas_used(spec: SpecId, spent: u64, refunded: u64) -> u64 { spent - (refunded).min(spent / refund_quotient) } -fn get_create2_factory_call_inputs(salt: U256, inputs: CreateInputs) -> CallInputs { +fn get_create2_factory_call_inputs( + salt: U256, + inputs: CreateInputs, + deployer: Address, +) -> CallInputs { let calldata = [&salt.to_be_bytes::<32>()[..], &inputs.init_code[..]].concat(); CallInputs { caller: inputs.caller, - bytecode_address: DEFAULT_CREATE2_DEPLOYER, - target_address: DEFAULT_CREATE2_DEPLOYER, + bytecode_address: deployer, + target_address: deployer, scheme: CallScheme::Call, value: CallValue::Transfer(inputs.value), input: calldata.into(), @@ -166,7 +174,7 @@ fn get_create2_factory_call_inputs(salt: U256, inputs: CreateInputs) -> CallInpu } } -/// Used for routing certain CREATE2 invocations through [DEFAULT_CREATE2_DEPLOYER]. +/// Used for routing certain CREATE2 invocations through CREATE2_DEPLOYER. /// /// Overrides create hook with CALL frame if [InspectorExt::should_use_create2_factory] returns /// true. Keeps track of overridden frames and handles outcome in the overridden insert_call_outcome @@ -191,8 +199,10 @@ pub fn create2_handler_register( let gas_limit = inputs.gas_limit; + // Get CREATE2 deployer. + let create2_deployer = ctx.external.create2_deployer(); // Generate call inputs for CREATE2 factory. 
- let mut call_inputs = get_create2_factory_call_inputs(salt, *inputs); + let mut call_inputs = get_create2_factory_call_inputs(salt, *inputs, create2_deployer); // Call inspector to change input or return outcome. let outcome = ctx.external.call(&mut ctx.evm, &mut call_inputs); @@ -203,19 +213,26 @@ pub fn create2_handler_register( .push((ctx.evm.journaled_state.depth(), call_inputs.clone())); // Sanity check that CREATE2 deployer exists. - // We check which deployer we are using to separate the logic for zkSync and original - // foundry. - let mut code_hash = ctx.evm.load_account(DEFAULT_CREATE2_DEPLOYER)?.info.code_hash; - + let code_hash = ctx.evm.load_account(create2_deployer)?.info.code_hash; + // NOTE(zk): We check which deployer we are using to separate the logic for zkSync + // and original foundry. if call_inputs.target_address == DEFAULT_CREATE2_DEPLOYER_ZKSYNC { code_hash = ctx.evm.load_account(call_inputs.target_address)?.info.code_hash; }; - if code_hash == KECCAK_EMPTY { return Ok(FrameOrResult::Result(FrameResult::Call(CallOutcome { result: InterpreterResult { result: InstructionResult::Revert, - output: "missing CREATE2 deployer".into(), + output: format!("missing CREATE2 deployer: {create2_deployer}").into(), + gas: Gas::new(gas_limit), + }, + memory_offset: 0..0, + }))) + } else if code_hash != DEFAULT_CREATE2_DEPLOYER_CODEHASH { + return Ok(FrameOrResult::Result(FrameResult::Call(CallOutcome { + result: InterpreterResult { + result: InstructionResult::Revert, + output: "invalid CREATE2 deployer bytecode".into(), gas: Gas::new(gas_limit), }, memory_offset: 0..0, @@ -280,13 +297,13 @@ pub fn create2_handler_register( }); } -/// Adds Alphanet P256 precompile to the list of loaded precompiles. -pub fn alphanet_handler_register(handler: &mut EvmHandler<'_, EXT, DB>) { +/// Adds Odyssey P256 precompile to the list of loaded precompiles. 
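The sanity check above rejects a deployer whose code hash differs from `DEFAULT_CREATE2_DEPLOYER_CODEHASH`, and the doc comment in `constants.rs` states that the constant is `keccak256` of the deployer runtime code. A one-line assertion (constants copied verbatim from this diff) can confirm that relationship:

```rust
use alloy_primitives::{b256, hex, keccak256, B256};

const DEFAULT_CREATE2_DEPLOYER_RUNTIME_CODE: &[u8] = &hex!("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe03601600081602082378035828234f58015156039578182fd5b8082525050506014600cf3");
const DEFAULT_CREATE2_DEPLOYER_CODEHASH: B256 =
    b256!("2fa86add0aed31f33a762c9d88e807c475bd51d0f52bd0955754b2608f7e4989");

fn main() {
    // If the runtime code ever changes, this assertion catches a stale code hash.
    assert_eq!(keccak256(DEFAULT_CREATE2_DEPLOYER_RUNTIME_CODE), DEFAULT_CREATE2_DEPLOYER_CODEHASH);
}
```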
+pub fn odyssey_handler_register(handler: &mut EvmHandler<'_, EXT, DB>) { let prev = handler.pre_execution.load_precompiles.clone(); handler.pre_execution.load_precompiles = Arc::new(move || { let mut loaded_precompiles = prev(); - loaded_precompiles.extend([ALPHANET_P256]); + loaded_precompiles.extend([ODYSSEY_P256]); loaded_precompiles }); @@ -315,8 +332,8 @@ pub fn new_evm_with_inspector<'evm, 'i, 'db, I: InspectorExt + ?Sized>( let mut handler = revm::Handler::new(handler_cfg); handler.append_handler_register_plain(revm::inspector_handle_register); - if inspector.is_alphanet() { - handler.append_handler_register_plain(alphanet_handler_register); + if inspector.is_odyssey() { + handler.append_handler_register_plain(odyssey_handler_register); } handler.append_handler_register_plain(create2_handler_register); @@ -333,8 +350,8 @@ pub fn new_evm_with_existing_context<'a>( let mut handler = revm::Handler::new(handler_cfg); handler.append_handler_register_plain(revm::inspector_handle_register); - if inspector.is_alphanet() { - handler.append_handler_register_plain(alphanet_handler_register); + if inspector.is_odyssey() { + handler.append_handler_register_plain(odyssey_handler_register); } handler.append_handler_register_plain(create2_handler_register); diff --git a/crates/evm/coverage/src/analysis.rs b/crates/evm/coverage/src/analysis.rs index c18ba823b..f8cc746c5 100644 --- a/crates/evm/coverage/src/analysis.rs +++ b/crates/evm/coverage/src/analysis.rs @@ -1,7 +1,10 @@ use super::{CoverageItem, CoverageItemKind, SourceLocation}; use alloy_primitives::map::HashMap; use foundry_common::TestFunctionExt; -use foundry_compilers::artifacts::ast::{self, Ast, Node, NodeType}; +use foundry_compilers::artifacts::{ + ast::{self, Ast, Node, NodeType}, + Source, +}; use rayon::prelude::*; use std::sync::Arc; @@ -19,7 +22,7 @@ pub struct ContractVisitor<'a> { /// The current branch ID branch_id: usize, /// Stores the last line we put in the items collection to ensure we don't push duplicate lines - last_line: usize, + last_line: u32, /// Coverage items pub items: Vec, @@ -47,23 +50,25 @@ impl<'a> ContractVisitor<'a> { } fn visit_function_definition(&mut self, node: &Node) -> eyre::Result<()> { + let Some(body) = &node.body else { return Ok(()) }; + let name: String = node.attribute("name").ok_or_else(|| eyre::eyre!("Function has no name"))?; - let kind: String = node.attribute("kind").ok_or_else(|| eyre::eyre!("Function has no kind"))?; - match &node.body { - Some(body) => { - // Do not add coverage item for constructors without statements. - if kind == "constructor" && !has_statements(body) { - return Ok(()) - } - self.push_item_kind(CoverageItemKind::Function { name }, &node.src); - self.visit_block(body) - } - _ => Ok(()), + // TODO: We currently can only detect empty bodies in normal functions, not any of the other + // kinds: https://github.com/foundry-rs/foundry/issues/9458 + if kind != "function" && !has_statements(body) { + return Ok(()); } + + // `fallback`, `receive`, and `constructor` functions have an empty `name`. + // Use the `kind` itself as the name. 
+ let name = if name.is_empty() { kind } else { name }; + + self.push_item_kind(CoverageItemKind::Function { name }, &node.src); + self.visit_block(body) } fn visit_modifier_or_yul_fn_definition(&mut self, node: &Node) -> eyre::Result<()> { @@ -367,8 +372,9 @@ impl<'a> ContractVisitor<'a> { let expr: Option = node.attribute("expression"); if let Some(NodeType::Identifier) = expr.as_ref().map(|expr| &expr.node_type) { // Might be a require call, add branch coverage. + // Asserts should not be considered branches: . let name: Option = expr.and_then(|expr| expr.attribute("name")); - if let Some("require" | "assert") = name.as_deref() { + if let Some("require") = name.as_deref() { let branch_id = self.branch_id; self.branch_id += 1; self.push_item_kind( @@ -454,30 +460,34 @@ impl<'a> ContractVisitor<'a> { /// collection (plus additional coverage line if item is a statement). fn push_item_kind(&mut self, kind: CoverageItemKind, src: &ast::LowFidelitySourceLocation) { let item = CoverageItem { kind, loc: self.source_location_for(src), hits: 0 }; - // Push a line item if we haven't already - if matches!(item.kind, CoverageItemKind::Statement | CoverageItemKind::Branch { .. }) && - self.last_line < item.loc.line - { + + // Push a line item if we haven't already. + debug_assert!(!matches!(item.kind, CoverageItemKind::Line)); + if self.last_line < item.loc.lines.start { self.items.push(CoverageItem { kind: CoverageItemKind::Line, loc: item.loc.clone(), hits: 0, }); - self.last_line = item.loc.line; + self.last_line = item.loc.lines.start; } self.items.push(item); } fn source_location_for(&self, loc: &ast::LowFidelitySourceLocation) -> SourceLocation { - let loc_start = - self.source.char_indices().map(|(i, _)| i).nth(loc.start).unwrap_or_default(); + let bytes_start = loc.start as u32; + let bytes_end = (loc.start + loc.length.unwrap_or(0)) as u32; + let bytes = bytes_start..bytes_end; + + let start_line = self.source[..bytes.start as usize].lines().count() as u32; + let n_lines = self.source[bytes.start as usize..bytes.end as usize].lines().count() as u32; + let lines = start_line..start_line + n_lines; SourceLocation { source_id: self.source_id, contract_name: self.contract_name.clone(), - start: loc.start as u32, - length: loc.length.map(|x| x as u32), - line: self.source[..loc_start].lines().count(), + bytes, + lines, } } } @@ -494,10 +504,7 @@ fn has_statements(node: &Node) -> bool { NodeType::TryStatement | NodeType::VariableDeclarationStatement | NodeType::WhileStatement => true, - _ => { - let statements: Vec = node.attribute("statements").unwrap_or_default(); - !statements.is_empty() - } + _ => node.attribute::>("statements").is_some_and(|s| !s.is_empty()), } } @@ -556,7 +563,7 @@ impl<'a> SourceAnalyzer<'a> { .attribute("name") .ok_or_else(|| eyre::eyre!("Contract has no name"))?; - let mut visitor = ContractVisitor::new(source_id, source, &name); + let mut visitor = ContractVisitor::new(source_id, &source.content, &name); visitor.visit_contract(node)?; let mut items = visitor.items; @@ -590,7 +597,7 @@ pub struct SourceFiles<'a> { #[derive(Debug)] pub struct SourceFile<'a> { /// The source code. - pub source: String, + pub source: Source, /// The AST of the source code. 
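`source_location_for` now derives the line range by counting lines in a byte prefix of the source rather than walking `char_indices`. A tiny self-contained sketch of that prefix-counting step; the helper name and sample source are made up for illustration:

```rust
/// Counts the lines of `source` that start at or before `byte_offset`,
/// mirroring the `self.source[..bytes.start].lines().count()` step above.
fn line_index(source: &str, byte_offset: usize) -> u32 {
    source[..byte_offset].lines().count() as u32
}

fn main() {
    let src = "contract A {\n    function f() public {}\n}\n";
    let off = src.find("function").unwrap();
    // The prefix holds one full line plus the indentation of the second line,
    // so `lines()` yields two items.
    assert_eq!(line_index(src, off), 2);
}
```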
pub ast: &'a Ast, } diff --git a/crates/evm/coverage/src/anchors.rs b/crates/evm/coverage/src/anchors.rs index 6643524d6..ee723d95c 100644 --- a/crates/evm/coverage/src/anchors.rs +++ b/crates/evm/coverage/src/anchors.rs @@ -177,14 +177,14 @@ fn is_in_source_range(element: &SourceElement, location: &SourceLocation) -> boo } // Needed because some source ranges in the source map mark the entire contract... - let is_within_start = element.offset() >= location.start; + let is_within_start = element.offset() >= location.bytes.start; if !is_within_start { return false; } - let start_of_ranges = location.start.max(element.offset()); - let end_of_ranges = (location.start + location.length.unwrap_or_default()) - .min(element.offset() + element.length()); + let start_of_ranges = location.bytes.start.max(element.offset()); + let end_of_ranges = + (location.bytes.start + location.len()).min(element.offset() + element.length()); let within_ranges = start_of_ranges <= end_of_ranges; if !within_ranges { return false; diff --git a/crates/evm/coverage/src/inspector.rs b/crates/evm/coverage/src/inspector.rs index 73d2ff148..bc3a40e56 100644 --- a/crates/evm/coverage/src/inspector.rs +++ b/crates/evm/coverage/src/inspector.rs @@ -1,36 +1,102 @@ use crate::{HitMap, HitMaps}; use alloy_primitives::B256; use revm::{interpreter::Interpreter, Database, EvmContext, Inspector}; +use std::ptr::NonNull; -#[derive(Clone, Debug, Default)] +/// Inspector implementation for collecting coverage information. +#[derive(Clone, Debug)] pub struct CoverageCollector { - /// Maps that track instruction hit data. - pub maps: HitMaps, + // NOTE: `current_map` is always a valid reference into `maps`. + // It is accessed only through `get_or_insert_map` which guarantees that it's valid. + // Both of these fields are unsafe to access directly outside of `*insert_map`. + current_map: NonNull, + current_hash: B256, + + maps: HitMaps, +} + +// SAFETY: See comments on `current_map`. +unsafe impl Send for CoverageCollector {} +unsafe impl Sync for CoverageCollector {} + +impl Default for CoverageCollector { + fn default() -> Self { + Self { + current_map: NonNull::dangling(), + current_hash: B256::ZERO, + maps: Default::default(), + } + } } impl Inspector for CoverageCollector { + fn initialize_interp(&mut self, interpreter: &mut Interpreter, _context: &mut EvmContext) { + get_or_insert_contract_hash(interpreter); + self.insert_map(interpreter); + } + #[inline] - fn initialize_interp(&mut self, interp: &mut Interpreter, _context: &mut EvmContext) { + fn step(&mut self, interpreter: &mut Interpreter, _context: &mut EvmContext) { + let map = self.get_or_insert_map(interpreter); + map.hit(interpreter.program_counter()); + } +} + +impl CoverageCollector { + /// Finish collecting coverage information and return the [`HitMaps`]. + pub fn finish(self) -> HitMaps { self.maps - .entry(get_contract_hash(interp)) - .or_insert_with(|| HitMap::new(interp.contract.bytecode.original_bytes())); } + /// Gets the hit map for the current contract, or inserts a new one if it doesn't exist. + /// + /// The map is stored in `current_map` and returned as a mutable reference. + /// See comments on `current_map` for more details. 
#[inline] - fn step(&mut self, interp: &mut Interpreter, _context: &mut EvmContext) { - self.maps - .entry(get_contract_hash(interp)) - .and_modify(|map| map.hit(interp.program_counter())); + fn get_or_insert_map(&mut self, interpreter: &mut Interpreter) -> &mut HitMap { + let hash = get_or_insert_contract_hash(interpreter); + if self.current_hash != *hash { + self.insert_map(interpreter); + } + // SAFETY: See comments on `current_map`. + unsafe { self.current_map.as_mut() } + } + + #[cold] + #[inline(never)] + fn insert_map(&mut self, interpreter: &Interpreter) { + let Some(hash) = interpreter.contract.hash else { eof_panic() }; + self.current_hash = hash; + // Converts the mutable reference to a `NonNull` pointer. + self.current_map = self + .maps + .entry(hash) + .or_insert_with(|| HitMap::new(interpreter.contract.bytecode.original_bytes())) + .into(); } } /// Helper function for extracting contract hash used to record coverage hit map. -/// If contract hash available in interpreter contract is zero (contract not yet created but going -/// to be created in current tx) then it hash is calculated from contract bytecode. -fn get_contract_hash(interp: &mut Interpreter) -> B256 { - let mut hash = interp.contract.hash.expect("Contract hash is None"); - if hash == B256::ZERO { - hash = interp.contract.bytecode.hash_slow(); +/// +/// If the contract hash is zero (contract not yet created but it's going to be created in current +/// tx) then the hash is calculated from the bytecode. +#[inline] +fn get_or_insert_contract_hash(interpreter: &mut Interpreter) -> &B256 { + let Some(hash) = interpreter.contract.hash.as_mut() else { eof_panic() }; + if hash.is_zero() { + set_contract_hash(hash, &interpreter.contract.bytecode); } hash } + +#[cold] +#[inline(never)] +fn set_contract_hash(hash: &mut B256, bytecode: &revm::primitives::Bytecode) { + *hash = bytecode.hash_slow(); +} + +#[cold] +#[inline(never)] +fn eof_panic() -> ! { + panic!("coverage does not support EOF"); +} diff --git a/crates/evm/coverage/src/lib.rs b/crates/evm/coverage/src/lib.rs index ad4ab53e3..c55fec62f 100644 --- a/crates/evm/coverage/src/lib.rs +++ b/crates/evm/coverage/src/lib.rs @@ -11,14 +11,18 @@ extern crate foundry_common; #[macro_use] extern crate tracing; -use alloy_primitives::{map::HashMap, Bytes, B256}; -use eyre::{Context, Result}; +use alloy_primitives::{ + map::{B256HashMap, HashMap}, + Bytes, +}; +use eyre::Result; use foundry_compilers::artifacts::sourcemap::SourceMap; use semver::Version; use std::{ collections::BTreeMap, fmt::Display, - ops::{AddAssign, Deref, DerefMut}, + num::NonZeroU32, + ops::{Deref, DerefMut, Range}, path::{Path, PathBuf}, sync::Arc, }; @@ -82,40 +86,29 @@ impl CoverageReport { self.anchors.extend(anchors); } - /// Get coverage summaries by source file path. - pub fn summary_by_file(&self) -> impl Iterator { - let mut summaries = BTreeMap::new(); - - for (version, items) in self.items.iter() { - for item in items { - let Some(path) = - self.source_paths.get(&(version.clone(), item.loc.source_id)).cloned() - else { - continue; - }; - *summaries.entry(path).or_default() += item; - } - } - - summaries.into_iter() + /// Returns an iterator over coverage summaries by source file path. + pub fn summary_by_file(&self) -> impl Iterator { + self.by_file(|summary: &mut CoverageSummary, item| summary.add_item(item)) } - /// Get coverage items by source file path. 
- pub fn items_by_source(&self) -> impl Iterator)> { - let mut items_by_source: BTreeMap<_, Vec<_>> = BTreeMap::new(); + /// Returns an iterator over coverage items by source file path. + pub fn items_by_file(&self) -> impl Iterator)> { + self.by_file(|list: &mut Vec<_>, item| list.push(item)) + } - for (version, items) in self.items.iter() { + fn by_file<'a, T: Default>( + &'a self, + mut f: impl FnMut(&mut T, &'a CoverageItem), + ) -> impl Iterator { + let mut by_file: BTreeMap<&Path, T> = BTreeMap::new(); + for (version, items) in &self.items { for item in items { - let Some(path) = - self.source_paths.get(&(version.clone(), item.loc.source_id)).cloned() - else { - continue; - }; - items_by_source.entry(path).or_default().push(item.clone()); + let key = (version.clone(), item.loc.source_id); + let Some(path) = self.source_paths.get(&key) else { continue }; + f(by_file.entry(path).or_default(), item); } } - - items_by_source.into_iter() + by_file.into_iter() } /// Processes data from a [`HitMap`] and sets hit counts for coverage items in this coverage @@ -130,22 +123,21 @@ impl CoverageReport { is_deployed_code: bool, ) -> Result<()> { // Add bytecode level hits - let e = self - .bytecode_hits + self.bytecode_hits .entry(contract_id.clone()) - .or_insert_with(|| HitMap::new(hit_map.bytecode.clone())); - e.merge(hit_map).wrap_err_with(|| format!("{contract_id:?}"))?; + .and_modify(|m| m.merge(hit_map)) + .or_insert_with(|| hit_map.clone()); // Add source level hits if let Some(anchors) = self.anchors.get(contract_id) { let anchors = if is_deployed_code { &anchors.1 } else { &anchors.0 }; for anchor in anchors { - if let Some(&hits) = hit_map.hits.get(&anchor.instruction) { + if let Some(hits) = hit_map.get(anchor.instruction) { self.items .get_mut(&contract_id.version) .and_then(|items| items.get_mut(anchor.item_id)) .expect("Anchor refers to non-existent coverage item") - .hits += hits; + .hits += hits.get(); } } } @@ -171,9 +163,10 @@ impl CoverageReport { /// A collection of [`HitMap`]s. #[derive(Clone, Debug, Default)] -pub struct HitMaps(pub HashMap); +pub struct HitMaps(pub B256HashMap); impl HitMaps { + /// Merges two `Option`. pub fn merge_opt(a: &mut Option, b: Option) { match (a, b) { (_, None) => {} @@ -182,17 +175,15 @@ impl HitMaps { } } + /// Merges two `HitMaps`. pub fn merge(&mut self, other: Self) { - for (code_hash, hit_map) in other.0 { - if let Some(HitMap { hits: extra_hits, .. }) = self.insert(code_hash, hit_map) { - for (pc, hits) in extra_hits { - self.entry(code_hash) - .and_modify(|map| *map.hits.entry(pc).or_default() += hits); - } - } + self.reserve(other.len()); + for (code_hash, other) in other.0 { + self.entry(code_hash).and_modify(|e| e.merge(&other)).or_insert(other); } } + /// Merges two `HitMaps`. pub fn merged(mut self, other: Self) -> Self { self.merge(other); self @@ -200,7 +191,7 @@ impl HitMaps { } impl Deref for HitMaps { - type Target = HashMap; + type Target = B256HashMap; fn deref(&self) -> &Self::Target { &self.0 @@ -218,40 +209,70 @@ impl DerefMut for HitMaps { /// Contains low-level data about hit counters for the instructions in the bytecode of a contract. #[derive(Clone, Debug)] pub struct HitMap { - pub bytecode: Bytes, - pub hits: BTreeMap, + bytecode: Bytes, + hits: HashMap, } impl HitMap { + /// Create a new hitmap with the given bytecode. 
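The `by_file` helper above folds every coverage item into a per-path accumulator chosen by the caller, which is why both `summary_by_file` and `items_by_file` reduce to one-line adapters. A std-only sketch of that shape (illustrative names, not from the patch):

use std::collections::BTreeMap;

fn group_by<'a, K: Ord, T: Default>(
    items: impl IntoIterator<Item = (K, &'a str)>,
    mut f: impl FnMut(&mut T, &'a str),
) -> BTreeMap<K, T> {
    let mut out: BTreeMap<K, T> = BTreeMap::new();
    for (key, item) in items {
        f(out.entry(key).or_default(), item);
    }
    out
}

fn main() {
    let items = [("a.sol", "Line"), ("a.sol", "Branch"), ("b.sol", "Line")];
    // Counting accumulator, analogous to folding items into a summary.
    let counts: BTreeMap<&str, usize> = group_by(items, |n, _| *n += 1);
    // Collecting accumulator, analogous to `items_by_file`.
    let lists: BTreeMap<&str, Vec<&str>> = group_by(items, |v, item| v.push(item));
    assert_eq!(counts["a.sol"], 2);
    assert_eq!(lists["b.sol"], vec!["Line"]);
}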
+ #[inline] pub fn new(bytecode: Bytes) -> Self { - Self { bytecode, hits: BTreeMap::new() } + Self { bytecode, hits: HashMap::with_capacity_and_hasher(1024, Default::default()) } } - /// Increase the hit counter for the given program counter. + /// Returns the bytecode. + #[inline] + pub fn bytecode(&self) -> &Bytes { + &self.bytecode + } + + /// Returns the number of hits for the given program counter. + #[inline] + pub fn get(&self, pc: usize) -> Option { + NonZeroU32::new(self.hits.get(&Self::cvt_pc(pc)).copied().unwrap_or(0)) + } + + /// Increase the hit counter by 1 for the given program counter. + #[inline] pub fn hit(&mut self, pc: usize) { - *self.hits.entry(pc).or_default() += 1; + self.hits(pc, 1) + } + + /// Increase the hit counter by `hits` for the given program counter. + #[inline] + pub fn hits(&mut self, pc: usize, hits: u32) { + *self.hits.entry(Self::cvt_pc(pc)).or_default() += hits; } /// Merge another hitmap into this, assuming the bytecode is consistent - pub fn merge(&mut self, other: &Self) -> Result<(), eyre::Report> { - for (pc, hits) in &other.hits { - *self.hits.entry(*pc).or_default() += hits; + pub fn merge(&mut self, other: &Self) { + self.hits.reserve(other.len()); + for (pc, hits) in other.iter() { + self.hits(pc, hits); } - Ok(()) } - pub fn consistent_bytecode(&self, hm1: &Self, hm2: &Self) -> bool { - // Consider the bytecodes consistent if they are the same out as far as the - // recorded hits - let len1 = hm1.hits.last_key_value(); - let len2 = hm2.hits.last_key_value(); - if let (Some(len1), Some(len2)) = (len1, len2) { - let len = std::cmp::max(len1.0, len2.0); - let ok = hm1.bytecode.0[..*len] == hm2.bytecode.0[..*len]; - let _ = sh_println!("consistent_bytecode: {}, {}, {}, {}", ok, len1.0, len2.0, len); - return ok; - } - true + /// Returns an iterator over all the program counters and their hit counts. + #[inline] + pub fn iter(&self) -> impl Iterator + '_ { + self.hits.iter().map(|(&pc, &hits)| (pc as usize, hits)) + } + + /// Returns the number of program counters hit in the hitmap. + #[inline] + pub fn len(&self) -> usize { + self.hits.len() + } + + /// Returns `true` if the hitmap is empty. + #[inline] + pub fn is_empty(&self) -> bool { + self.hits.is_empty() + } + + #[inline] + fn cvt_pc(pc: usize) -> u32 { + pc.try_into().expect("4GiB bytecode") } } @@ -322,7 +343,7 @@ pub struct CoverageItem { /// The location of the item in the source code. pub loc: SourceLocation, /// The number of times this item was hit. - pub hits: u64, + pub hits: u32, } impl Display for CoverageItem { @@ -345,30 +366,34 @@ impl Display for CoverageItem { } } +/// A source location. #[derive(Clone, Debug)] pub struct SourceLocation { /// The source ID. pub source_id: usize, /// The contract this source range is in. pub contract_name: Arc, - /// Start byte in the source code. - pub start: u32, - /// Number of bytes in the source code. - pub length: Option, - /// The line in the source code. - pub line: usize, + /// Byte range. + pub bytes: Range, + /// Line range. Indices are 1-based. + pub lines: Range, } impl Display for SourceLocation { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "source ID {}, line {}, chars {}-{}", - self.source_id, - self.line, - self.start, - self.length.map_or(self.start, |length| self.start + length) - ) + write!(f, "source ID {}, lines {:?}, bytes {:?}", self.source_id, self.lines, self.bytes) + } +} + +impl SourceLocation { + /// Returns the length of the byte range. 
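Two details of the reworked `HitMap` above are easy to miss: `get` folds "absent" and "recorded zero" into `None` by going through `NonZeroU32`, and `merge` is now infallible and simply sums counters. A small std-only analogue (illustrative names, not from the patch):

use std::{collections::HashMap, num::NonZeroU32};

/// pc -> hit count, mirroring the shape of `HitMap::hits`.
struct Hits(HashMap<u32, u32>);

impl Hits {
    /// Missing and explicit-zero entries both read back as `None`.
    fn get(&self, pc: u32) -> Option<NonZeroU32> {
        NonZeroU32::new(self.0.get(&pc).copied().unwrap_or(0))
    }

    /// Counts are summed; `reserve` avoids rehashing while merging a large map.
    fn merge(&mut self, other: &Self) {
        self.0.reserve(other.0.len());
        for (&pc, &hits) in &other.0 {
            *self.0.entry(pc).or_default() += hits;
        }
    }
}

fn main() {
    let mut a = Hits(HashMap::from([(0, 3), (5, 1)]));
    let b = Hits(HashMap::from([(5, 2), (7, 4)]));
    a.merge(&b);
    assert_eq!(a.get(5).map(NonZeroU32::get), Some(3));
    assert_eq!(a.get(1), None);
}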
+ pub fn len(&self) -> u32 { + self.bytes.len() as u32 + } + + /// Returns true if the byte range is empty. + pub fn is_empty(&self) -> bool { + self.len() == 0 } } @@ -393,21 +418,43 @@ pub struct CoverageSummary { pub function_hits: usize, } -impl AddAssign<&Self> for CoverageSummary { - fn add_assign(&mut self, other: &Self) { - self.line_count += other.line_count; - self.line_hits += other.line_hits; - self.statement_count += other.statement_count; - self.statement_hits += other.statement_hits; - self.branch_count += other.branch_count; - self.branch_hits += other.branch_hits; - self.function_count += other.function_count; - self.function_hits += other.function_hits; +impl CoverageSummary { + /// Creates a new, empty coverage summary. + pub fn new() -> Self { + Self::default() + } + + /// Creates a coverage summary from a collection of coverage items. + pub fn from_items<'a>(items: impl IntoIterator) -> Self { + let mut summary = Self::default(); + summary.add_items(items); + summary } -} -impl AddAssign<&CoverageItem> for CoverageSummary { - fn add_assign(&mut self, item: &CoverageItem) { + /// Adds another coverage summary to this one. + pub fn merge(&mut self, other: &Self) { + let Self { + line_count, + line_hits, + statement_count, + statement_hits, + branch_count, + branch_hits, + function_count, + function_hits, + } = self; + *line_count += other.line_count; + *line_hits += other.line_hits; + *statement_count += other.statement_count; + *statement_hits += other.statement_hits; + *branch_count += other.branch_count; + *branch_hits += other.branch_hits; + *function_count += other.function_count; + *function_hits += other.function_hits; + } + + /// Adds a coverage item to this summary. + pub fn add_item(&mut self, item: &CoverageItem) { match item.kind { CoverageItemKind::Line => { self.line_count += 1; @@ -435,4 +482,11 @@ impl AddAssign<&CoverageItem> for CoverageSummary { } } } + + /// Adds multiple coverage items to this summary. + pub fn add_items<'a>(&mut self, items: impl IntoIterator) { + for item in items { + self.add_item(item); + } + } } diff --git a/crates/evm/evm/src/executors/builder.rs b/crates/evm/evm/src/executors/builder.rs index c7ad6c180..2f90f5e65 100644 --- a/crates/evm/evm/src/executors/builder.rs +++ b/crates/evm/evm/src/executors/builder.rs @@ -59,7 +59,7 @@ impl ExecutorBuilder { /// Sets the EVM spec to use. #[inline] - pub fn spec(mut self, spec: SpecId) -> Self { + pub fn spec_id(mut self, spec: SpecId) -> Self { self.spec_id = spec; self } diff --git a/crates/evm/evm/src/executors/fuzz/mod.rs b/crates/evm/evm/src/executors/fuzz/mod.rs index 8a479c019..c7c095619 100644 --- a/crates/evm/evm/src/executors/fuzz/mod.rs +++ b/crates/evm/evm/src/executors/fuzz/mod.rs @@ -1,4 +1,4 @@ -use crate::executors::{Executor, RawCallResult}; +use crate::executors::{Executor, FuzzTestTimer, RawCallResult}; use alloy_dyn_abi::JsonAbiExt; use alloy_json_abi::Function; use alloy_primitives::{map::HashMap, Address, Bytes, Log, U256}; @@ -6,7 +6,7 @@ use eyre::Result; use foundry_common::evm::Breakpoints; use foundry_config::FuzzConfig; use foundry_evm_core::{ - constants::MAGIC_ASSUME, + constants::{MAGIC_ASSUME, TEST_TIMEOUT}, decode::{RevertDecoder, SkipReason}, }; use foundry_evm_coverage::HitMaps; @@ -77,10 +77,12 @@ impl FuzzedExecutor { /// test case. 
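With the `AddAssign` impls gone, callers fold summaries explicitly through `CoverageSummary::merge`. A hedged usage sketch built only from the APIs introduced above; the crate path is taken from this patch:

use foundry_evm_coverage::{CoverageReport, CoverageSummary};

/// Folds the per-file summaries of a report into one overall summary.
fn overall_summary(report: &CoverageReport) -> CoverageSummary {
    let mut total = CoverageSummary::new();
    for (_path, summary) in report.summary_by_file() {
        total.merge(&summary);
    }
    total
}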
/// /// Returns a list of all the consumed gas and calldata of every fuzz case + #[allow(clippy::too_many_arguments)] pub fn fuzz( &self, func: &Function, fuzz_fixtures: &FuzzFixtures, + deployed_libs: &[Address], address: Address, should_fail: bool, rd: &RevertDecoder, @@ -88,7 +90,7 @@ impl FuzzedExecutor { ) -> FuzzTestResult { // Stores the fuzz test execution data. let execution_data = RefCell::new(FuzzTestData::default()); - let state = self.build_fuzz_state(); + let state = self.build_fuzz_state(deployed_libs); let no_zksync_reserved_addresses = state.dictionary_read().no_zksync_reserved_addresses(); let dictionary_weight = self.config.dictionary.dictionary_weight.min(100); let strategy = proptest::prop_oneof![ @@ -99,7 +101,15 @@ impl FuzzedExecutor { let max_traces_to_collect = std::cmp::max(1, self.config.gas_report_samples) as usize; let show_logs = self.config.show_logs; + // Start timer for this fuzz test. + let timer = FuzzTestTimer::new(self.config.timeout); + let run_result = self.runner.clone().run(&strategy, |calldata| { + // Check if the timeout has been reached. + if timer.is_timed_out() { + return Err(TestCaseError::fail(TEST_TIMEOUT)); + } + let fuzz_res = self.single_fuzz(address, should_fail, calldata)?; // If running with progress then increment current run. @@ -194,17 +204,21 @@ impl FuzzedExecutor { } Err(TestError::Fail(reason, _)) => { let reason = reason.to_string(); - result.reason = (!reason.is_empty()).then_some(reason); - - let args = if let Some(data) = calldata.get(4..) { - func.abi_decode_input(data, false).unwrap_or_default() + if reason == TEST_TIMEOUT { + // If the reason is a timeout, we consider the fuzz test successful. + result.success = true; } else { - vec![] - }; + result.reason = (!reason.is_empty()).then_some(reason); + let args = if let Some(data) = calldata.get(4..) 
{ + func.abi_decode_input(data, false).unwrap_or_default() + } else { + vec![] + }; - result.counterexample = Some(CounterExample::Single( - BaseCounterExample::from_fuzz_call(calldata, args, call.traces), - )); + result.counterexample = Some(CounterExample::Single( + BaseCounterExample::from_fuzz_call(calldata, args, call.traces), + )); + } } } @@ -263,17 +277,19 @@ impl FuzzedExecutor { } /// Stores fuzz state for use with [fuzz_calldata_from_state] - pub fn build_fuzz_state(&self) -> EvmFuzzState { + pub fn build_fuzz_state(&self, deployed_libs: &[Address]) -> EvmFuzzState { if let Some(fork_db) = self.executor.backend.active_fork_db() { EvmFuzzState::new( fork_db, self.config.dictionary, + deployed_libs, self.config.no_zksync_reserved_addresses, ) } else { EvmFuzzState::new( self.executor.backend.mem_db(), self.config.dictionary, + deployed_libs, self.config.no_zksync_reserved_addresses, ) } diff --git a/crates/evm/evm/src/executors/invariant/mod.rs b/crates/evm/evm/src/executors/invariant/mod.rs index 0fcb9dac8..ef702eb22 100644 --- a/crates/evm/evm/src/executors/invariant/mod.rs +++ b/crates/evm/evm/src/executors/invariant/mod.rs @@ -8,8 +8,10 @@ use eyre::{eyre, ContextCompat, Result}; use foundry_common::contracts::{ContractsByAddress, ContractsByArtifact}; use foundry_config::InvariantConfig; use foundry_evm_core::{ - abi::HARDHAT_CONSOLE_ADDRESS, - constants::{CALLER, CHEATCODE_ADDRESS, DEFAULT_CREATE2_DEPLOYER, MAGIC_ASSUME}, + constants::{ + CALLER, CHEATCODE_ADDRESS, DEFAULT_CREATE2_DEPLOYER, HARDHAT_CONSOLE_ADDRESS, MAGIC_ASSUME, + TEST_TIMEOUT, + }, precompiles::PRECOMPILES, }; use foundry_evm_fuzz::{ @@ -48,7 +50,7 @@ pub use result::InvariantFuzzTestResult; use serde::{Deserialize, Serialize}; mod shrink; -use crate::executors::EvmError; +use crate::executors::{EvmError, FuzzTestTimer}; pub use shrink::check_sequence; sol! { @@ -321,6 +323,7 @@ impl<'a> InvariantExecutor<'a> { &mut self, invariant_contract: InvariantContract<'_>, fuzz_fixtures: &FuzzFixtures, + deployed_libs: &[Address], progress: Option<&ProgressBar>, ) -> Result { // Throw an error to abort test run if the invariant function accepts input params @@ -329,7 +332,10 @@ impl<'a> InvariantExecutor<'a> { } let (invariant_test, invariant_strategy) = - self.prepare_test(&invariant_contract, fuzz_fixtures)?; + self.prepare_test(&invariant_contract, fuzz_fixtures, deployed_libs)?; + + // Start timer for this invariant test. + let timer = FuzzTestTimer::new(self.config.timeout); let _ = self.runner.run(&invariant_strategy, |first_input| { // Create current invariant run data. @@ -346,6 +352,15 @@ impl<'a> InvariantExecutor<'a> { } while current_run.depth < self.config.depth { + // Check if the timeout has been reached. + if timer.is_timed_out() { + // Since we never record a revert here the test is still considered + // successful even though it timed out. We *want* + // this behavior for now, so that's ok, but + // future developers should be aware of this. + return Err(TestCaseError::fail(TEST_TIMEOUT)); + } + let tx = current_run.inputs.last().ok_or_else(|| { TestCaseError::fail("No input generated to call fuzzed target.") })?; @@ -492,6 +507,7 @@ impl<'a> InvariantExecutor<'a> { &mut self, invariant_contract: &InvariantContract<'_>, fuzz_fixtures: &FuzzFixtures, + deployed_libs: &[Address], ) -> Result<(InvariantTest, impl Strategy)> { // Finds out the chosen deployed contracts and/or senders. 
self.select_contract_artifacts(invariant_contract.address)?; @@ -502,6 +518,7 @@ impl<'a> InvariantExecutor<'a> { let fuzz_state = EvmFuzzState::new( self.executor.backend().mem_db(), self.config.dictionary, + deployed_libs, self.config.no_zksync_reserved_addresses, ); diff --git a/crates/evm/evm/src/executors/mod.rs b/crates/evm/evm/src/executors/mod.rs index f483076ef..84d57e685 100644 --- a/crates/evm/evm/src/executors/mod.rs +++ b/crates/evm/evm/src/executors/mod.rs @@ -24,6 +24,7 @@ use foundry_evm_core::{ }, decode::{RevertDecoder, SkipReason}, utils::StateChangeset, + InspectorExt, }; use foundry_evm_coverage::HitMaps; use foundry_evm_traces::{SparsedTraceArena, TraceMode}; @@ -37,7 +38,10 @@ use revm::{ }, Database, }; -use std::borrow::Cow; +use std::{ + borrow::Cow, + time::{Duration, Instant}, +}; mod builder; pub use builder::ExecutorBuilder; @@ -84,9 +88,7 @@ pub struct Executor { pub env: EnvWithHandlerCfg, /// The Revm inspector stack. pub inspector: InspectorStack, - /// The gas limit for calls and deployments. This is different from the gas limit imposed by - /// the passed in environment, as those limits are used by the EVM for certain opcodes like - /// `gaslimit`. + /// The gas limit for calls and deployments. gas_limit: u64, /// Whether `failed()` should be called on the test contract to determine if the test failed. legacy_assertions: bool, @@ -182,6 +184,36 @@ impl Executor { self.env.spec_id() } + /// Sets the EVM spec ID. + pub fn set_spec_id(&mut self, spec_id: SpecId) { + self.env.handler_cfg.spec_id = spec_id; + } + + /// Returns the gas limit for calls and deployments. + /// + /// This is different from the gas limit imposed by the passed in environment, as those limits + /// are used by the EVM for certain opcodes like `gaslimit`. + pub fn gas_limit(&self) -> u64 { + self.gas_limit + } + + /// Sets the gas limit for calls and deployments. + pub fn set_gas_limit(&mut self, gas_limit: u64) { + self.gas_limit = gas_limit; + } + + /// Returns whether `failed()` should be called on the test contract to determine if the test + /// failed. + pub fn legacy_assertions(&self) -> bool { + self.legacy_assertions + } + + /// Sets whether `failed()` should be called on the test contract to determine if the test + /// failed. + pub fn set_legacy_assertions(&mut self, legacy_assertions: bool) { + self.legacy_assertions = legacy_assertions; + } + /// Creates the default CREATE2 Contract Deployer for local tests and scripts. pub fn deploy_create2_deployer(&mut self) -> eyre::Result<()> { trace!("deploying local create2 deployer"); @@ -191,7 +223,7 @@ impl Executor { .ok_or_else(|| BackendError::MissingAccount(DEFAULT_CREATE2_DEPLOYER))?; // If the deployer is not currently deployed, deploy the default one. - if create2_deployer_account.code.map_or(true, |code| code.is_empty()) { + if create2_deployer_account.code.is_none_or(|code| code.is_empty()) { let creator = DEFAULT_CREATE2_DEPLOYER_DEPLOYER; // Probably 0, but just in case. @@ -266,9 +298,8 @@ impl Executor { } #[inline] - pub fn set_gas_limit(&mut self, gas_limit: u64) -> &mut Self { - self.gas_limit = gas_limit; - self + pub fn create2_deployer(&self) -> Address { + self.inspector().create2_deployer() } /// Deploys a contract and commits the new state to the underlying database. @@ -752,8 +783,12 @@ pub enum EvmError { #[error("{_0}")] Skip(SkipReason), /// Any other error. 
- #[error(transparent)] - Eyre(eyre::Error), + #[error("{}", foundry_common::errors::display_chain(.0))] + Eyre( + #[from] + #[source] + eyre::Report, + ), } impl From for EvmError { @@ -768,16 +803,6 @@ impl From for EvmError { } } -impl From for EvmError { - fn from(err: eyre::Report) -> Self { - let mut chained_cause = String::new(); - for cause in err.chain() { - chained_cause.push_str(format!("{cause}; ").as_str()); - } - Self::Eyre(eyre::format_err!("{chained_cause}")) - } -} - /// The result of a deployment. #[derive(Debug)] pub struct DeployResult { @@ -1023,3 +1048,20 @@ fn convert_executed_result( chisel_state, }) } + +/// Timer for a fuzz test. +pub struct FuzzTestTimer { + /// Inner fuzz test timer - (test start time, test duration). + inner: Option<(Instant, Duration)>, +} + +impl FuzzTestTimer { + pub fn new(timeout: Option) -> Self { + Self { inner: timeout.map(|timeout| (Instant::now(), Duration::from_secs(timeout.into()))) } + } + + /// Whether the current fuzz test timed out and should be stopped. + pub fn is_timed_out(&self) -> bool { + self.inner.is_some_and(|(start, duration)| start.elapsed() > duration) + } +} diff --git a/crates/evm/evm/src/executors/trace.rs b/crates/evm/evm/src/executors/trace.rs index 69c68442b..b55517a67 100644 --- a/crates/evm/evm/src/executors/trace.rs +++ b/crates/evm/evm/src/executors/trace.rs @@ -1,8 +1,9 @@ use crate::executors::{Executor, ExecutorBuilder}; +use alloy_primitives::Address; use foundry_compilers::artifacts::EvmVersion; use foundry_config::{utils::evm_spec_id, Chain, Config}; use foundry_evm_core::{backend::Backend, fork::CreateFork, opts::EvmOpts}; -use foundry_evm_traces::{InternalTraceMode, TraceMode}; +use foundry_evm_traces::TraceMode; use revm::primitives::{Env, SpecId}; use std::ops::{Deref, DerefMut}; @@ -16,23 +17,19 @@ impl TracingExecutor { env: revm::primitives::Env, fork: Option, version: Option, - debug: bool, - decode_internal: bool, - alphanet: bool, + trace_mode: TraceMode, + odyssey: bool, + create2_deployer: Address, ) -> Self { let db = Backend::spawn(fork); - let trace_mode = - TraceMode::Call.with_debug(debug).with_decode_internal(if decode_internal { - InternalTraceMode::Full - } else { - InternalTraceMode::None - }); Self { // configures a bare version of the evm executor: no cheatcode inspector is enabled, // tracing will be enabled only for the targeted transaction executor: ExecutorBuilder::new() - .inspectors(|stack| stack.trace_mode(trace_mode).alphanet(alphanet)) - .spec(evm_spec_id(&version.unwrap_or_default(), alphanet)) + .inspectors(|stack| { + stack.trace_mode(trace_mode).odyssey(odyssey).create2_deployer(create2_deployer) + }) + .spec_id(evm_spec_id(version.unwrap_or_default(), odyssey)) .build(env, db), } } @@ -54,7 +51,7 @@ impl TracingExecutor { let fork = evm_opts.get_fork(config, env.clone()); - Ok((env, fork, evm_opts.get_remote_chain_id().await, evm_opts.alphanet)) + Ok((env, fork, evm_opts.get_remote_chain_id().await, evm_opts.odyssey)) } } diff --git a/crates/evm/evm/src/inspectors/stack.rs b/crates/evm/evm/src/inspectors/stack.rs index f312d123a..0db809f5e 100644 --- a/crates/evm/evm/src/inspectors/stack.rs +++ b/crates/evm/evm/src/inspectors/stack.rs @@ -55,10 +55,12 @@ pub struct InspectorStackBuilder { /// In isolation mode all top-level calls are executed as a separate transaction in a separate /// EVM context, enabling more precise gas accounting and transaction state changes. pub enable_isolation: bool, - /// Whether to enable Alphanet features. 
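The `FuzzTestTimer` added in `executors/mod.rs` above is deliberately small: a `None` timeout disables the check entirely, and `is_timed_out` is cheap enough to call once per fuzz case. A standalone reproduction, runnable with std only (names are illustrative):

use std::time::{Duration, Instant};

struct TestTimer {
    inner: Option<(Instant, Duration)>,
}

impl TestTimer {
    fn new(timeout: Option<u32>) -> Self {
        Self { inner: timeout.map(|t| (Instant::now(), Duration::from_secs(t.into()))) }
    }

    fn is_timed_out(&self) -> bool {
        self.inner.is_some_and(|(start, duration)| start.elapsed() > duration)
    }
}

fn main() {
    let timer = TestTimer::new(Some(1));
    let mut runs = 0u64;
    while !timer.is_timed_out() {
        runs += 1; // stand-in for executing one fuzz case
    }
    println!("executed {runs} cases before the 1s budget elapsed");
}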
- pub alphanet: bool, + /// Whether to enable Odyssey features. + pub odyssey: bool, /// The wallets to set in the cheatcodes context. pub wallets: Option, + /// The CREATE2 deployer address. + pub create2_deployer: Address, } impl InspectorStackBuilder { @@ -148,11 +150,17 @@ impl InspectorStackBuilder { self } - /// Set whether to enable Alphanet features. + /// Set whether to enable Odyssey features. /// For description of call isolation, see [`InspectorStack::enable_isolation`]. #[inline] - pub fn alphanet(mut self, yes: bool) -> Self { - self.alphanet = yes; + pub fn odyssey(mut self, yes: bool) -> Self { + self.odyssey = yes; + self + } + + #[inline] + pub fn create2_deployer(mut self, create2_deployer: Address) -> Self { + self.create2_deployer = create2_deployer; self } @@ -169,8 +177,9 @@ impl InspectorStackBuilder { print, chisel_state, enable_isolation, - alphanet, + odyssey, wallets, + create2_deployer, } = self; let mut stack = InspectorStack::new(); @@ -196,7 +205,8 @@ impl InspectorStackBuilder { stack.tracing(trace_mode); stack.enable_isolation(enable_isolation); - stack.alphanet(alphanet); + stack.odyssey(odyssey); + stack.set_create2_deployer(create2_deployer); // environment, must come after all of the inspectors if let Some(block) = block { @@ -281,7 +291,8 @@ pub struct InspectorStackInner { pub printer: Option, pub tracer: Option, pub enable_isolation: bool, - pub alphanet: bool, + pub odyssey: bool, + pub create2_deployer: Address, /// Flag marking if we are in the inner EVM context. pub in_inner_context: bool, @@ -394,8 +405,14 @@ impl InspectorStack { /// Set whether to enable call isolation. #[inline] - pub fn alphanet(&mut self, yes: bool) { - self.alphanet = yes; + pub fn odyssey(&mut self, yes: bool) { + self.odyssey = yes; + } + + /// Set the CREATE2 deployer address. + #[inline] + pub fn set_create2_deployer(&mut self, deployer: Address) { + self.create2_deployer = deployer; } /// Set whether to enable the log collector. 
@@ -453,7 +470,7 @@ impl InspectorStack { .map(|cheatcodes| cheatcodes.labels.clone()) .unwrap_or_default(), traces, - coverage: coverage.map(|coverage| coverage.maps), + coverage: coverage.map(|coverage| coverage.finish()), cheatcodes, chisel_state: chisel_state.and_then(|state| state.state), } @@ -1030,6 +1047,18 @@ impl InspectorExt for InspectorStackRefMut<'_> { )); } + fn is_alphanet(&self) -> bool { + self.inner.alphanet + } + + fn is_odyssey(&self) -> bool { + self.inner.odyssey + } + + fn create2_deployer(&self) -> Address { + self.inner.create2_deployer + } + fn trace_zksync(&mut self, ecx: &mut EvmContext<&mut dyn DatabaseExt>, call_traces: Vec) { call_inspectors!([&mut self.tracer], |inspector| InspectorExt::trace_zksync( inspector, @@ -1037,10 +1066,6 @@ impl InspectorExt for InspectorStackRefMut<'_> { call_traces )); } - - fn is_alphanet(&self) -> bool { - self.inner.alphanet - } } impl Inspector<&mut dyn DatabaseExt> for InspectorStack { @@ -1140,8 +1165,12 @@ impl InspectorExt for InspectorStack { self.as_mut().should_use_create2_factory(ecx, inputs) } - fn is_alphanet(&self) -> bool { - self.alphanet + fn is_odyssey(&self) -> bool { + self.odyssey + } + + fn create2_deployer(&self) -> Address { + self.create2_deployer } } diff --git a/crates/evm/fuzz/src/strategies/param.rs b/crates/evm/fuzz/src/strategies/param.rs index 6efd4b48e..7b8ee0e9b 100644 --- a/crates/evm/fuzz/src/strategies/param.rs +++ b/crates/evm/fuzz/src/strategies/param.rs @@ -149,16 +149,27 @@ pub fn fuzz_param_from_state( // Convert the value based on the parameter type match *param { - DynSolType::Address => value() - .prop_map(move |value| { - let addr = Address::from_word(value); - if no_zksync_reserved_addresses { - DynSolValue::Address(foundry_zksync_core::to_safe_address(addr)) - } else { - DynSolValue::Address(addr) - } - }) - .boxed(), + DynSolType::Address => { + let deployed_libs = state.deployed_libs.clone(); + value() + .prop_filter_map("filter address fuzzed from state", move |value| { + let fuzzed_addr = Address::from_word(value); + // Do not use addresses of deployed libraries as fuzz input. + // See . + if !deployed_libs.contains(&fuzzed_addr) { + if no_zksync_reserved_addresses { + Some(DynSolValue::Address(foundry_zksync_core::to_safe_address( + fuzzed_addr, + ))) + } else { + Some(DynSolValue::Address(fuzzed_addr)) + } + } else { + None + } + }) + .boxed() + } DynSolType::Function => value() .prop_map(move |value| { DynSolValue::Function(alloy_primitives::Function::from_word(value)) @@ -244,7 +255,7 @@ mod tests { let f = "testArray(uint64[2] calldata values)"; let func = get_func(f).unwrap(); let db = CacheDB::new(EmptyDB::default()); - let state = EvmFuzzState::new(&db, FuzzDictionaryConfig::default(), false); + let state = EvmFuzzState::new(&db, FuzzDictionaryConfig::default(), &[], false); let strategy = proptest::prop_oneof![ 60 => fuzz_calldata(func.clone(), &FuzzFixtures::default(), false), 40 => fuzz_calldata_from_state(func, &state), diff --git a/crates/evm/fuzz/src/strategies/state.rs b/crates/evm/fuzz/src/strategies/state.rs index 03ca2559b..e85328bee 100644 --- a/crates/evm/fuzz/src/strategies/state.rs +++ b/crates/evm/fuzz/src/strategies/state.rs @@ -27,12 +27,15 @@ const PUSH_BYTE_ANALYSIS_LIMIT: usize = 24 * 1024; #[derive(Clone, Debug)] pub struct EvmFuzzState { inner: Arc>, + /// Addresses of external libraries deployed in test setup, excluded from fuzz test inputs. + pub deployed_libs: Vec
, } impl EvmFuzzState { pub fn new( db: &CacheDB, config: FuzzDictionaryConfig, + deployed_libs: &[Address], no_zksync_reserved_addresses: bool, ) -> Self { // Sort accounts to ensure deterministic dictionary generation from the same setUp state. @@ -42,7 +45,7 @@ impl EvmFuzzState { // Create fuzz dictionary and insert values from db state. let mut dictionary = FuzzDictionary::new(config, no_zksync_reserved_addresses); dictionary.insert_db_values(accs); - Self { inner: Arc::new(RwLock::new(dictionary)) } + Self { inner: Arc::new(RwLock::new(dictionary)), deployed_libs: deployed_libs.to_vec() } } pub fn collect_values(&self, values: impl IntoIterator) { diff --git a/crates/evm/traces/Cargo.toml b/crates/evm/traces/Cargo.toml index 90eede863..a10e9230d 100644 --- a/crates/evm/traces/Cargo.toml +++ b/crates/evm/traces/Cargo.toml @@ -38,11 +38,12 @@ eyre.workspace = true futures.workspace = true itertools.workspace = true serde.workspace = true +serde_json.workspace = true tokio = { workspace = true, features = ["time", "macros"] } tracing.workspace = true tempfile.workspace = true rayon.workspace = true -solang-parser.workspace = true +solar-parse.workspace = true revm.workspace = true [dev-dependencies] diff --git a/crates/evm/traces/src/debug/sources.rs b/crates/evm/traces/src/debug/sources.rs index 40e540a97..b2e37e32d 100644 --- a/crates/evm/traces/src/debug/sources.rs +++ b/crates/evm/traces/src/debug/sources.rs @@ -3,7 +3,7 @@ use foundry_common::compact_to_contract; use foundry_compilers::{ artifacts::{ sourcemap::{SourceElement, SourceMap}, - Bytecode, ContractBytecodeSome, Libraries, Source, + Bytecode, Contract, ContractBytecodeSome, Libraries, Source, }, multi::MultiCompilerLanguage, Artifact, Compiler, ProjectCompileOutput, @@ -11,9 +11,13 @@ use foundry_compilers::{ use foundry_evm_core::utils::PcIcMap; use foundry_linking::Linker; use rayon::prelude::*; -use solang_parser::pt::SourceUnitPart; +use solar_parse::{ + interface::{Pos, Session}, + Parser, +}; use std::{ collections::{BTreeMap, HashMap}, + ops::Range, path::{Path, PathBuf}, sync::Arc, }; @@ -25,7 +29,7 @@ pub struct SourceData { pub path: PathBuf, /// Maps contract name to (start, end) of the contract definition in the source code. /// This is useful for determining which contract contains given function definition. - contract_definitions: Vec<(String, usize, usize)>, + contract_definitions: Vec<(String, Range)>, } impl SourceData { @@ -35,26 +39,26 @@ impl SourceData { match language { MultiCompilerLanguage::Vyper(_) => { // Vyper contracts have the same name as the file name. 
- if let Some(name) = path.file_name().map(|s| s.to_string_lossy().to_string()) { - contract_definitions.push((name, 0, source.len())); + if let Some(name) = path.file_stem().map(|s| s.to_string_lossy().to_string()) { + contract_definitions.push((name, 0..source.len())); } } MultiCompilerLanguage::Solc(_) => { - if let Ok((parsed, _)) = solang_parser::parse(&source, 0) { - for item in parsed.0 { - let SourceUnitPart::ContractDefinition(contract) = item else { - continue; - }; - let Some(name) = contract.name else { - continue; - }; - contract_definitions.push(( - name.name, - name.loc.start(), - contract.loc.end(), - )); + let sess = Session::builder().with_silent_emitter(None).build(); + let _ = sess.enter(|| -> solar_parse::interface::Result<()> { + let arena = solar_parse::ast::Arena::new(); + let filename = path.clone().into(); + let mut parser = + Parser::from_source_code(&sess, &arena, filename, source.to_string())?; + let ast = parser.parse_file().map_err(|e| e.emit())?; + for item in ast.items { + if let solar_parse::ast::ItemKind::Contract(contract) = &item.kind { + let range = item.span.lo().to_usize()..item.span.hi().to_usize(); + contract_definitions.push((contract.name.to_string(), range)); + } } - } + Ok(()) + }); } } @@ -65,8 +69,8 @@ impl SourceData { pub fn find_contract_name(&self, start: usize, end: usize) -> Option<&str> { self.contract_definitions .iter() - .find(|(_, s, e)| start >= *s && end <= *e) - .map(|(name, _, _)| name.as_str()) + .find(|(_, r)| start >= r.start && end <= r.end) + .map(|(name, _)| name.as_str()) } } @@ -84,7 +88,7 @@ impl ArtifactData { fn new(bytecode: ContractBytecodeSome, build_id: String, file_id: u32) -> Result { let parse = |b: &Bytecode, name: &str| { // Only parse source map if it's not empty. - let source_map = if b.source_map.as_ref().map_or(true, |s| s.is_empty()) { + let source_map = if b.source_map.as_ref().is_none_or(|s| s.is_empty()) { Ok(None) } else { b.source_map().transpose().wrap_err_with(|| { @@ -133,7 +137,7 @@ impl ContractSources { Ok(sources) } - pub fn insert( + pub fn insert>( &mut self, output: &ProjectCompileOutput, root: &Path, @@ -182,26 +186,22 @@ impl ContractSources { let mut files: BTreeMap> = BTreeMap::new(); for (build_id, build) in output.builds() { for (source_id, path) in &build.source_id_to_path { - let source_data = if let Some(source_data) = files.get(path) { - source_data.clone() - } else { - let source = Source::read(path).wrap_err_with(|| { - format!("failed to read artifact source file for `{}`", path.display()) - })?; - - let stripped = path.strip_prefix(root).unwrap_or(path).to_path_buf(); - - let source_data = Arc::new(SourceData::new( - source.content.clone(), - build.language.into(), - stripped, - )); - - files.insert(path.clone(), source_data.clone()); - - source_data + let source_data = match files.entry(path.clone()) { + std::collections::btree_map::Entry::Vacant(entry) => { + let source = Source::read(path).wrap_err_with(|| { + format!("failed to read artifact source file for `{}`", path.display()) + })?; + let stripped = path.strip_prefix(root).unwrap_or(path).to_path_buf(); + let source_data = Arc::new(SourceData::new( + source.content.clone(), + build.language.into(), + stripped, + )); + entry.insert(source_data.clone()); + source_data + } + std::collections::btree_map::Entry::Occupied(entry) => entry.get().clone(), }; - self.sources_by_id .entry(build_id.clone()) .or_default() @@ -212,6 +212,14 @@ impl ContractSources { Ok(()) } + /// Merges given contract sources. 
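`find_contract_name` above is now a plain byte-range containment query over the `Range`s collected while parsing. A std-only sketch of that lookup (illustrative data, not from the patch):

use std::ops::Range;

/// Returns the first definition whose byte range fully contains `start..end`.
fn containing<'a>(defs: &'a [(String, Range<usize>)], start: usize, end: usize) -> Option<&'a str> {
    defs.iter()
        .find(|(_, r)| start >= r.start && end <= r.end)
        .map(|(name, _)| name.as_str())
}

fn main() {
    let defs: Vec<(String, Range<usize>)> =
        vec![("Counter".to_string(), 10..120), ("Token".to_string(), 130..400)];
    assert_eq!(containing(&defs, 150, 200), Some("Token"));
    assert_eq!(containing(&defs, 0, 5), None);
}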
+ pub fn merge(&mut self, sources: Self) { + self.sources_by_id.extend(sources.sources_by_id); + for (name, artifacts) in sources.artifacts_by_name { + self.artifacts_by_name.entry(name).or_default().extend(artifacts); + } + } + /// Returns all sources for a contract by name. pub fn get_sources( &self, diff --git a/crates/evm/traces/src/decoder/mod.rs b/crates/evm/traces/src/decoder/mod.rs index 0fb7ca7f5..bef41c7eb 100644 --- a/crates/evm/traces/src/decoder/mod.rs +++ b/crates/evm/traces/src/decoder/mod.rs @@ -702,10 +702,13 @@ fn reconstruct_params(event: &Event, decoded: &DecodedEvent) -> Vec let mut unindexed = 0; let mut inputs = vec![]; for input in event.inputs.iter() { - if input.indexed { + // Prevent panic of event `Transfer(from, to)` decoded with a signature + // `Transfer(address indexed from, address indexed to, uint256 indexed tokenId)` by making + // sure the event inputs is not higher than decoded indexed / un-indexed values. + if input.indexed && indexed < decoded.indexed.len() { inputs.push(decoded.indexed[indexed].clone()); indexed += 1; - } else { + } else if unindexed < decoded.body.len() { inputs.push(decoded.body[unindexed].clone()); unindexed += 1; } diff --git a/crates/evm/traces/src/identifier/mod.rs b/crates/evm/traces/src/identifier/mod.rs index 008e5f841..51f949832 100644 --- a/crates/evm/traces/src/identifier/mod.rs +++ b/crates/evm/traces/src/identifier/mod.rs @@ -12,7 +12,7 @@ mod etherscan; pub use etherscan::EtherscanIdentifier; mod signatures; -pub use signatures::{SignaturesIdentifier, SingleSignaturesIdentifier}; +pub use signatures::{CachedSignatures, SignaturesIdentifier, SingleSignaturesIdentifier}; /// An address identity pub struct AddressIdentity<'a> { diff --git a/crates/evm/traces/src/identifier/signatures.rs b/crates/evm/traces/src/identifier/signatures.rs index 1e3924aa3..801f9da37 100644 --- a/crates/evm/traces/src/identifier/signatures.rs +++ b/crates/evm/traces/src/identifier/signatures.rs @@ -1,7 +1,7 @@ -use alloy_json_abi::{Event, Function}; +use alloy_json_abi::{Error, Event, Function}; use alloy_primitives::{hex, map::HashSet}; use foundry_common::{ - abi::{get_event, get_func}, + abi::{get_error, get_event, get_func}, fs, selectors::{OpenChainClient, SelectorType}, }; @@ -12,16 +12,35 @@ use tokio::sync::RwLock; pub type SingleSignaturesIdentifier = Arc>; #[derive(Debug, Default, Serialize, Deserialize)] -struct CachedSignatures { - events: BTreeMap, - functions: BTreeMap, +pub struct CachedSignatures { + pub errors: BTreeMap, + pub events: BTreeMap, + pub functions: BTreeMap, } +impl CachedSignatures { + #[instrument(target = "evm::traces")] + pub fn load(cache_path: PathBuf) -> Self { + let path = cache_path.join("signatures"); + if path.is_file() { + fs::read_json_file(&path) + .map_err( + |err| warn!(target: "evm::traces", ?path, ?err, "failed to read cache file"), + ) + .unwrap_or_default() + } else { + if let Err(err) = std::fs::create_dir_all(cache_path) { + warn!(target: "evm::traces", "could not create signatures cache dir: {:?}", err); + } + Self::default() + } + } +} /// An identifier that tries to identify functions and events using signatures found at /// `https://openchain.xyz` or a local cache. #[derive(Debug)] pub struct SignaturesIdentifier { - /// Cached selectors for functions and events. + /// Cached selectors for functions, events and custom errors. cached: CachedSignatures, /// Location where to save `CachedSignatures`. 
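`CachedSignatures::load` above is now public, so other commands can read the on-disk selector cache directly; it takes the cache directory, appends `signatures` itself, and falls back to an empty cache when the file is missing. A hedged sketch; the re-export path is assumed from the `identifier` module changes in this patch:

use foundry_evm_traces::identifier::CachedSignatures;
use std::path::Path;

/// Counts the cached selectors per kind; `cache_dir` is the Foundry cache directory.
fn selector_counts(cache_dir: &Path) -> (usize, usize, usize) {
    let cached = CachedSignatures::load(cache_dir.to_path_buf());
    (cached.functions.len(), cached.events.len(), cached.errors.len())
}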
cached_path: Option, @@ -42,16 +61,7 @@ impl SignaturesIdentifier { let identifier = if let Some(cache_path) = cache_path { let path = cache_path.join("signatures"); trace!(target: "evm::traces", ?path, "reading signature cache"); - let cached = if path.is_file() { - fs::read_json_file(&path) - .map_err(|err| warn!(target: "evm::traces", ?path, ?err, "failed to read cache file")) - .unwrap_or_default() - } else { - if let Err(err) = std::fs::create_dir_all(cache_path) { - warn!(target: "evm::traces", "could not create signatures cache dir: {:?}", err); - } - CachedSignatures::default() - }; + let cached = CachedSignatures::load(cache_path); Self { cached, cached_path: Some(path), unavailable: HashSet::default(), client } } else { Self { @@ -92,6 +102,7 @@ impl SignaturesIdentifier { let cache = match selector_type { SelectorType::Function => &mut self.cached.functions, SelectorType::Event => &mut self.cached.events, + SelectorType::Error => &mut self.cached.errors, }; let hex_identifiers: Vec = @@ -148,6 +159,19 @@ impl SignaturesIdentifier { pub async fn identify_event(&mut self, identifier: &[u8]) -> Option { self.identify_events(&[identifier]).await.pop().unwrap() } + + /// Identifies `Error`s from its cache or `https://api.openchain.xyz`. + pub async fn identify_errors( + &mut self, + identifiers: impl IntoIterator>, + ) -> Vec> { + self.identify(SelectorType::Error, identifiers, get_error).await + } + + /// Identifies `Error` from its cache or `https://api.openchain.xyz`. + pub async fn identify_error(&mut self, identifier: &[u8]) -> Option { + self.identify_errors(&[identifier]).await.pop().unwrap() + } } impl Drop for SignaturesIdentifier { diff --git a/crates/evm/traces/src/lib.rs b/crates/evm/traces/src/lib.rs index 18136c481..f88efbb1b 100644 --- a/crates/evm/traces/src/lib.rs +++ b/crates/evm/traces/src/lib.rs @@ -11,7 +11,10 @@ extern crate foundry_common; #[macro_use] extern crate tracing; -use foundry_common::contracts::{ContractsByAddress, ContractsByArtifact}; +use foundry_common::{ + contracts::{ContractsByAddress, ContractsByArtifact}, + shell, +}; use revm::interpreter::OpCode; use revm_inspectors::tracing::{ types::{DecodedTraceStep, TraceMemberOrder}, @@ -183,15 +186,23 @@ pub async fn decode_trace_arena( /// Render a collection of call traces to a string. pub fn render_trace_arena(arena: &SparsedTraceArena) -> String { - render_trace_arena_with_bytecodes(arena, false) + render_trace_arena_inner(arena, false, false) } -/// Render a collection of call traces to a string optionally including contract creation bytecodes. -pub fn render_trace_arena_with_bytecodes( +/// Render a collection of call traces to a string optionally including contract creation bytecodes +/// and in JSON format. +pub fn render_trace_arena_inner( arena: &SparsedTraceArena, with_bytecodes: bool, + with_storage_changes: bool, ) -> String { - let mut w = TraceWriter::new(Vec::::new()).write_bytecodes(with_bytecodes); + if shell::is_json() { + return serde_json::to_string(&arena.resolve_arena()).expect("Failed to write traces"); + } + + let mut w = TraceWriter::new(Vec::::new()) + .write_bytecodes(with_bytecodes) + .with_storage_changes(with_storage_changes); w.write_arena(&arena.resolve_arena()).expect("Failed to write traces"); String::from_utf8(w.into_writer()).expect("trace writer wrote invalid UTF-8") } @@ -289,6 +300,8 @@ pub enum TraceMode { /// /// Used by debugger. Debug, + /// Debug trace with storage changes. 
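Custom error selectors now go through the same lookup machinery as functions and events (`identify_error` above). A hedged sketch, assuming the `SignaturesIdentifier` re-export from this patch and `alloy_json_abi::Error::signature`:

use alloy_json_abi::Error;
use foundry_evm_traces::identifier::SignaturesIdentifier;

/// Resolves a 4-byte custom error selector to its signature, if known.
async fn error_signature(ids: &mut SignaturesIdentifier, selector: [u8; 4]) -> Option<String> {
    ids.identify_error(&selector).await.map(|e: Error| e.signature())
}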
+ RecordStateDiff, } impl TraceMode { @@ -308,6 +321,10 @@ impl TraceMode { matches!(self, Self::Jump) } + pub const fn record_state_diff(self) -> bool { + matches!(self, Self::RecordStateDiff) + } + pub const fn is_debug(self) -> bool { matches!(self, Self::Debug) } @@ -324,8 +341,16 @@ impl TraceMode { std::cmp::max(self, mode.into()) } - pub fn with_verbosity(self, verbosiy: u8) -> Self { - if verbosiy >= 3 { + pub fn with_state_changes(self, yes: bool) -> Self { + if yes { + std::cmp::max(self, Self::RecordStateDiff) + } else { + self + } + } + + pub fn with_verbosity(self, verbosity: u8) -> Self { + if verbosity >= 3 { std::cmp::max(self, Self::Call) } else { self @@ -345,7 +370,7 @@ impl TraceMode { StackSnapshotType::None }, record_logs: true, - record_state_diff: false, + record_state_diff: self.record_state_diff(), record_returndata_snapshots: self.is_debug(), record_opcodes_filter: (self.is_jump() || self.is_jump_simple()) .then(|| OpcodeFilter::new().enabled(OpCode::JUMP).enabled(OpCode::JUMPDEST)), diff --git a/crates/fmt/Cargo.toml b/crates/fmt/Cargo.toml index 0bc3e06a6..bc1f44fdc 100644 --- a/crates/fmt/Cargo.toml +++ b/crates/fmt/Cargo.toml @@ -17,7 +17,7 @@ foundry-config.workspace = true alloy-primitives.workspace = true -ariadne = "0.4" +ariadne = "0.5" itertools.workspace = true solang-parser.workspace = true thiserror.workspace = true diff --git a/crates/fmt/src/comments.rs b/crates/fmt/src/comments.rs index e3fb79043..eafdb9989 100644 --- a/crates/fmt/src/comments.rs +++ b/crates/fmt/src/comments.rs @@ -88,7 +88,7 @@ impl CommentWithMetadata { return Self::new( comment, CommentPosition::Prefix, - last_line.map_or(true, str::is_empty), + last_line.is_none_or(str::is_empty), indent_len, ) } diff --git a/crates/fmt/src/helpers.rs b/crates/fmt/src/helpers.rs index 7f05a9c09..1d036ba6b 100644 --- a/crates/fmt/src/helpers.rs +++ b/crates/fmt/src/helpers.rs @@ -97,20 +97,20 @@ pub fn format_diagnostics_report( path.map(|p| p.file_name().unwrap().to_string_lossy().to_string()).unwrap_or_default(); let mut s = Vec::new(); for diag in diagnostics { - let (start, end) = (diag.loc.start(), diag.loc.end()); - let mut report = Report::build(ReportKind::Error, &filename, start) + let span = (filename.as_str(), diag.loc.start()..diag.loc.end()); + let mut report = Report::build(ReportKind::Error, span.clone()) .with_message(format!("{:?}", diag.ty)) .with_label( - Label::new((&filename, start..end)) + Label::new(span) .with_color(Color::Red) - .with_message(format!("{}", diag.message.as_str().fg(Color::Red))), + .with_message(diag.message.as_str().fg(Color::Red)), ); for note in &diag.notes { report = report.with_note(¬e.message); } - report.finish().write((&filename, Source::from(content)), &mut s).unwrap(); + report.finish().write((filename.as_str(), Source::from(content)), &mut s).unwrap(); } String::from_utf8(s).unwrap() } diff --git a/crates/forge/Cargo.toml b/crates/forge/Cargo.toml index 03a0e44a4..4eaa0eee8 100644 --- a/crates/forge/Cargo.toml +++ b/crates/forge/Cargo.toml @@ -85,7 +85,7 @@ dialoguer = { version = "0.11", default-features = false } dunce.workspace = true futures.workspace = true indicatif = "0.17" -inferno = { version = "0.11", default-features = false } +inferno = { version = "0.12", default-features = false } itertools.workspace = true parking_lot.workspace = true regex = { workspace = true, default-features = false } @@ -94,14 +94,16 @@ semver.workspace = true serde_json.workspace = true similar = { version = "2", features = ["inline"] } 
solang-parser.workspace = true +solar-ast.workspace = true +solar-parse.workspace = true strum = { workspace = true, features = ["derive"] } thiserror.workspace = true tokio = { workspace = true, features = ["time"] } toml = { workspace = true, features = ["preserve_order"] } toml_edit = "0.22" -watchexec = "4.1" -watchexec-events = "3.0" -watchexec-signals = "3.0" +watchexec = "5.0" +watchexec-events = "4.0" +watchexec-signals = "4.0" clearscreen = "3.0" evm-disassembler.workspace = true rustls = { version = "0.23", features = ["ring"] } diff --git a/crates/forge/bin/cmd/bind_json.rs b/crates/forge/bin/cmd/bind_json.rs index de7a5a7a7..d8a361134 100644 --- a/crates/forge/bin/cmd/bind_json.rs +++ b/crates/forge/bin/cmd/bind_json.rs @@ -64,7 +64,7 @@ impl BindJsonArgs { let config = self.try_load_config_emit_warnings()?; let project = config.create_project(false, true)?; - let target_path = config.root.0.join(self.out.as_ref().unwrap_or(&config.bind_json.out)); + let target_path = config.root.join(self.out.as_ref().unwrap_or(&config.bind_json.out)); let sources = project.paths.read_input_files()?; let graph = Graph::::resolve_sources(&project.paths, sources)?; diff --git a/crates/forge/bin/cmd/build.rs b/crates/forge/bin/cmd/build.rs index 0821cb1c7..045f04aac 100644 --- a/crates/forge/bin/cmd/build.rs +++ b/crates/forge/bin/cmd/build.rs @@ -180,7 +180,7 @@ impl BuildArgs { // directories as well as the `foundry.toml` configuration file. self.watch.watchexec_config(|| { let config = Config::from(self); - let foundry_toml: PathBuf = config.root.0.join(Config::FILE_NAME); + let foundry_toml: PathBuf = config.root.join(Config::FILE_NAME); [config.src, config.test, config.script, foundry_toml] }) } diff --git a/crates/forge/bin/cmd/clone.rs b/crates/forge/bin/cmd/clone.rs index a77615cf2..07fc47290 100644 --- a/crates/forge/bin/cmd/clone.rs +++ b/crates/forge/bin/cmd/clone.rs @@ -266,7 +266,7 @@ impl CloneArgs { let remappings_txt_content = config.remappings.iter().map(|r| r.to_string()).collect::>().join("\n"); if fs::write(&remappings_txt, remappings_txt_content).is_err() { - return false + return false; } let profile = config.profile.as_str().as_str(); @@ -613,7 +613,7 @@ impl EtherscanClient for Client { mod tests { use super::*; use alloy_primitives::hex; - use foundry_compilers::Artifact; + use foundry_compilers::CompilerContract; use foundry_test_utils::rpc::next_mainnet_etherscan_api_key; use std::collections::BTreeMap; @@ -632,7 +632,7 @@ mod tests { contracts.iter().for_each(|(name, contract)| { if name == contract_name { let compiled_creation_code = - contract.get_bytecode_object().expect("creation code not found"); + contract.bin_ref().expect("creation code not found"); assert!( hex::encode(compiled_creation_code.as_ref()) .starts_with(stripped_creation_code), diff --git a/crates/forge/bin/cmd/config.rs b/crates/forge/bin/cmd/config.rs index 36f4d1731..0aa1fdb63 100644 --- a/crates/forge/bin/cmd/config.rs +++ b/crates/forge/bin/cmd/config.rs @@ -5,7 +5,7 @@ use foundry_cli::utils::LoadConfig; use foundry_common::{evm::EvmArgs, shell}; use foundry_config::fix::fix_tomls; -foundry_config::impl_figment_convert!(ConfigArgs, opts, evm_opts); +foundry_config::impl_figment_convert!(ConfigArgs, opts, evm_args); /// CLI arguments for `forge config`. 
#[derive(Clone, Debug, Parser)] @@ -23,7 +23,7 @@ pub struct ConfigArgs { opts: BuildArgs, #[command(flatten)] - evm_opts: EvmArgs, + evm_args: EvmArgs, } impl ConfigArgs { diff --git a/crates/forge/bin/cmd/coverage.rs b/crates/forge/bin/cmd/coverage.rs index c4b6451dc..b892d0668 100644 --- a/crates/forge/bin/cmd/coverage.rs +++ b/crates/forge/bin/cmd/coverage.rs @@ -6,24 +6,28 @@ use forge::{ coverage::{ analysis::{SourceAnalysis, SourceAnalyzer, SourceFile, SourceFiles}, anchors::find_anchors, - BytecodeReporter, ContractId, CoverageReport, CoverageReporter, DebugReporter, ItemAnchor, - LcovReporter, SummaryReporter, + BytecodeReporter, ContractId, CoverageReport, CoverageReporter, CoverageSummaryReporter, + DebugReporter, ItemAnchor, LcovReporter, }, opts::EvmOpts, utils::IcPcMap, - MultiContractRunnerBuilder, TestOptions, + MultiContractRunnerBuilder, }; use foundry_cli::utils::{LoadConfig, STATIC_FUZZ_SEED}; use foundry_common::{compile::ProjectCompiler, fs}; use foundry_compilers::{ - artifacts::{sourcemap::SourceMap, CompactBytecode, CompactDeployedBytecode, SolcLanguage}, + artifacts::{ + sourcemap::SourceMap, CompactBytecode, CompactDeployedBytecode, SolcLanguage, Source, + }, + compilers::multi::MultiCompiler, Artifact, ArtifactId, Project, ProjectCompileOutput, }; use foundry_config::{Config, SolcReq}; use foundry_zksync_compiler::DualCompiledContracts; use rayon::prelude::*; -use semver::Version; +use semver::{Version, VersionReq}; use std::{ + io, path::{Path, PathBuf}, sync::Arc, }; @@ -40,6 +44,17 @@ pub struct CoverageArgs { #[arg(long, value_enum, default_value = "summary")] report: Vec, + /// The version of the LCOV "tracefile" format to use. + /// + /// Format: `MAJOR[.MINOR]`. + /// + /// Main differences: + /// - `1.x`: The original v1 format. + /// - `2.0`: Adds support for "line end" numbers for functions. + /// - `2.2`: Changes the format of functions. + #[arg(long, default_value = "1", value_parser = parse_lcov_version)] + lcov_version: Version, + /// Enable viaIR with minimum optimization /// /// This can fix most of the "stack too deep" errors while resulting a @@ -154,7 +169,7 @@ impl CoverageArgs { let source = SourceFile { ast, - source: fs::read_to_string(&file) + source: Source::read(&file) .wrap_err("Could not read source code for analysis")?, }; versioned_sources @@ -168,7 +183,7 @@ impl CoverageArgs { // Get source maps and bytecodes let artifacts: Vec = output .artifact_ids() - .par_bridge() + .par_bridge() // This parses source maps, so we want to run it in parallel. 
.filter_map(|(id, artifact)| { let source_id = report.get_source_id(id.version.clone(), id.source.clone())?; ArtifactData::new(&id, source_id, artifact) @@ -231,13 +246,8 @@ impl CoverageArgs { .evm_spec(config.evm_spec_id()) .sender(evm_opts.sender) .with_fork(evm_opts.get_fork(&config, env.clone())) - .with_test_options(TestOptions { - fuzz: config.fuzz.clone(), - invariant: config.invariant.clone(), - ..Default::default() - }) .set_coverage(true) - .build(&root, output.clone(), None, env, evm_opts, DualCompiledContracts::default())?; + .build::(&root, output, env, evm_opts)?; let known_contracts = runner.known_contracts.clone(); @@ -253,10 +263,10 @@ impl CoverageArgs { for result in suite.test_results.values() { let Some(hit_maps) = result.coverage.as_ref() else { continue }; for map in hit_maps.0.values() { - if let Some((id, _)) = known_contracts.find_by_deployed_code(&map.bytecode) { + if let Some((id, _)) = known_contracts.find_by_deployed_code(map.bytecode()) { hits.push((id, map, true)); } else if let Some((id, _)) = - known_contracts.find_by_creation_code(&map.bytecode) + known_contracts.find_by_creation_code(map.bytecode()) { hits.push((id, map, false)); } @@ -285,7 +295,7 @@ impl CoverageArgs { let file_pattern = filter.args().coverage_pattern_inverse.as_ref(); let file_root = &filter.paths().root; report.filter_out_ignored_sources(|path: &Path| { - file_pattern.map_or(true, |re| { + file_pattern.is_none_or(|re| { !re.is_match(&path.strip_prefix(file_root).unwrap_or(path).to_string_lossy()) }) }); @@ -293,21 +303,17 @@ impl CoverageArgs { // Output final report for report_kind in self.report { match report_kind { - CoverageReportKind::Summary => SummaryReporter::default().report(&report), + CoverageReportKind::Summary => CoverageSummaryReporter::default().report(&report), CoverageReportKind::Lcov => { - if let Some(report_file) = self.report_file { - return LcovReporter::new(&mut fs::create_file(root.join(report_file))?) - .report(&report) - } else { - return LcovReporter::new(&mut fs::create_file(root.join("lcov.info"))?) - .report(&report) - } + let path = + root.join(self.report_file.as_deref().unwrap_or("lcov.info".as_ref())); + let mut file = io::BufWriter::new(fs::create_file(path)?); + LcovReporter::new(&mut file, self.lcov_version.clone()).report(&report) } CoverageReportKind::Bytecode => { let destdir = root.join("bytecode-coverage"); fs::create_dir_all(&destdir)?; - BytecodeReporter::new(root.clone(), destdir).report(&report)?; - Ok(()) + BytecodeReporter::new(root.clone(), destdir).report(&report) } CoverageReportKind::Debug => DebugReporter.report(&report), }?; @@ -316,9 +322,10 @@ impl CoverageArgs { } } -// TODO: HTML -#[derive(Clone, Debug, ValueEnum)] +/// Coverage reports to generate. +#[derive(Clone, Debug, Default, ValueEnum)] pub enum CoverageReportKind { + #[default] Summary, Lcov, Debug, @@ -410,3 +417,31 @@ impl BytecodeData { ) } } + +fn parse_lcov_version(s: &str) -> Result { + let vr = VersionReq::parse(&format!("={s}")).map_err(|e| e.to_string())?; + let [c] = &vr.comparators[..] 
else { + return Err("invalid version".to_string()); + }; + if c.op != semver::Op::Exact { + return Err("invalid version".to_string()); + } + if !c.pre.is_empty() { + return Err("pre-releases are not supported".to_string()); + } + Ok(Version::new(c.major, c.minor.unwrap_or(0), c.patch.unwrap_or(0))) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn lcov_version() { + assert_eq!(parse_lcov_version("0").unwrap(), Version::new(0, 0, 0)); + assert_eq!(parse_lcov_version("1").unwrap(), Version::new(1, 0, 0)); + assert_eq!(parse_lcov_version("1.0").unwrap(), Version::new(1, 0, 0)); + assert_eq!(parse_lcov_version("1.1").unwrap(), Version::new(1, 1, 0)); + assert_eq!(parse_lcov_version("1.11").unwrap(), Version::new(1, 11, 0)); + } +} diff --git a/crates/forge/bin/cmd/create.rs b/crates/forge/bin/cmd/create.rs index 15ebbb87a..dc4ba7c60 100644 --- a/crates/forge/bin/cmd/create.rs +++ b/crates/forge/bin/cmd/create.rs @@ -1,10 +1,11 @@ +use crate::cmd::install; use alloy_chains::Chain; use alloy_dyn_abi::{DynSolValue, JsonAbiExt, Specifier}; use alloy_json_abi::{Constructor, JsonAbi}; -use alloy_network::{AnyNetwork, EthereumWallet, Network, ReceiptResponse, TransactionBuilder}; +use alloy_network::{AnyNetwork, AnyTransactionReceipt, EthereumWallet, Network, ReceiptResponse, TransactionBuilder}; use alloy_primitives::{hex, Address, Bytes}; use alloy_provider::{PendingTransactionError, Provider, ProviderBuilder}; -use alloy_rpc_types::{AnyTransactionReceipt, TransactionRequest}; +use alloy_rpc_types::TransactionRequest; use alloy_serde::WithOtherFields; use alloy_signer::Signer; use alloy_transport::{Transport, TransportError}; @@ -73,6 +74,10 @@ pub struct CreateArgs { )] constructor_args_path: Option, + /// Broadcast the transaction. + #[arg(long)] + pub broadcast: bool, + /// Verify contract after creation. #[arg(long)] verify: bool, @@ -120,7 +125,14 @@ pub struct ZkSyncData { impl CreateArgs { /// Executes the command to create a contract pub async fn run(mut self) -> Result<()> { - let config = self.try_load_config_emit_warnings()?; + let mut config = self.try_load_config_emit_warnings()?; + + // Install missing dependencies. + if install::install_missing_dependencies(&mut config) && config.auto_detect_remappings { + // need to re-configure here to also catch additional remappings + config = self.load_config(); + } + // Find Project & Compile let project = config.project()?; @@ -340,6 +352,10 @@ impl CreateArgs { } else { provider.get_chain_id().await? }; + + // Whether to broadcast the transaction or not + let dry_run = !self.broadcast; + if self.unlocked { // Deploy with unlocked account let sender = self.eth.wallet.from.expect("required"); @@ -352,6 +368,7 @@ impl CreateArgs { sender, config.transaction_timeout, id, + dry_run, ) .await } else { @@ -370,6 +387,7 @@ impl CreateArgs { deployer, config.transaction_timeout, id, + dry_run, ) .await } @@ -450,6 +468,7 @@ impl CreateArgs { deployer_address: Address, timeout: u64, id: ArtifactId, + dry_run: bool, ) -> Result<()> { let bin = bin.into_bytes().unwrap_or_else(|| { panic!("no bytecode found in bin object for {}", self.contract.name) @@ -529,6 +548,31 @@ impl CreateArgs { self.verify_preflight_check(constructor_args.clone(), chain, &id).await?; } + if dry_run { + if !shell::is_json() { + sh_warn!("Dry run enabled, not broadcasting transaction\n")?; + + sh_println!("Contract: {}", self.contract.name)?; + sh_println!( + "Transaction: {}", + serde_json::to_string_pretty(&deployer.tx.clone())? 
+ )?; + sh_println!("ABI: {}\n", serde_json::to_string_pretty(&abi)?)?; + + sh_warn!("To broadcast this transaction, add --broadcast to the previous command. See forge create --help for more.")?; + } else { + let output = json!({ + "contract": self.contract.name, + "transaction": &deployer.tx, + "abi": &abi + }); + sh_println!("{}", serde_json::to_string_pretty(&output)?)?; + } + + return Ok(()); + } + // Deploy the actual contract let (deployed_contract, receipt) = deployer.send_with_receipt().await?; @@ -539,7 +583,7 @@ impl CreateArgs { "deployedTo": address.to_string(), "transactionHash": receipt.transaction_hash }); - sh_println!("{output}")?; + sh_println!("{}", serde_json::to_string_pretty(&output)?)?; } else { sh_println!("Deployer: {deployer_address}")?; sh_println!("Deployed to: {address}")?; @@ -714,6 +758,11 @@ impl CreateArgs { None }; let verify = VerifyArgs { address, contract: Some(self.contract), compiler_version: None, @@ -734,7 +783,12 @@ impl CreateArgs { evm_version: self.opts.compiler.evm_version, show_standard_json_input: self.show_standard_json_input, guess_constructor_args: false, + compilation_profile: None, // TODO(zk): provide comp profile zksync: self.opts.compiler.zk.enabled(), }; sh_println!("Waiting for {} to detect contract deployment...", verify.verifier.verifier)?; diff --git a/crates/forge/bin/cmd/debug.rs b/crates/forge/bin/cmd/debug.rs index 421478bd5..5ccfc13d5 100644 --- a/crates/forge/bin/cmd/debug.rs +++ b/crates/forge/bin/cmd/debug.rs @@ -6,7 +6,7 @@ use foundry_common::evm::EvmArgs; use std::path::PathBuf; // Loads project's figment and merges the build cli arguments into it -foundry_config::impl_figment_convert!(DebugArgs, opts, evm_opts); +foundry_config::impl_figment_convert!(DebugArgs, opts, evm_args); /// CLI arguments for `forge debug`. #[derive(Clone, Debug, Parser)] @@ -46,7 +46,7 @@ pub struct DebugArgs { pub opts: CoreBuildArgs, #[command(flatten)] - pub evm_opts: EvmArgs, + pub evm_args: EvmArgs, } impl DebugArgs { @@ -58,7 +58,7 @@ impl DebugArgs { sig: self.sig, gas_estimate_multiplier: 130, opts: self.opts, - evm_opts: self.evm_opts, + evm_args: self.evm_args, debug: true, dump: self.dump, retry: RETRY_VERIFY_ON_CREATE, diff --git a/crates/forge/bin/cmd/doc/mod.rs b/crates/forge/bin/cmd/doc/mod.rs index ad61facf5..2fa996a04 100644 --- a/crates/forge/bin/cmd/doc/mod.rs +++ b/crates/forge/bin/cmd/doc/mod.rs @@ -68,7 +68,7 @@ pub struct DocArgs { impl DocArgs { pub async fn run(self) -> Result<()> { let config = self.config()?; - let root = &config.root.0; + let root = &config.root; let project = config.project()?; let compiler = ProjectCompiler::new().quiet(true); let _output = compiler.compile(&project)?; diff --git a/crates/forge/bin/cmd/fmt.rs b/crates/forge/bin/cmd/fmt.rs index 49548e1b6..137e139e6 100644 --- a/crates/forge/bin/cmd/fmt.rs +++ b/crates/forge/bin/cmd/fmt.rs @@ -48,7 +48,7 @@ impl FmtArgs { let config = self.try_load_config_emit_warnings()?; // Expand ignore globs and canonicalize from the get go - let ignored = expand_globs(&config.root.0, config.fmt.ignore.iter())? + let ignored = expand_globs(&config.root, config.fmt.ignore.iter())?
.iter() .flat_map(foundry_common::fs::canonicalize_path) .collect::>(); @@ -96,9 +96,7 @@ impl FmtArgs { let format = |source: String, path: Option<&Path>| -> Result<_> { let name = match path { - Some(path) => { - path.strip_prefix(&config.root.0).unwrap_or(path).display().to_string() - } + Some(path) => path.strip_prefix(&config.root).unwrap_or(path).display().to_string(), None => "stdin".to_string(), }; diff --git a/crates/forge/bin/cmd/geiger.rs b/crates/forge/bin/cmd/geiger.rs new file mode 100644 index 000000000..6d4c735a9 --- /dev/null +++ b/crates/forge/bin/cmd/geiger.rs @@ -0,0 +1,159 @@ +use clap::{Parser, ValueHint}; +use eyre::{Result, WrapErr}; +use foundry_cli::utils::LoadConfig; +use foundry_compilers::{resolver::parse::SolData, Graph}; +use foundry_config::{impl_figment_convert_basic, Config}; +use itertools::Itertools; +use solar_ast::visit::Visit; +use solar_parse::{ast, interface::Session}; +use std::path::{Path, PathBuf}; + +/// CLI arguments for `forge geiger`. +#[derive(Clone, Debug, Parser)] +pub struct GeigerArgs { + /// Paths to files or directories to detect. + #[arg( + conflicts_with = "root", + value_hint = ValueHint::FilePath, + value_name = "PATH", + num_args(1..), + )] + paths: Vec, + + /// The project's root path. + /// + /// By default root of the Git repository, if in one, + /// or the current working directory. + #[arg(long, value_hint = ValueHint::DirPath, value_name = "PATH")] + root: Option, + + /// Globs to ignore. + #[arg( + long, + value_hint = ValueHint::FilePath, + value_name = "PATH", + num_args(1..), + )] + ignore: Vec, + + #[arg(long, hide = true)] + check: bool, + #[arg(long, hide = true)] + full: bool, +} + +impl_figment_convert_basic!(GeigerArgs); + +impl GeigerArgs { + pub fn sources(&self, config: &Config) -> Result> { + let cwd = std::env::current_dir()?; + + let mut sources: Vec = { + if self.paths.is_empty() { + let paths = config.project_paths(); + Graph::::resolve(&paths)? 
+ .files() + .keys() + .filter(|f| !paths.has_library_ancestor(f)) + .cloned() + .collect() + } else { + self.paths + .iter() + .flat_map(|path| foundry_common::fs::files_with_ext(path, "sol")) + .unique() + .collect() + } + }; + + sources.retain_mut(|path| { + let abs_path = if path.is_absolute() { path.clone() } else { cwd.join(&path) }; + *path = abs_path.strip_prefix(&cwd).unwrap_or(&abs_path).to_path_buf(); + !self.ignore.iter().any(|ignore| { + if ignore.is_absolute() { + abs_path.starts_with(ignore) + } else { + abs_path.starts_with(cwd.join(ignore)) + } + }) + }); + + Ok(sources) + } + + pub fn run(self) -> Result { + if self.check { + sh_warn!("`--check` is deprecated as it's now the default behavior\n")?; + } + if self.full { + sh_warn!("`--full` is deprecated as reports are not generated anymore\n")?; + } + + let config = self.try_load_config_emit_warnings()?; + let sources = self.sources(&config).wrap_err("Failed to resolve files")?; + + if config.ffi { + sh_warn!("FFI enabled\n")?; + } + + let mut sess = Session::builder().with_stderr_emitter().build(); + sess.dcx = sess.dcx.set_flags(|flags| flags.track_diagnostics = false); + let unsafe_cheatcodes = &[ + "ffi".to_string(), + "readFile".to_string(), + "readLine".to_string(), + "writeFile".to_string(), + "writeLine".to_string(), + "removeFile".to_string(), + "closeFile".to_string(), + "setEnv".to_string(), + "deriveKey".to_string(), + ]; + Ok(sess + .enter(|| sources.iter().map(|file| lint_file(&sess, unsafe_cheatcodes, file)).sum())) + } +} + +fn lint_file(sess: &Session, unsafe_cheatcodes: &[String], path: &Path) -> usize { + try_lint_file(sess, unsafe_cheatcodes, path).unwrap_or(0) +} + +fn try_lint_file( + sess: &Session, + unsafe_cheatcodes: &[String], + path: &Path, +) -> solar_parse::interface::Result { + let arena = solar_parse::ast::Arena::new(); + let mut parser = solar_parse::Parser::from_file(sess, &arena, path)?; + let ast = parser.parse_file().map_err(|e| e.emit())?; + let mut visitor = Visitor::new(sess, unsafe_cheatcodes); + visitor.visit_source_unit(&ast); + Ok(visitor.count) +} + +struct Visitor<'a> { + sess: &'a Session, + count: usize, + unsafe_cheatcodes: &'a [String], +} + +impl<'a> Visitor<'a> { + fn new(sess: &'a Session, unsafe_cheatcodes: &'a [String]) -> Self { + Self { sess, count: 0, unsafe_cheatcodes } + } +} + +impl<'ast> Visit<'ast> for Visitor<'_> { + fn visit_expr(&mut self, expr: &'ast ast::Expr<'ast>) { + if let ast::ExprKind::Call(lhs, _args) = &expr.kind { + if let ast::ExprKind::Member(_lhs, member) = &lhs.kind { + if self.unsafe_cheatcodes.iter().any(|c| c.as_str() == member.as_str()) { + let msg = format!("usage of unsafe cheatcode `vm.{member}`"); + self.sess.dcx.err(msg).span(member.span).emit(); + self.count += 1; + } + } + } + self.walk_expr(expr); + } +} diff --git a/crates/forge/bin/cmd/geiger/error.rs b/crates/forge/bin/cmd/geiger/error.rs deleted file mode 100644 index 010fb237c..000000000 --- a/crates/forge/bin/cmd/geiger/error.rs +++ /dev/null @@ -1,11 +0,0 @@ -use forge_fmt::FormatterError; -use foundry_common::errors::FsPathError; - -/// Possible errors when scanning a solidity file -#[derive(Debug, thiserror::Error)] -pub enum ScanFileError { - #[error(transparent)] - Io(#[from] FsPathError), - #[error(transparent)] - ParseSol(#[from] FormatterError), -} diff --git a/crates/forge/bin/cmd/geiger/find.rs b/crates/forge/bin/cmd/geiger/find.rs deleted file mode 100644 index e3cd65413..000000000 --- a/crates/forge/bin/cmd/geiger/find.rs +++ /dev/null @@ -1,165 +0,0 @@ -use 
super::{error::ScanFileError, visitor::CheatcodeVisitor}; -use eyre::Result; -use forge_fmt::{offset_to_line_column, parse2, FormatterError, Visitable}; -use foundry_common::fs; -use solang_parser::pt::Loc; -use std::{ - fmt, - path::{Path, PathBuf}, -}; -use yansi::Paint; - -/// Scan a single file for `unsafe` cheatcode usage. -pub fn find_cheatcodes_in_file(path: &Path) -> Result { - let contents = fs::read_to_string(path)?; - let cheatcodes = find_cheatcodes_in_string(&contents, Some(path))?; - Ok(SolFileMetrics { contents, cheatcodes, file: path.to_path_buf() }) -} - -/// Scan a string for unsafe cheatcodes. -pub fn find_cheatcodes_in_string( - src: &str, - path: Option<&Path>, -) -> Result { - let mut parsed = parse2(src, path)?; - let mut visitor = CheatcodeVisitor::default(); - parsed.pt.visit(&mut visitor).unwrap(); - Ok(visitor.cheatcodes) -} - -/// Scan result for a single Solidity file. -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct SolFileMetrics { - /// The Solidity file - pub file: PathBuf, - - /// The file's contents. - pub contents: String, - - /// The unsafe cheatcodes found. - pub cheatcodes: UnsafeCheatcodes, -} - -/// Formats the metrics for a single file using [`fmt::Display`]. -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub struct SolFileMetricsPrinter<'a, 'b> { - pub metrics: &'a SolFileMetrics, - pub root: &'b Path, -} - -impl fmt::Display for SolFileMetricsPrinter<'_, '_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let SolFileMetricsPrinter { metrics, root } = *self; - - let file = metrics.file.strip_prefix(root).unwrap_or(&metrics.file); - - macro_rules! print_unsafe_fn { - ($($name:literal => $field:ident),*) => {$( - let $field = &metrics.cheatcodes.$field[..]; - if !$field.is_empty() { - writeln!(f, " {} {}", metrics.cheatcodes.$field.len().red(), $name.red())?; - - for &loc in $field { - let content = &metrics.contents[loc.range()]; - let (line, col) = offset_to_line_column(&metrics.contents, loc.start()); - let pos = format!(" --> {}:{}:{}", file.display(), line, col); - writeln!(f,"{}", pos.red())?; - for line in content.lines() { - writeln!(f, " {}", line.red())?; - } - } - } - )*}; - } - - if !metrics.cheatcodes.is_empty() { - writeln!(f, "{} {}", metrics.cheatcodes.len().red(), file.display().red())?; - print_unsafe_fn!( - "ffi" => ffi, - "readFile" => read_file, - "readLine" => read_line, - "writeFile" => write_file, - "writeLine" => write_line, - "removeFile" => remove_file, - "closeFile" => close_file, - "setEnv" => set_env, - "deriveKey" => derive_key - ); - } else { - writeln!(f, "0 {}", file.display())? - } - - Ok(()) - } -} - -/// Unsafe usage metrics collection. -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct UnsafeCheatcodes { - pub ffi: Vec, - pub read_file: Vec, - pub read_line: Vec, - pub write_file: Vec, - pub write_line: Vec, - pub remove_file: Vec, - pub close_file: Vec, - pub set_env: Vec, - pub derive_key: Vec, -} - -impl UnsafeCheatcodes { - /// Whether there are any unsafe calls. - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// The total number of unsafe calls. 
- pub fn len(&self) -> usize { - self.ffi.len() + - self.read_file.len() + - self.read_line.len() + - self.write_file.len() + - self.write_line.len() + - self.close_file.len() + - self.set_env.len() + - self.derive_key.len() + - self.remove_file.len() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_find_calls() { - let s = r" - contract A is Test { - function do_ffi() public { - string[] memory inputs = new string[](1); - vm.ffi(inputs); - } - } - "; - - let count = find_cheatcodes_in_string(s, None).unwrap(); - assert_eq!(count.ffi.len(), 1); - assert!(!count.is_empty()); - } - - #[test] - fn can_find_call_in_assignment() { - let s = r" - contract A is Test { - function do_ffi() public { - string[] memory inputs = new string[](1); - bytes stuff = vm.ffi(inputs); - } - } - "; - - let count = find_cheatcodes_in_string(s, None).unwrap(); - assert_eq!(count.ffi.len(), 1); - assert!(!count.is_empty()); - } -} diff --git a/crates/forge/bin/cmd/geiger/mod.rs b/crates/forge/bin/cmd/geiger/mod.rs deleted file mode 100644 index 4167b7882..000000000 --- a/crates/forge/bin/cmd/geiger/mod.rs +++ /dev/null @@ -1,122 +0,0 @@ -use clap::{Parser, ValueHint}; -use eyre::{Result, WrapErr}; -use foundry_cli::utils::LoadConfig; -use foundry_compilers::{resolver::parse::SolData, Graph}; -use foundry_config::{impl_figment_convert_basic, Config}; -use itertools::Itertools; -use rayon::prelude::*; -use std::path::PathBuf; - -mod error; - -mod find; -use find::{find_cheatcodes_in_file, SolFileMetricsPrinter}; - -mod visitor; - -/// CLI arguments for `forge geiger`. -#[derive(Clone, Debug, Parser)] -pub struct GeigerArgs { - /// Paths to files or directories to detect. - #[arg( - conflicts_with = "root", - value_hint = ValueHint::FilePath, - value_name = "PATH", - num_args(1..), - )] - paths: Vec, - - /// The project's root path. - /// - /// By default root of the Git repository, if in one, - /// or the current working directory. - #[arg(long, value_hint = ValueHint::DirPath, value_name = "PATH")] - root: Option, - - /// Run in "check" mode. - /// - /// The exit code of the program will be the number of unsafe cheatcodes found. - #[arg(long)] - pub check: bool, - - /// Globs to ignore. - #[arg( - long, - value_hint = ValueHint::FilePath, - value_name = "PATH", - num_args(1..), - )] - ignore: Vec, - - /// Print a report of all files, even if no unsafe functions are found. - #[arg(long)] - full: bool, -} - -impl_figment_convert_basic!(GeigerArgs); - -impl GeigerArgs { - pub fn sources(&self, config: &Config) -> Result> { - let cwd = std::env::current_dir()?; - - let mut sources: Vec = { - if self.paths.is_empty() { - Graph::::resolve(&config.project_paths())? 
- .files() - .keys() - .cloned() - .collect() - } else { - self.paths - .iter() - .flat_map(|path| foundry_common::fs::files_with_ext(path, "sol")) - .unique() - .collect() - } - }; - - sources.retain(|path| { - let abs_path = if path.is_absolute() { path.clone() } else { cwd.join(path) }; - !self.ignore.iter().any(|ignore| { - if ignore.is_absolute() { - abs_path.starts_with(ignore) - } else { - abs_path.starts_with(cwd.join(ignore)) - } - }) - }); - - Ok(sources) - } - - pub fn run(self) -> Result { - let config = self.try_load_config_emit_warnings()?; - let sources = self.sources(&config).wrap_err("Failed to resolve files")?; - - if config.ffi { - sh_warn!("FFI enabled\n")?; - } - - let root = config.root.0; - - let sum = sources - .par_iter() - .map(|file| match find_cheatcodes_in_file(file) { - Ok(metrics) => { - let len = metrics.cheatcodes.len(); - let printer = SolFileMetricsPrinter { metrics: &metrics, root: &root }; - if self.full || len == 0 { - let _ = sh_eprint!("{printer}"); - } - len - } - Err(err) => { - let _ = sh_err!("{err}"); - 0 - } - }) - .sum(); - - Ok(sum) - } -} diff --git a/crates/forge/bin/cmd/geiger/visitor.rs b/crates/forge/bin/cmd/geiger/visitor.rs deleted file mode 100644 index 703130890..000000000 --- a/crates/forge/bin/cmd/geiger/visitor.rs +++ /dev/null @@ -1,333 +0,0 @@ -use super::find::UnsafeCheatcodes; -use eyre::Result; -use forge_fmt::{Visitable, Visitor}; -use solang_parser::pt::{ - ContractDefinition, Expression, FunctionDefinition, IdentifierPath, Loc, Parameter, SourceUnit, - Statement, TypeDefinition, VariableDeclaration, VariableDefinition, -}; -use std::convert::Infallible; - -/// a [`forge_fmt::Visitor` that scans for invocations of cheatcodes -#[derive(Default)] -pub struct CheatcodeVisitor { - pub cheatcodes: UnsafeCheatcodes, -} - -impl Visitor for CheatcodeVisitor { - type Error = Infallible; - - fn visit_source_unit(&mut self, source_unit: &mut SourceUnit) -> Result<(), Self::Error> { - source_unit.0.visit(self) - } - - fn visit_contract(&mut self, contract: &mut ContractDefinition) -> Result<(), Self::Error> { - contract.base.visit(self)?; - contract.parts.visit(self) - } - - fn visit_block( - &mut self, - _loc: Loc, - _unchecked: bool, - statements: &mut Vec, - ) -> Result<(), Self::Error> { - statements.visit(self) - } - - fn visit_expr(&mut self, _loc: Loc, expr: &mut Expression) -> Result<(), Self::Error> { - match expr { - Expression::PostIncrement(_, expr) => { - expr.visit(self)?; - } - Expression::PostDecrement(_, expr) => { - expr.visit(self)?; - } - Expression::New(_, expr) => { - expr.visit(self)?; - } - Expression::ArraySubscript(_, expr1, expr2) => { - expr1.visit(self)?; - expr2.visit(self)?; - } - Expression::ArraySlice(_, expr1, expr2, expr3) => { - expr1.visit(self)?; - expr2.visit(self)?; - expr3.visit(self)?; - } - Expression::Parenthesis(_, expr) => { - expr.visit(self)?; - } - Expression::MemberAccess(_, expr, _) => { - expr.visit(self)?; - } - Expression::FunctionCall(loc, lhs, rhs) => { - // all cheatcodes are accessd via .cheatcode - if let Expression::MemberAccess(_, expr, identifier) = &**lhs { - if let Expression::Variable(_) = &**expr { - match identifier.name.as_str() { - "ffi" => self.cheatcodes.ffi.push(*loc), - "readFile" => self.cheatcodes.read_file.push(*loc), - "writeFile" => self.cheatcodes.write_file.push(*loc), - "readLine" => self.cheatcodes.read_line.push(*loc), - "writeLine" => self.cheatcodes.write_line.push(*loc), - "closeFile" => self.cheatcodes.close_file.push(*loc), - "removeFile" => 
self.cheatcodes.remove_file.push(*loc), - "setEnv" => self.cheatcodes.set_env.push(*loc), - "deriveKey" => self.cheatcodes.derive_key.push(*loc), - _ => {} - } - } - } - rhs.visit(self)?; - } - Expression::FunctionCallBlock(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::NamedFunctionCall(_, lhs, rhs) => { - lhs.visit(self)?; - for arg in rhs.iter_mut() { - arg.expr.visit(self)?; - } - } - Expression::Not(_, expr) => { - expr.visit(self)?; - } - Expression::BitwiseNot(_, expr) => { - expr.visit(self)?; - } - Expression::Delete(_, expr) => { - expr.visit(self)?; - } - Expression::PreIncrement(_, expr) => { - expr.visit(self)?; - } - Expression::PreDecrement(_, expr) => { - expr.visit(self)?; - } - Expression::UnaryPlus(_, expr) => { - expr.visit(self)?; - } - Expression::Negate(_, expr) => { - expr.visit(self)?; - } - Expression::Power(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::Multiply(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::Divide(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::Modulo(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::Add(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::Subtract(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::ShiftLeft(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::ShiftRight(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::BitwiseAnd(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::BitwiseXor(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::BitwiseOr(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::Less(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::More(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::LessEqual(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::MoreEqual(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::Equal(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::NotEqual(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::And(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::Or(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::ConditionalOperator(_, llhs, lhs, rhs) => { - llhs.visit(self)?; - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::Assign(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::AssignOr(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::AssignAnd(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::AssignXor(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::AssignShiftLeft(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::AssignShiftRight(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::AssignAdd(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::AssignSubtract(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::AssignMultiply(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::AssignDivide(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - Expression::AssignModulo(_, lhs, rhs) => { - lhs.visit(self)?; - rhs.visit(self)?; - } - 
Expression::List(_, param) => { - for (_, param) in param.iter_mut() { - param.visit(self)?; - } - } - _ => {} - } - - Ok(()) - } - - fn visit_emit(&mut self, _: Loc, expr: &mut Expression) -> Result<(), Self::Error> { - expr.visit(self) - } - - fn visit_var_definition(&mut self, var: &mut VariableDefinition) -> Result<(), Self::Error> { - var.ty.visit(self)?; - var.initializer.visit(self) - } - - fn visit_var_definition_stmt( - &mut self, - _: Loc, - declaration: &mut VariableDeclaration, - expr: &mut Option, - ) -> Result<(), Self::Error> { - declaration.visit(self)?; - expr.visit(self) - } - - fn visit_var_declaration(&mut self, var: &mut VariableDeclaration) -> Result<(), Self::Error> { - var.ty.visit(self) - } - - fn visit_revert( - &mut self, - _: Loc, - _error: &mut Option, - args: &mut Vec, - ) -> Result<(), Self::Error> { - args.visit(self) - } - - fn visit_if( - &mut self, - _loc: Loc, - cond: &mut Expression, - if_branch: &mut Box, - else_branch: &mut Option>, - _is_frst_stmt: bool, - ) -> Result<(), Self::Error> { - cond.visit(self)?; - if_branch.visit(self)?; - else_branch.visit(self) - } - - fn visit_while( - &mut self, - _loc: Loc, - cond: &mut Expression, - body: &mut Statement, - ) -> Result<(), Self::Error> { - cond.visit(self)?; - body.visit(self) - } - - fn visit_for( - &mut self, - _loc: Loc, - init: &mut Option>, - cond: &mut Option>, - update: &mut Option>, - body: &mut Option>, - ) -> Result<(), Self::Error> { - init.visit(self)?; - cond.visit(self)?; - update.visit(self)?; - body.visit(self) - } - - fn visit_function(&mut self, func: &mut FunctionDefinition) -> Result<(), Self::Error> { - if let Some(ref mut body) = func.body { - body.visit(self)?; - } - Ok(()) - } - - fn visit_parameter(&mut self, parameter: &mut Parameter) -> Result<(), Self::Error> { - parameter.ty.visit(self) - } - - fn visit_type_definition(&mut self, def: &mut TypeDefinition) -> Result<(), Self::Error> { - def.ty.visit(self) - } -} diff --git a/crates/forge/bin/cmd/inspect.rs b/crates/forge/bin/cmd/inspect.rs index 5c7c224f7..426a8b36e 100644 --- a/crates/forge/bin/cmd/inspect.rs +++ b/crates/forge/bin/cmd/inspect.rs @@ -1,10 +1,10 @@ use alloy_primitives::{hex, keccak256, Address}; use clap::Parser; -use comfy_table::{presets::ASCII_MARKDOWN, Table}; +use comfy_table::{modifiers::UTF8_ROUND_CORNERS, Cell, Table}; use eyre::{Context, Result}; use forge::revm::primitives::Eof; use foundry_cli::opts::{CompilerArgs, CoreBuildArgs}; -use foundry_common::{compile::ProjectCompiler, fmt::pretty_eof}; +use foundry_common::{compile::ProjectCompiler, fmt::pretty_eof, shell}; use foundry_compilers::{ artifacts::{ output_selection::{ @@ -111,7 +111,7 @@ impl InspectArgs { print_json(&artifact.gas_estimates)?; } ContractArtifactField::StorageLayout => { - print_storage_layout(artifact.storage_layout.as_ref(), pretty)?; + print_storage_layout(artifact.storage_layout.as_ref())?; } ContractArtifactField::DevDoc => { print_json(&artifact.devdoc)?; @@ -176,18 +176,26 @@ impl InspectArgs { } } -pub fn print_storage_layout(storage_layout: Option<&StorageLayout>, pretty: bool) -> Result<()> { +pub fn print_storage_layout(storage_layout: Option<&StorageLayout>) -> Result<()> { let Some(storage_layout) = storage_layout else { eyre::bail!("Could not get storage layout"); }; - if !pretty { + if shell::is_json() { return print_json(&storage_layout) } let mut table = Table::new(); - table.load_preset(ASCII_MARKDOWN); - table.set_header(["Name", "Type", "Slot", "Offset", "Bytes", "Contract"]); + 
table.apply_modifier(UTF8_ROUND_CORNERS); + + table.set_header(vec![ + Cell::new("Name"), + Cell::new("Type"), + Cell::new("Slot"), + Cell::new("Offset"), + Cell::new("Bytes"), + Cell::new("Contract"), + ]); for slot in &storage_layout.storage { let storage_type = storage_layout.types.get(&slot.storage_type); @@ -201,7 +209,7 @@ pub fn print_storage_layout(storage_layout: Option<&StorageLayout>, pretty: bool ]); } - sh_println!("{table}")?; + sh_println!("\n{table}\n")?; Ok(()) } diff --git a/crates/forge/bin/cmd/install.rs b/crates/forge/bin/cmd/install.rs index 60fc7f299..de9178a70 100644 --- a/crates/forge/bin/cmd/install.rs +++ b/crates/forge/bin/cmd/install.rs @@ -168,7 +168,7 @@ impl DependencyInstallOpts { // Pin branch to submodule if branch is used if let Some(branch) = &installed_tag { // First, check if this tag has a branch - if git.has_branch(branch)? { + if git.has_branch(branch, &path)? { // always work with relative paths when directly modifying submodules git.cmd() .args(["submodule", "set-branch", "-b", branch]) diff --git a/crates/forge/bin/cmd/mod.rs b/crates/forge/bin/cmd/mod.rs index f2de1d632..427b25fb0 100644 --- a/crates/forge/bin/cmd/mod.rs +++ b/crates/forge/bin/cmd/mod.rs @@ -24,7 +24,7 @@ //! #[derive(Clone, Debug, Parser)] //! pub struct MyArgs { //! #[command(flatten)] -//! evm_opts: EvmArgs, +//! evm_args: EvmArgs, //! #[command(flatten)] //! opts: BuildArgs, //! } diff --git a/crates/forge/bin/cmd/selectors.rs b/crates/forge/bin/cmd/selectors.rs index ddd6a7968..31992983d 100644 --- a/crates/forge/bin/cmd/selectors.rs +++ b/crates/forge/bin/cmd/selectors.rs @@ -1,16 +1,17 @@ use alloy_primitives::hex; use clap::Parser; -use comfy_table::Table; +use comfy_table::{modifiers::UTF8_ROUND_CORNERS, Table}; use eyre::Result; use foundry_cli::{ opts::{CompilerArgs, CoreBuildArgs, ProjectPathsArgs}, - utils::FoundryPathExt, + utils::{cache_local_signatures, FoundryPathExt}, }; use foundry_common::{ compile::{compile_target, ProjectCompiler}, selectors::{import_selectors, SelectorImportData}, }; use foundry_compilers::{artifacts::output_selection::ContractOutputSelection, info::ContractInfo}; +use foundry_config::Config; use std::fs::canonicalize; /// CLI arguments for `forge selectors`. @@ -67,11 +68,34 @@ pub enum SelectorsSubcommands { #[command(flatten)] project_paths: ProjectPathsArgs, }, + + /// Cache project selectors (enables trace with local contracts functions and events). + #[command(visible_alias = "c")] + Cache { + #[command(flatten)] + project_paths: ProjectPathsArgs, + }, } impl SelectorsSubcommands { pub async fn run(self) -> Result<()> { match self { + Self::Cache { project_paths } => { + sh_println!("Caching selectors for contracts in the project...")?; + let build_args = CoreBuildArgs { + project_paths, + compiler: CompilerArgs { + extra_output: vec![ContractOutputSelection::Abi], + ..Default::default() + }, + ..Default::default() + }; + + // compile the project to get the artifacts/abis + let project = build_args.project()?; + let outcome = ProjectCompiler::new().quiet(true).compile(&project)?; + cache_local_signatures(&outcome, Config::foundry_cache_dir().unwrap())? 
+ } Self::Upload { contract, all, project_paths } => { let build_args = CoreBuildArgs { project_paths: project_paths.clone(), @@ -174,6 +198,7 @@ impl SelectorsSubcommands { sh_println!("No colliding method selectors between the two contracts.")?; } else { let mut table = Table::new(); + table.apply_modifier(UTF8_ROUND_CORNERS); table.set_header([ String::from("Selector"), first_contract.name, @@ -183,7 +208,7 @@ impl SelectorsSubcommands { table.add_row([method.0, method.1, method.2]); } sh_println!("{} collisions found:", colliding_methods.len())?; - sh_println!("{table}")?; + sh_println!("\n{table}\n")?; } } Self::List { contract, project_paths } => { @@ -243,6 +268,7 @@ impl SelectorsSubcommands { sh_println!("{contract}")?; let mut table = Table::new(); + table.apply_modifier(UTF8_ROUND_CORNERS); table.set_header(["Type", "Signature", "Selector"]); @@ -264,7 +290,7 @@ impl SelectorsSubcommands { table.add_row(["Error", &sig, &hex::encode_prefixed(selector)]); } - sh_println!("{table}")?; + sh_println!("\n{table}\n")?; if artifacts.peek().is_some() { sh_println!()? @@ -296,6 +322,7 @@ impl SelectorsSubcommands { .collect::>(); let mut table = Table::new(); + table.apply_modifier(UTF8_ROUND_CORNERS); table.set_header(["Type", "Signature", "Selector", "Contract"]); @@ -341,7 +368,7 @@ impl SelectorsSubcommands { if table.row_count() > 0 { sh_println!("\nFound {} instance(s)...", table.row_count())?; - sh_println!("{table}")?; + sh_println!("\n{table}\n")?; } else { return Err(eyre::eyre!("\nSelector not found in the project.")); } diff --git a/crates/forge/bin/cmd/test/mod.rs b/crates/forge/bin/cmd/test/mod.rs index d1d510bc4..acb144755 100644 --- a/crates/forge/bin/cmd/test/mod.rs +++ b/crates/forge/bin/cmd/test/mod.rs @@ -12,9 +12,9 @@ use forge::{ debug::{ContractSources, DebugTraceIdentifier}, decode_trace_arena, folded_stack_trace, identifier::SignaturesIdentifier, - render_trace_arena, CallTraceDecoderBuilder, InternalTraceMode, TraceKind, + CallTraceDecoderBuilder, InternalTraceMode, TraceKind, }, - MultiContractRunner, MultiContractRunnerBuilder, TestFilter, TestOptions, TestOptionsBuilder, + MultiContractRunner, MultiContractRunnerBuilder, TestFilter, }; use foundry_cli::{ opts::{CoreBuildArgs, GlobalOpts}, @@ -23,7 +23,10 @@ use foundry_cli::{ use foundry_common::{compile::ProjectCompiler, evm::EvmArgs, fs, shell, TestFunctionExt}; use foundry_compilers::{ artifacts::output_selection::OutputSelection, - compilers::{multi::MultiCompilerLanguage, Language}, + compilers::{ + multi::{MultiCompiler, MultiCompilerLanguage}, + Language, + }, utils::source_files_iter, ProjectCompileOutput, }; @@ -34,7 +37,7 @@ use foundry_config::{ Metadata, Profile, Provider, }, filter::GlobMatcher, - get_available_profiles, Config, + Config, }; use foundry_debugger::Debugger; use foundry_evm::traces::identifier::TraceIdentifiers; @@ -51,16 +54,13 @@ use yansi::Paint; mod filter; mod summary; - -use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite}; -use summary::TestSummaryReporter; - -use crate::cmd::test::summary::print_invariant_metrics; pub use filter::FilterArgs; -use forge::result::TestKind; +use forge::{result::TestKind, traces::render_trace_arena_inner}; +use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite}; +use summary::{print_invariant_metrics, TestSummaryReport}; // Loads project's figment and merges the build cli arguments into it -foundry_config::merge_impl_figment_convert!(TestArgs, opts, evm_opts); 
+foundry_config::merge_impl_figment_convert!(TestArgs, opts, evm_args); /// CLI arguments for `forge test`. #[derive(Clone, Debug, Parser)] @@ -80,7 +80,7 @@ pub struct TestArgs { /// /// If the matching test is a fuzz test, then it will open the debugger on the first failure /// case. If the fuzz test does not fail, it will open the debugger on the last fuzz case. - #[arg(long, value_name = "DEPRECATED_TEST_FUNCTION_REGEX")] + #[arg(long, conflicts_with_all = ["flamegraph", "flamechart", "decode_internal", "rerun"], value_name = "DEPRECATED_TEST_FUNCTION_REGEX")] debug: Option>, /// Generate a flamegraph for a single test. Implies `--decode-internal`. @@ -124,7 +124,7 @@ pub struct TestArgs { allow_failure: bool, /// Output test results as JUnit XML report. - #[arg(long, conflicts_with_all = ["quiet", "json", "gas_report"], help_heading = "Display options")] + #[arg(long, conflicts_with_all = ["quiet", "json", "gas_report", "summary", "list", "show_progress"], help_heading = "Display options")] pub junit: bool, /// Stop running tests after the first failure. @@ -136,7 +136,7 @@ pub struct TestArgs { etherscan_api_key: Option, /// List tests instead of running them. - #[arg(long, short, help_heading = "Display options")] + #[arg(long, short, conflicts_with_all = ["show_progress", "decode_internal", "summary"], help_heading = "Display options")] list: bool, /// Set seed used to generate randomness during your fuzz runs. @@ -146,12 +146,16 @@ pub struct TestArgs { #[arg(long, env = "FOUNDRY_FUZZ_RUNS", value_name = "RUNS")] pub fuzz_runs: Option, + /// Timeout for each fuzz run in seconds. + #[arg(long, env = "FOUNDRY_FUZZ_TIMEOUT", value_name = "TIMEOUT")] + pub fuzz_timeout: Option, + /// File to rerun fuzz failures from. #[arg(long)] pub fuzz_input_file: Option, /// Show test execution progress. - #[arg(long)] + #[arg(long, conflicts_with_all = ["quiet", "json"], help_heading = "Display options")] pub show_progress: bool, #[command(flatten)] @@ -163,7 +167,7 @@ pub struct TestArgs { pub rerun: bool, #[command(flatten)] - evm_opts: EvmArgs, + evm_args: EvmArgs, #[command(flatten)] opts: CoreBuildArgs, @@ -281,16 +285,15 @@ impl TestArgs { config.invariant.gas_report_samples = 0; } - // Set up the project. - let mut project = config.project()?; - // Install missing dependencies. if install::install_missing_dependencies(&mut config) && config.auto_detect_remappings { // need to re-configure here to also catch additional remappings config = self.load_config(); - project = config.project()?; } + // Set up the project. + let project = config.project()?; + let mut filter = self.filter(&config); trace!(target: "forge::test", ?filter, "using filter"); @@ -319,26 +322,18 @@ impl TestArgs { // Create test options from general project settings and compiler output. let project_root = &project.paths.root; - let toml = config.get_config_path(); - let profiles = get_available_profiles(toml)?; // Remove the snapshots directory if it exists. // This is to ensure that we don't have any stale snapshots. // If `FORGE_SNAPSHOT_CHECK` is set, we don't remove the snapshots directory as it is // required for comparison. 
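// NOTE: the switch from `var` to `var_os` below also changes behavior for non-Unicode
// values: `std::env::var` returns Err(VarError::NotUnicode) for such values, so the old
// check would have removed the snapshots directory even though FORGE_SNAPSHOT_CHECK was
// set, while `var_os(..).is_none()` only triggers when the variable is truly absent.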
- if std::env::var("FORGE_SNAPSHOT_CHECK").is_err() { + if std::env::var_os("FORGE_SNAPSHOT_CHECK").is_none() { let snapshot_dir = project_root.join(&config.snapshots); if snapshot_dir.exists() { let _ = fs::remove_dir_all(project_root.join(&config.snapshots)); } } - let test_options: TestOptions = TestOptionsBuilder::default() - .fuzz(config.fuzz.clone()) - .invariant(config.invariant.clone()) - .profiles(profiles) - .build(&output, project_root)?; - let should_debug = self.debug.is_some(); let should_draw = self.flamegraph || self.flamechart; @@ -374,17 +369,9 @@ impl TestArgs { .evm_spec(config.evm_spec_id()) .sender(evm_opts.sender) .with_fork(evm_opts.get_fork(&config, env.clone())) - .with_test_options(test_options.clone()) .enable_isolation(evm_opts.isolate) - .alphanet(evm_opts.alphanet) - .build( - project_root, - output.clone(), - zk_output, - env, - evm_opts, - dual_compiled_contracts.unwrap_or_default(), - )?; + .odyssey(evm_opts.odyssey) + .build::(project_root, &output, env, evm_opts)?; let mut maybe_override_mt = |flag, maybe_regex: Option<&Option>| { if let Some(Some(regex)) = maybe_regex { @@ -498,7 +485,7 @@ impl TestArgs { trace!(target: "forge::test", "running all tests"); // If we need to render to a serialized format, we should not print anything else to stdout. - let silent = self.gas_report && shell::is_json(); + let silent = self.gas_report && shell::is_json() || self.summary && shell::is_json(); let num_filtered = runner.matching_test_functions(filter).count(); if num_filtered != 1 && (self.debug.is_some() || self.flamegraph || self.flamechart) { @@ -526,7 +513,7 @@ impl TestArgs { } // Run tests in a non-streaming fashion and collect results for serialization. - if !self.gas_report && shell::is_json() { + if !self.gas_report && !self.summary && shell::is_json() { let mut results = runner.test_collect(filter); results.values_mut().for_each(|suite_result| { for test_result in suite_result.test_results.values_mut() { @@ -586,7 +573,7 @@ impl TestArgs { if self.decode_internal.is_some() { let sources = - ContractSources::from_project_output(output, &config.root.0, Some(&libraries))?; + ContractSources::from_project_output(output, &config.root, Some(&libraries))?; builder = builder.with_debug_identifier(DebugTraceIdentifier::new(sources)); } let mut decoder = builder.build(); @@ -636,9 +623,7 @@ impl TestArgs { sh_println!("{}", result.short_result(name))?; // Display invariant metrics if invariant kind. - if let TestKind::Invariant { runs: _, calls: _, reverts: _, metrics } = - &result.kind - { + if let TestKind::Invariant { metrics, .. 
} = &result.kind { print_invariant_metrics(metrics); } @@ -677,7 +662,7 @@ impl TestArgs { // - 0..3: nothing // - 3: only display traces for failed tests // - 4: also display the setup trace for failed tests - // - 5..: display all traces for all tests + // - 5..: display all traces for all tests, including storage changes let should_include = match kind { TraceKind::Execution => { (verbosity == 3 && result.status.is_failure()) || verbosity >= 4 @@ -690,7 +675,7 @@ impl TestArgs { if should_include { decode_trace_arena(arena, &decoder).await?; - decoded_traces.push(render_trace_arena(arena)); + decoded_traces.push(render_trace_arena_inner(arena, false, verbosity > 4)); } } @@ -824,14 +809,13 @@ impl TestArgs { outcome.gas_report = Some(finalized); } - if !silent && !outcome.results.is_empty() { + if !self.summary && !shell::is_json() { sh_println!("{}", outcome.summary(duration))?; + } - if self.summary { - let mut summary_table = TestSummaryReporter::new(self.detailed); - sh_println!("\n\nTest Summary:")?; - summary_table.print_summary(&outcome); - } + if self.summary && !outcome.results.is_empty() { + let summary_report = TestSummaryReport::new(self.detailed, outcome.clone()); + sh_println!("{}", &summary_report)?; } // Reattach the task. @@ -895,6 +879,9 @@ impl Provider for TestArgs { if let Some(fuzz_runs) = self.fuzz_runs { fuzz_dict.insert("runs".to_string(), fuzz_runs.into()); } + if let Some(fuzz_timeout) = self.fuzz_timeout { + fuzz_dict.insert("timeout".to_string(), fuzz_timeout.into()); + } if let Some(fuzz_input_file) = self.fuzz_input_file.clone() { fuzz_dict.insert("failure_persist_file".to_string(), fuzz_input_file.into()); } @@ -1032,7 +1019,7 @@ mod tests { fn extract_chain() { let test = |arg: &str, expected: Chain| { let args = TestArgs::parse_from(["foundry-cli", arg]); - assert_eq!(args.evm_opts.env.chain, Some(expected)); + assert_eq!(args.evm_args.env.chain, Some(expected)); let (config, evm_opts) = args.load_config_and_evm_opts().unwrap(); assert_eq!(config.chain, Some(expected)); assert_eq!(evm_opts.env.chain_id, Some(expected.id())); @@ -1092,9 +1079,9 @@ contract FooBarTest is DSTest { &prj.root().to_string_lossy(), ]); let outcome = args.run().await.unwrap(); - let gas_report = outcome.gas_report.unwrap(); + let gas_report = outcome.gas_report.as_ref().unwrap(); - assert_eq!(gas_report.contracts.len(), 3); + assert_eq!(gas_report.contracts.len(), 3, "{}", outcome.summary(Default::default())); let call_cnts = gas_report .contracts .values() diff --git a/crates/forge/bin/cmd/test/summary.rs b/crates/forge/bin/cmd/test/summary.rs index 1922ce53b..eabf7bd9e 100644 --- a/crates/forge/bin/cmd/test/summary.rs +++ b/crates/forge/bin/cmd/test/summary.rs @@ -1,71 +1,99 @@ use crate::cmd::test::TestOutcome; -use comfy_table::{ - modifiers::UTF8_ROUND_CORNERS, presets::ASCII_MARKDOWN, Attribute, Cell, CellAlignment, Color, - Row, Table, -}; +use comfy_table::{modifiers::UTF8_ROUND_CORNERS, Cell, Color, Row, Table}; +use foundry_common::reports::{report_kind, ReportKind}; use foundry_evm::executors::invariant::InvariantMetrics; use itertools::Itertools; -use std::collections::HashMap; +use serde_json::json; +use std::{collections::HashMap, fmt::Display}; + +/// Represents a test summary report. +pub struct TestSummaryReport { + /// The kind of report to generate. + report_kind: ReportKind, + /// Whether the report should be detailed. + is_detailed: bool, + /// The test outcome to report. 
+ outcome: TestOutcome, +} -/// A simple summary reporter that prints the test results in a table. -pub struct TestSummaryReporter { - /// The test summary table. - pub(crate) table: Table, - pub(crate) is_detailed: bool, +impl TestSummaryReport { + pub fn new(is_detailed: bool, outcome: TestOutcome) -> Self { + Self { report_kind: report_kind(), is_detailed, outcome } + } } -impl TestSummaryReporter { - pub(crate) fn new(is_detailed: bool) -> Self { +impl Display for TestSummaryReport { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + match self.report_kind { + ReportKind::Text => { + writeln!(f, "\n{}", &self.format_table_output(&self.is_detailed, &self.outcome))?; + } + ReportKind::JSON => { + writeln!(f, "{}", &self.format_json_output(&self.is_detailed, &self.outcome))?; + } + } + + Ok(()) + } +} + +impl TestSummaryReport { + // Helper function to format the JSON output. + fn format_json_output(&self, is_detailed: &bool, outcome: &TestOutcome) -> String { + let output = json!({ + "results": outcome.results.iter().map(|(contract, suite)| { + let (suite_path, suite_name) = contract.split_once(':').unwrap(); + let passed = suite.successes().count(); + let failed = suite.failures().count(); + let skipped = suite.skips().count(); + let mut result = json!({ + "suite": suite_name, + "passed": passed, + "failed": failed, + "skipped": skipped, + }); + + if *is_detailed { + result["file_path"] = serde_json::Value::String(suite_path.to_string()); + result["duration"] = serde_json::Value::String(format!("{:.2?}", suite.duration)); + } + + result + }).collect::>(), + }); + + serde_json::to_string_pretty(&output).unwrap() + } + + fn format_table_output(&self, is_detailed: &bool, outcome: &TestOutcome) -> Table { let mut table = Table::new(); table.apply_modifier(UTF8_ROUND_CORNERS); + let mut row = Row::from(vec![ - Cell::new("Test Suite") - .set_alignment(CellAlignment::Center) - .add_attribute(Attribute::Bold), - Cell::new("Passed") - .set_alignment(CellAlignment::Center) - .add_attribute(Attribute::Bold) - .fg(Color::Green), - Cell::new("Failed") - .set_alignment(CellAlignment::Center) - .add_attribute(Attribute::Bold) - .fg(Color::Red), - Cell::new("Skipped") - .set_alignment(CellAlignment::Center) - .add_attribute(Attribute::Bold) - .fg(Color::Yellow), + Cell::new("Test Suite"), + Cell::new("Passed").fg(Color::Green), + Cell::new("Failed").fg(Color::Red), + Cell::new("Skipped").fg(Color::Yellow), ]); - if is_detailed { - row.add_cell( - Cell::new("File Path") - .set_alignment(CellAlignment::Center) - .add_attribute(Attribute::Bold), - ); - row.add_cell( - Cell::new("Duration") - .set_alignment(CellAlignment::Center) - .add_attribute(Attribute::Bold), - ); + if *is_detailed { + row.add_cell(Cell::new("File Path").fg(Color::Cyan)); + row.add_cell(Cell::new("Duration").fg(Color::Cyan)); } table.set_header(row); - Self { table, is_detailed } - } - - pub(crate) fn print_summary(&mut self, outcome: &TestOutcome) { // Traverse the test_results vector and build the table for (contract, suite) in &outcome.results { let mut row = Row::new(); let (suite_path, suite_name) = contract.split_once(':').unwrap(); let passed = suite.successes().count(); - let mut passed_cell = Cell::new(passed).set_alignment(CellAlignment::Center); + let mut passed_cell = Cell::new(passed); let failed = suite.failures().count(); - let mut failed_cell = Cell::new(failed).set_alignment(CellAlignment::Center); + let mut failed_cell = Cell::new(failed); let skipped = suite.skips().count(); - let 
mut skipped_cell = Cell::new(skipped).set_alignment(CellAlignment::Center); + let mut skipped_cell = Cell::new(skipped); row.add_cell(Cell::new(suite_name)); @@ -89,43 +117,75 @@ impl TestSummaryReporter { row.add_cell(Cell::new(format!("{:.2?}", suite.duration).to_string())); } - self.table.add_row(row); + table.add_row(row); } - let _ = sh_println!("\n{}", self.table); + table } } -/// Helper to create and render invariant metrics summary table: +/// Helper function to print the invariant metrics. +/// +/// ╭-----------------------+----------------+-------+---------+----------╮ /// | Contract | Selector | Calls | Reverts | Discards | -/// |-----------------------|----------------|-------|---------|----------| -/// | AnotherCounterHandler | doWork | 7451 | 123 | 4941 | -/// | AnotherCounterHandler | doWorkThing | 7279 | 137 | 4849 | -/// | CounterHandler | doAnotherThing | 7302 | 150 | 4794 | -/// | CounterHandler | doSomething | 7382 | 160 | 4830 | +/// +=====================================================================+ +/// | AnotherCounterHandler | doWork | 7451 | 123 | 4941 | +/// |-----------------------+----------------+-------+---------+----------| +/// | AnotherCounterHandler | doWorkThing | 7279 | 137 | 4849 | +/// |-----------------------+----------------+-------+---------+----------| +/// | CounterHandler | doAnotherThing | 7302 | 150 | 4794 | +/// |-----------------------+----------------+-------+---------+----------| +/// | CounterHandler | doSomething | 7382 | 160 |4794 | +/// ╰-----------------------+----------------+-------+---------+----------╯ pub(crate) fn print_invariant_metrics(test_metrics: &HashMap) { if !test_metrics.is_empty() { let mut table = Table::new(); - table.load_preset(ASCII_MARKDOWN); - table.set_header(["Contract", "Selector", "Calls", "Reverts", "Discards"]); + table.apply_modifier(UTF8_ROUND_CORNERS); + + table.set_header(vec![ + Cell::new("Contract"), + Cell::new("Selector"), + Cell::new("Calls").fg(Color::Green), + Cell::new("Reverts").fg(Color::Red), + Cell::new("Discards").fg(Color::Yellow), + ]); for name in test_metrics.keys().sorted() { if let Some((contract, selector)) = name.split_once(':').and_then(|(_, contract)| contract.split_once('.')) { let mut row = Row::new(); - row.add_cell(Cell::new(contract).set_alignment(CellAlignment::Left)); - row.add_cell(Cell::new(selector).set_alignment(CellAlignment::Left)); + row.add_cell(Cell::new(contract)); + row.add_cell(Cell::new(selector)); + if let Some(metrics) = test_metrics.get(name) { - row.add_cell(Cell::new(metrics.calls).set_alignment(CellAlignment::Center)); - row.add_cell(Cell::new(metrics.reverts).set_alignment(CellAlignment::Center)); - row.add_cell(Cell::new(metrics.discards).set_alignment(CellAlignment::Center)); + let calls_cell = Cell::new(metrics.calls).fg(if metrics.calls > 0 { + Color::Green + } else { + Color::White + }); + + let reverts_cell = Cell::new(metrics.reverts).fg(if metrics.reverts > 0 { + Color::Red + } else { + Color::White + }); + + let discards_cell = Cell::new(metrics.discards).fg(if metrics.discards > 0 { + Color::Yellow + } else { + Color::White + }); + + row.add_cell(calls_cell); + row.add_cell(reverts_cell); + row.add_cell(discards_cell); } table.add_row(row); } } - let _ = sh_println!("{table}\n"); + let _ = sh_println!("\n{table}\n"); } } diff --git a/crates/forge/bin/cmd/update.rs b/crates/forge/bin/cmd/update.rs index 5ddc5460a..c61b03d7a 100644 --- a/crates/forge/bin/cmd/update.rs +++ b/crates/forge/bin/cmd/update.rs @@ -52,7 +52,7 @@ impl UpdateArgs { 
/// Returns `(root, paths)` where `root` is the root of the Git repository and `paths` are the /// relative paths of the dependencies. pub fn dependencies_paths(deps: &[Dependency], config: &Config) -> Result<(PathBuf, Vec)> { - let git_root = Git::root_of(&config.root.0)?; + let git_root = Git::root_of(&config.root)?; let libs = config.install_lib_dir(); let mut paths = Vec::with_capacity(deps.len()); diff --git a/crates/forge/bin/cmd/watch.rs b/crates/forge/bin/cmd/watch.rs index 54d357c12..b8406565c 100644 --- a/crates/forge/bin/cmd/watch.rs +++ b/crates/forge/bin/cmd/watch.rs @@ -268,7 +268,7 @@ pub async fn watch_test(args: TestArgs) -> Result<()> { args.watch.run_all; let last_test_files = Mutex::new(HashSet::::default()); - let project_root = config.root.0.to_string_lossy().into_owned(); + let project_root = config.root.to_string_lossy().into_owned(); let config = args.watch.watchexec_config_with_override( || [&config.test, &config.src], move |events, command| { diff --git a/crates/forge/bin/main.rs b/crates/forge/bin/main.rs index 9f1eba4b5..f652befdf 100644 --- a/crates/forge/bin/main.rs +++ b/crates/forge/bin/main.rs @@ -108,9 +108,8 @@ fn run() -> Result<()> { ForgeSubcommand::Inspect(cmd) => cmd.run(), ForgeSubcommand::Tree(cmd) => cmd.run(), ForgeSubcommand::Geiger(cmd) => { - let check = cmd.check; let n = cmd.run()?; - if check && n > 0 { + if n > 0 { std::process::exit(n as i32); } Ok(()) diff --git a/crates/forge/src/coverage.rs b/crates/forge/src/coverage.rs index de8d0a8aa..ff3cac46e 100644 --- a/crates/forge/src/coverage.rs +++ b/crates/forge/src/coverage.rs @@ -1,9 +1,10 @@ //! Coverage reports. use alloy_primitives::map::HashMap; -use comfy_table::{presets::ASCII_MARKDOWN, Attribute, Cell, Color, Row, Table}; +use comfy_table::{modifiers::UTF8_ROUND_CORNERS, Attribute, Cell, Color, Row, Table}; use evm_disassembler::disassemble_bytes; use foundry_common::fs; +use semver::Version; use std::{ collections::hash_map, io::Write, @@ -18,24 +19,31 @@ pub trait CoverageReporter { } /// A simple summary reporter that prints the coverage results in a table. -pub struct SummaryReporter { +pub struct CoverageSummaryReporter { /// The summary table. table: Table, /// The total coverage of the entire project. 
total: CoverageSummary, } -impl Default for SummaryReporter { +impl Default for CoverageSummaryReporter { fn default() -> Self { let mut table = Table::new(); - table.load_preset(ASCII_MARKDOWN); - table.set_header(["File", "% Lines", "% Statements", "% Branches", "% Funcs"]); + table.apply_modifier(UTF8_ROUND_CORNERS); + + table.set_header(vec![ + Cell::new("File"), + Cell::new("% Lines"), + Cell::new("% Statements"), + Cell::new("% Branches"), + Cell::new("% Funcs"), + ]); Self { table, total: CoverageSummary::default() } } } -impl SummaryReporter { +impl CoverageSummaryReporter { fn add_row(&mut self, name: impl Into, summary: CoverageSummary) { let mut row = Row::new(); row.add_cell(name.into()) @@ -47,15 +55,15 @@ impl SummaryReporter { } } -impl CoverageReporter for SummaryReporter { +impl CoverageReporter for CoverageSummaryReporter { fn report(mut self, report: &CoverageReport) -> eyre::Result<()> { for (path, summary) in report.summary_by_file() { - self.total += &summary; + self.total.merge(&summary); self.add_row(path.display(), summary); } self.add_row("Total", self.total.clone()); - sh_println!("{}", self.table)?; + sh_println!("\n{}", self.table)?; Ok(()) } } @@ -77,66 +85,82 @@ fn format_cell(hits: usize, total: usize) -> Cell { cell } +/// Writes the coverage report in [LCOV]'s [tracefile format]. +/// +/// [LCOV]: https://github.com/linux-test-project/lcov +/// [tracefile format]: https://man.archlinux.org/man/geninfo.1.en#TRACEFILE_FORMAT pub struct LcovReporter<'a> { - /// Destination buffer - destination: &'a mut (dyn Write + 'a), + out: &'a mut (dyn Write + 'a), + version: Version, } impl<'a> LcovReporter<'a> { - pub fn new(destination: &'a mut (dyn Write + 'a)) -> Self { - Self { destination } + /// Create a new LCOV reporter. + pub fn new(out: &'a mut (dyn Write + 'a), version: Version) -> Self { + Self { out, version } } } impl CoverageReporter for LcovReporter<'_> { fn report(self, report: &CoverageReport) -> eyre::Result<()> { - for (file, items) in report.items_by_source() { - let summary = items.iter().fold(CoverageSummary::default(), |mut summary, item| { - summary += item; - summary - }); + let mut fn_index = 0usize; + for (path, items) in report.items_by_file() { + let summary = CoverageSummary::from_items(items.iter().copied()); - writeln!(self.destination, "TN:")?; - writeln!(self.destination, "SF:{}", file.display())?; + writeln!(self.out, "TN:")?; + writeln!(self.out, "SF:{}", path.display())?; for item in items { - let line = item.loc.line; + let line = item.loc.lines.start; + // `lines` is half-open, so we need to subtract 1 to get the last included line. + let end_line = item.loc.lines.end - 1; let hits = item.hits; match item.kind { - CoverageItemKind::Function { name } => { + CoverageItemKind::Function { ref name } => { let name = format!("{}.{name}", item.loc.contract_name); - writeln!(self.destination, "FN:{line},{name}")?; - writeln!(self.destination, "FNDA:{hits},{name}")?; + if self.version >= Version::new(2, 2, 0) { + // v2.2 changed the FN format. + writeln!(self.out, "FNL:{fn_index},{line},{end_line}")?; + writeln!(self.out, "FNA:{fn_index},{hits},{name}")?; + fn_index += 1; + } else if self.version >= Version::new(2, 0, 0) { + // v2.0 added end_line to FN. 
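// For reference, the records emitted by the three version branches for a function
// `Counter.increment` spanning lines 10..=14 with 3 hits and `fn_index` 0
// (names and numbers are illustrative only, derived from the format strings used here):
//   >= 2.2:  FNL:0,10,14                   FNA:0,3,Counter.increment
//   >= 2.0:  FN:10,14,Counter.increment    FNDA:3,Counter.increment
//   older:   FN:10,Counter.increment       FNDA:3,Counter.increment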
+ writeln!(self.out, "FN:{line},{end_line},{name}")?; + writeln!(self.out, "FNDA:{hits},{name}")?; + } else { + writeln!(self.out, "FN:{line},{name}")?; + writeln!(self.out, "FNDA:{hits},{name}")?; + } } CoverageItemKind::Line => { - writeln!(self.destination, "DA:{line},{hits}")?; + writeln!(self.out, "DA:{line},{hits}")?; } CoverageItemKind::Branch { branch_id, path_id, .. } => { writeln!( - self.destination, + self.out, "BRDA:{line},{branch_id},{path_id},{}", if hits == 0 { "-".to_string() } else { hits.to_string() } )?; } // Statements are not in the LCOV format. // We don't add them in order to avoid doubling line hits. - _ => {} + CoverageItemKind::Statement { .. } => {} } } // Function summary - writeln!(self.destination, "FNF:{}", summary.function_count)?; - writeln!(self.destination, "FNH:{}", summary.function_hits)?; + writeln!(self.out, "FNF:{}", summary.function_count)?; + writeln!(self.out, "FNH:{}", summary.function_hits)?; // Line summary - writeln!(self.destination, "LF:{}", summary.line_count)?; - writeln!(self.destination, "LH:{}", summary.line_hits)?; + writeln!(self.out, "LF:{}", summary.line_count)?; + writeln!(self.out, "LH:{}", summary.line_hits)?; // Branch summary - writeln!(self.destination, "BRF:{}", summary.branch_count)?; - writeln!(self.destination, "BRH:{}", summary.branch_hits)?; + writeln!(self.out, "BRF:{}", summary.branch_count)?; + writeln!(self.out, "BRH:{}", summary.branch_hits)?; - writeln!(self.destination, "end_of_record")?; + writeln!(self.out, "end_of_record")?; } sh_println!("Wrote LCOV report.")?; @@ -150,7 +174,7 @@ pub struct DebugReporter; impl CoverageReporter for DebugReporter { fn report(self, report: &CoverageReport) -> eyre::Result<()> { - for (path, items) in report.items_by_source() { + for (path, items) in report.items_by_file() { sh_println!("Uncovered for {}:", path.display())?; items.iter().for_each(|item| { if item.hits == 0 { @@ -209,7 +233,7 @@ impl CoverageReporter for BytecodeReporter { let mut line_number_cache = LineNumberCache::new(self.root.clone()); for (contract_id, hits) in &report.bytecode_hits { - let ops = disassemble_bytes(hits.bytecode.to_vec())?; + let ops = disassemble_bytes(hits.bytecode().to_vec())?; let mut formatted = String::new(); let source_elements = @@ -217,8 +241,7 @@ impl CoverageReporter for BytecodeReporter { for (code, source_element) in std::iter::zip(ops.iter(), source_elements) { let hits = hits - .hits - .get(&(code.offset as usize)) + .get(code.offset as usize) .map(|h| format!("[{h:03}]")) .unwrap_or(" ".to_owned()); let source_id = source_element.index(); diff --git a/crates/forge/src/gas_report.rs b/crates/forge/src/gas_report.rs index c6ef4a120..224a3eb14 100644 --- a/crates/forge/src/gas_report.rs +++ b/crates/forge/src/gas_report.rs @@ -5,7 +5,7 @@ use crate::{ traces::{CallTraceArena, CallTraceDecoder, CallTraceNode, DecodedCallData}, }; use alloy_primitives::map::HashSet; -use comfy_table::{presets::ASCII_MARKDOWN, *}; +use comfy_table::{modifiers::UTF8_ROUND_CORNERS, Cell, Color, Table}; use foundry_common::{ calc, reports::{report_kind, ReportKind}, @@ -105,6 +105,11 @@ impl GasReport { if is_create_call { trace!(contract_name, "adding create size info"); contract_info.size = trace.data.len(); + if decoder.zk_contracts.contains(&node.trace.address) { + // Intercepted creates in zkvm mode will have the evm bytecode as input + // and the zkvm bytecode as output on the trace. 
+ contract_info.size = trace.output.len(); + } } // Only include top-level calls which account for calldata and base (21.000) cost. @@ -118,6 +123,8 @@ impl GasReport { if is_create_call { trace!(contract_name, "adding create gas info"); contract_info.gas = trace.gas_used; contract_info.size = trace.data.len(); if decoder.zk_contracts.contains(&node.trace.address) { @@ -125,6 +132,7 @@ // and the zkvm bytecode as output on the trace. contract_info.size = trace.output.len(); } } else if let Some(DecodedCallData { signature, .. }) = decoded().await.call_data { let name = signature.split('(').next().unwrap(); // ignore any test/setup functions @@ -164,7 +172,7 @@ impl GasReport { impl Display for GasReport { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { match self.report_kind { - ReportKind::Markdown => { + ReportKind::Text => { for (name, contract) in &self.contracts { if contract.functions.is_empty() { trace!(name, "gas report contract without functions"); @@ -172,8 +180,7 @@ impl Display for GasReport { } let table = self.format_table_output(contract, name); - writeln!(f, "{table}")?; - writeln!(f, "\n")?; + writeln!(f, "\n{table}")?; } } ReportKind::JSON => { @@ -222,27 +229,31 @@ impl GasReport { .unwrap() } - // Helper function to format the table output fn format_table_output(&self, contract: &ContractInfo, name: &str) -> Table { let mut table = Table::new(); - table.load_preset(ASCII_MARKDOWN); - table.set_header([Cell::new(format!("{name} contract")) - .add_attribute(Attribute::Bold) - .fg(Color::Green)]); - - table.add_row([ - Cell::new("Deployment Cost").add_attribute(Attribute::Bold).fg(Color::Cyan), - Cell::new("Deployment Size").add_attribute(Attribute::Bold).fg(Color::Cyan), + table.apply_modifier(UTF8_ROUND_CORNERS); + + table.set_header(vec![Cell::new(format!("{name} Contract")).fg(Color::Magenta)]); + + table.add_row(vec![ + Cell::new("Deployment Cost").fg(Color::Cyan), + Cell::new("Deployment Size").fg(Color::Cyan), + ]); + table.add_row(vec![ + Cell::new(contract.gas.to_string()), + Cell::new(contract.size.to_string()), ]); - table.add_row([contract.gas.to_string(), contract.size.to_string()]); - - table.add_row([ - Cell::new("Function Name").add_attribute(Attribute::Bold).fg(Color::Magenta), - Cell::new("min").add_attribute(Attribute::Bold).fg(Color::Green), - Cell::new("avg").add_attribute(Attribute::Bold).fg(Color::Yellow), - Cell::new("median").add_attribute(Attribute::Bold).fg(Color::Yellow), - Cell::new("max").add_attribute(Attribute::Bold).fg(Color::Red), - Cell::new("# calls").add_attribute(Attribute::Bold), + + // Add a blank row to separate deployment info from function info.
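+ // A row with a single empty cell renders as a blank separator line between the deployment and function sections (visible in the updated CLI snapshots below).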
+ table.add_row(vec![Cell::new("")]); + + table.add_row(vec![ + Cell::new("Function Name"), + Cell::new("Min").fg(Color::Green), + Cell::new("Avg").fg(Color::Yellow), + Cell::new("Median").fg(Color::Yellow), + Cell::new("Max").fg(Color::Red), + Cell::new("# Calls").fg(Color::Cyan), ]); contract.functions.iter().for_each(|(fname, sigs)| { @@ -251,8 +262,8 @@ impl GasReport { let display_name = if sigs.len() == 1 { fname.to_string() } else { sig.replace(':', "") }; - table.add_row([ - Cell::new(display_name).add_attribute(Attribute::Bold), + table.add_row(vec![ + Cell::new(display_name), Cell::new(gas_info.min.to_string()).fg(Color::Green), Cell::new(gas_info.mean.to_string()).fg(Color::Yellow), Cell::new(gas_info.median.to_string()).fg(Color::Yellow), diff --git a/crates/forge/src/lib.rs b/crates/forge/src/lib.rs index 0bec55153..ddeada0a6 100644 --- a/crates/forge/src/lib.rs +++ b/crates/forge/src/lib.rs @@ -7,16 +7,6 @@ extern crate foundry_common; #[macro_use] extern crate tracing; -use foundry_compilers::ProjectCompileOutput; -use foundry_config::{ - validate_profiles, Config, FuzzConfig, InlineConfig, InlineConfigError, InlineConfigParser, - InvariantConfig, NatSpec, -}; -use proptest::test_runner::{ - FailurePersistence, FileFailurePersistence, RngAlgorithm, TestRng, TestRunner, -}; -use std::path::Path; - pub mod coverage; pub mod gas_report; @@ -33,201 +23,3 @@ pub mod result; // TODO: remove pub use foundry_common::traits::TestFilter; pub use foundry_evm::*; - -/// Metadata on how to run fuzz/invariant tests -#[derive(Clone, Debug, Default)] -pub struct TestOptions { - /// The base "fuzz" test configuration. To be used as a fallback in case - /// no more specific configs are found for a given run. - pub fuzz: FuzzConfig, - /// The base "invariant" test configuration. To be used as a fallback in case - /// no more specific configs are found for a given run. - pub invariant: InvariantConfig, - /// Contains per-test specific "fuzz" configurations. - pub inline_fuzz: InlineConfig, - /// Contains per-test specific "invariant" configurations. - pub inline_invariant: InlineConfig, -} - -impl TestOptions { - /// Tries to create a new instance by detecting inline configurations from the project compile - /// output. - pub fn new( - output: &ProjectCompileOutput, - root: &Path, - profiles: Vec, - base_fuzz: FuzzConfig, - base_invariant: InvariantConfig, - ) -> Result { - let natspecs: Vec = NatSpec::parse(output, root); - let mut inline_invariant = InlineConfig::::default(); - let mut inline_fuzz = InlineConfig::::default(); - - // Validate all natspecs - for natspec in &natspecs { - validate_profiles(natspec, &profiles)?; - } - - // Firstly, apply contract-level configurations - for natspec in natspecs.iter().filter(|n| n.function.is_none()) { - if let Some(fuzz) = base_fuzz.merge(natspec)? { - inline_fuzz.insert_contract(&natspec.contract, fuzz); - } - - if let Some(invariant) = base_invariant.merge(natspec)? { - inline_invariant.insert_contract(&natspec.contract, invariant); - } - } - - for (natspec, f) in natspecs.iter().filter_map(|n| n.function.as_ref().map(|f| (n, f))) { - // Apply in-line configurations for the current profile - let c = &natspec.contract; - - // We might already have inserted contract-level configs above, so respect data already - // present in inline configs. - let base_fuzz = inline_fuzz.get(c, f).unwrap_or(&base_fuzz); - let base_invariant = inline_invariant.get(c, f).unwrap_or(&base_invariant); - - if let Some(fuzz) = base_fuzz.merge(natspec)? 
{ - inline_fuzz.insert_fn(c, f, fuzz); - } - - if let Some(invariant) = base_invariant.merge(natspec)? { - inline_invariant.insert_fn(c, f, invariant); - } - } - - Ok(Self { fuzz: base_fuzz, invariant: base_invariant, inline_fuzz, inline_invariant }) - } - - /// Returns a "fuzz" test runner instance. Parameters are used to select tight scoped fuzz - /// configs that apply for a contract-function pair. A fallback configuration is applied - /// if no specific setup is found for a given input. - /// - /// - `contract_id` is the id of the test contract, expressed as a relative path from the - /// project root. - /// - `test_fn` is the name of the test function declared inside the test contract. - pub fn fuzz_runner(&self, contract_id: &str, test_fn: &str) -> TestRunner { - let fuzz_config = self.fuzz_config(contract_id, test_fn).clone(); - let failure_persist_path = fuzz_config - .failure_persist_dir - .unwrap() - .join(fuzz_config.failure_persist_file.unwrap()) - .into_os_string() - .into_string() - .unwrap(); - self.fuzzer_with_cases( - fuzz_config.runs, - fuzz_config.max_test_rejects, - Some(Box::new(FileFailurePersistence::Direct(failure_persist_path.leak()))), - ) - } - - /// Returns an "invariant" test runner instance. Parameters are used to select tight scoped fuzz - /// configs that apply for a contract-function pair. A fallback configuration is applied - /// if no specific setup is found for a given input. - /// - /// - `contract_id` is the id of the test contract, expressed as a relative path from the - /// project root. - /// - `test_fn` is the name of the test function declared inside the test contract. - pub fn invariant_runner(&self, contract_id: &str, test_fn: &str) -> TestRunner { - let invariant = self.invariant_config(contract_id, test_fn); - self.fuzzer_with_cases(invariant.runs, invariant.max_assume_rejects, None) - } - - /// Returns a "fuzz" configuration setup. Parameters are used to select tight scoped fuzz - /// configs that apply for a contract-function pair. A fallback configuration is applied - /// if no specific setup is found for a given input. - /// - /// - `contract_id` is the id of the test contract, expressed as a relative path from the - /// project root. - /// - `test_fn` is the name of the test function declared inside the test contract. - pub fn fuzz_config(&self, contract_id: &str, test_fn: &str) -> &FuzzConfig { - self.inline_fuzz.get(contract_id, test_fn).unwrap_or(&self.fuzz) - } - - /// Returns an "invariant" configuration setup. Parameters are used to select tight scoped - /// invariant configs that apply for a contract-function pair. A fallback configuration is - /// applied if no specific setup is found for a given input. - /// - /// - `contract_id` is the id of the test contract, expressed as a relative path from the - /// project root. - /// - `test_fn` is the name of the test function declared inside the test contract. - pub fn invariant_config(&self, contract_id: &str, test_fn: &str) -> &InvariantConfig { - self.inline_invariant.get(contract_id, test_fn).unwrap_or(&self.invariant) - } - - pub fn fuzzer_with_cases( - &self, - cases: u32, - max_global_rejects: u32, - file_failure_persistence: Option>, - ) -> TestRunner { - let config = proptest::test_runner::Config { - failure_persistence: file_failure_persistence, - cases, - max_global_rejects, - // Disable proptest shrink: for fuzz tests we provide single counterexample, - // for invariant tests we shrink outside proptest. 
- max_shrink_iters: 0, - ..Default::default() - }; - - if let Some(seed) = &self.fuzz.seed { - trace!(target: "forge::test", %seed, "building deterministic fuzzer"); - let rng = TestRng::from_seed(RngAlgorithm::ChaCha, &seed.to_be_bytes::<32>()); - TestRunner::new_with_rng(config, rng) - } else { - trace!(target: "forge::test", "building stochastic fuzzer"); - TestRunner::new(config) - } - } -} - -/// Builder utility to create a [`TestOptions`] instance. -#[derive(Default)] -#[must_use = "builders do nothing unless you call `build` on them"] -pub struct TestOptionsBuilder { - fuzz: Option, - invariant: Option, - profiles: Option>, -} - -impl TestOptionsBuilder { - /// Sets a [`FuzzConfig`] to be used as base "fuzz" configuration. - pub fn fuzz(mut self, conf: FuzzConfig) -> Self { - self.fuzz = Some(conf); - self - } - - /// Sets a [`InvariantConfig`] to be used as base "invariant" configuration. - pub fn invariant(mut self, conf: InvariantConfig) -> Self { - self.invariant = Some(conf); - self - } - - /// Sets available configuration profiles. Profiles are useful to validate existing in-line - /// configurations. This argument is necessary in case a `compile_output`is provided. - pub fn profiles(mut self, p: Vec) -> Self { - self.profiles = Some(p); - self - } - - /// Creates an instance of [`TestOptions`]. This takes care of creating "fuzz" and - /// "invariant" fallbacks, and extracting all inline test configs, if available. - /// - /// `root` is a reference to the user's project root dir. This is essential - /// to determine the base path of generated contract identifiers. This is to provide correct - /// matchers for inline test configs. - pub fn build( - self, - output: &ProjectCompileOutput, - root: &Path, - ) -> Result { - let profiles: Vec = - self.profiles.unwrap_or_else(|| vec![Config::selected_profile().into()]); - let base_fuzz = self.fuzz.unwrap_or_default(); - let base_invariant = self.invariant.unwrap_or_default(); - TestOptions::new(output, root, profiles, base_fuzz, base_invariant) - } -} diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index c6a43c1c5..a3455aa38 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -2,23 +2,25 @@ use crate::{ progress::TestsProgress, result::SuiteResult, runner::LIBRARY_DEPLOYER, ContractRunner, - TestFilter, TestOptions, + TestFilter, }; use alloy_json_abi::{Function, JsonAbi}; use alloy_primitives::{Address, Bytes, U256}; use eyre::Result; -use foundry_common::{get_contract_name, ContractsByArtifact, TestFunctionExt}; +use foundry_common::{get_contract_name, shell::verbosity, ContractsByArtifact, TestFunctionExt}; use foundry_compilers::{ - artifacts::{CompactBytecode, CompactContractBytecode, CompactDeployedBytecode, Libraries}, + artifacts::{ + CompactBytecode, CompactContractBytecode, CompactDeployedBytecode, Contract, Libraries, + }, compilers::Compiler, zksync::compile::output::ProjectCompileOutput as ZkProjectCompileOutput, Artifact, ArtifactId, ProjectCompileOutput, }; -use foundry_config::Config; +use foundry_config::{Config, InlineConfig}; use foundry_evm::{ backend::Backend, decode::RevertDecoder, - executors::ExecutorBuilder, + executors::{Executor, ExecutorBuilder}, fork::CreateFork, inspectors::CheatsConfig, opts::EvmOpts, @@ -53,44 +55,41 @@ pub struct MultiContractRunner { /// Mapping of contract name to JsonAbi, creation bytecode and library bytecode which /// needs to be deployed & linked against pub contracts: DeployableContracts, - /// The EVM instance 
used in the test runner - pub evm_opts: EvmOpts, - /// The configured evm - pub env: revm::primitives::Env, - /// The EVM spec - pub evm_spec: SpecId, - /// Revert decoder. Contains all known errors and their selectors. - pub revert_decoder: RevertDecoder, - /// The address which will be used as the `from` field in all EVM calls - pub sender: Option
, - /// The fork to use at launch - pub fork: Option, - /// Project config. - pub config: Arc, - /// Whether to collect coverage info - pub coverage: bool, - /// Whether to collect debug info - pub debug: bool, - /// Whether to enable steps tracking in the tracer. - pub decode_internal: InternalTraceMode, - /// Settings related to fuzz and/or invariant tests - pub test_options: TestOptions, - /// Whether to enable call isolation - pub isolation: bool, - /// Whether to enable Alphanet features. - pub alphanet: bool, /// Known contracts linked with computed library addresses. pub known_contracts: ContractsByArtifact, + /// Revert decoder. Contains all known errors and their selectors. + pub revert_decoder: RevertDecoder, /// Libraries to deploy. pub libs_to_deploy: Vec, /// Library addresses used to link contracts. pub libraries: Libraries, + + /// The fork to use at launch + pub fork: Option, + + /// The base configuration for the test runner. + pub tcfg: TestRunnerConfig, + /// Dual compiled contracts pub dual_compiled_contracts: DualCompiledContracts, /// Use zk runner. pub use_zk: bool, } +impl std::ops::Deref for MultiContractRunner { + type Target = TestRunnerConfig; + + fn deref(&self) -> &Self::Target { + &self.tcfg + } +} + +impl std::ops::DerefMut for MultiContractRunner { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.tcfg + } +} + impl MultiContractRunner { /// Returns an iterator over all contracts that match the filter. pub fn matching_contracts<'a: 'b, 'b>( @@ -206,7 +205,7 @@ impl MultiContractRunner { let result = self.run_test_suite( id, contract, - db.clone(), + &db, filter, &tokio_handle, Some(&tests_progress), @@ -229,8 +228,7 @@ impl MultiContractRunner { } else { contracts.par_iter().for_each(|&(id, contract)| { let _guard = tokio_handle.enter(); - let result = - self.run_test_suite(id, contract, db.clone(), filter, &tokio_handle, None); + let result = self.run_test_suite(id, contract, &db, filter, &tokio_handle, None); let _ = tx.send((id.identifier(), result)); }) } @@ -240,7 +238,7 @@ impl MultiContractRunner { &self, artifact_id: &ArtifactId, contract: &TestContract, - db: Backend, + db: &Backend, filter: &dyn TestFilter, tokio_handle: &tokio::runtime::Handle, progress: Option<&TestsProgress>, @@ -248,37 +246,6 @@ impl MultiContractRunner { let identifier = artifact_id.identifier(); let mut span_name = identifier.as_str(); - let cheats_config = CheatsConfig::new( - &self.config, - self.evm_opts.clone(), - Some(self.known_contracts.clone()), - Some(artifact_id.name.clone()), - Some(artifact_id.version.clone()), - self.dual_compiled_contracts.clone(), - self.use_zk, - None, - ); - - let trace_mode = TraceMode::default() - .with_debug(self.debug) - .with_decode_internal(self.decode_internal) - .with_verbosity(self.evm_opts.verbosity); - - let executor = ExecutorBuilder::new() - .inspectors(|stack| { - stack - .cheatcodes(Arc::new(cheats_config)) - .trace_mode(trace_mode) - .coverage(self.coverage) - .enable_isolation(self.isolation) - .alphanet(self.alphanet) - }) - .use_zk_vm(self.use_zk) - .spec(self.evm_spec) - .gas_limit(self.evm_opts.gas_limit()) - .legacy_assertions(self.config.legacy_assertions) - .build(self.env.clone(), db); - if !enabled!(tracing::Level::TRACE) { span_name = get_contract_name(&identifier); } @@ -288,20 +255,22 @@ impl MultiContractRunner { debug!("start executing all tests in contract"); - let runner = ContractRunner { - name: &identifier, + let runner = ContractRunner::new( + &identifier, contract, - libs_to_deploy: 
&self.libs_to_deploy, - executor, - revert_decoder: &self.revert_decoder, - initial_balance: self.evm_opts.initial_balance, - sender: self.sender.unwrap_or_default(), - debug: self.debug, + self.tcfg.executor( + self.known_contracts.clone(), + artifact_id, + db.clone(), + self.dual_compiled_contracts.clone(), + self.use_zk, + ), progress, tokio_handle, span, - }; - let r = runner.run_tests(filter, &self.test_options, self.known_contracts.clone()); + self, + ); + let r = runner.run_tests(filter); debug!(duration=?r.duration, "executed all tests in contract"); @@ -309,6 +278,123 @@ impl MultiContractRunner { } } +/// Configuration for the test runner. +/// +/// This is modified after instantiation through inline config. +#[derive(Clone)] +pub struct TestRunnerConfig { + /// Project config. + pub config: Arc, + /// Inline configuration. + pub inline_config: Arc, + + /// EVM configuration. + pub evm_opts: EvmOpts, + /// EVM environment. + pub env: revm::primitives::Env, + /// EVM version. + pub spec_id: SpecId, + /// The address which will be used to deploy the initial contracts and send all transactions. + pub sender: Address, + + /// Whether to collect coverage info + pub coverage: bool, + /// Whether to collect debug info + pub debug: bool, + /// Whether to enable steps tracking in the tracer. + pub decode_internal: InternalTraceMode, + /// Whether to enable call isolation. + pub isolation: bool, + /// Whether to enable Odyssey features. + pub odyssey: bool, +} + +impl TestRunnerConfig { + /// Reconfigures all fields using the given `config`. + pub fn reconfigure_with(&mut self, config: Arc) { + debug_assert!(!Arc::ptr_eq(&self.config, &config)); + + // TODO: self.evm_opts + // TODO: self.env + self.spec_id = config.evm_spec_id(); + self.sender = config.sender; + // self.coverage = N/A; + // self.debug = N/A; + // self.decode_internal = N/A; + // self.isolation = N/A; + self.odyssey = config.odyssey; + + self.config = config; + } + + /// Configures the given executor with this configuration. + pub fn configure_executor(&self, executor: &mut Executor) { + // TODO: See above + + let inspector = executor.inspector_mut(); + // inspector.set_env(&self.env); + if let Some(cheatcodes) = inspector.cheatcodes.as_mut() { + cheatcodes.config = + Arc::new(cheatcodes.config.clone_with(&self.config, self.evm_opts.clone())); + } + inspector.tracing(self.trace_mode()); + inspector.collect_coverage(self.coverage); + inspector.enable_isolation(self.isolation); + inspector.odyssey(self.odyssey); + // inspector.set_create2_deployer(self.evm_opts.create2_deployer); + + // executor.env_mut().clone_from(&self.env); + executor.set_spec_id(self.spec_id); + // executor.set_gas_limit(self.evm_opts.gas_limit()); + executor.set_legacy_assertions(self.config.legacy_assertions); + } + + /// Creates a new executor with this configuration. 
+ pub fn executor( + &self, + known_contracts: ContractsByArtifact, + artifact_id: &ArtifactId, + db: Backend, + dual_compiled_contracts: DualCompiledContracts, + use_zk: bool, + ) -> Executor { + let cheats_config = Arc::new(CheatsConfig::new( + &self.config, + self.evm_opts.clone(), + Some(known_contracts), + Some(artifact_id.name.clone()), + Some(artifact_id.version.clone()), + dual_compiled_contracts, + use_zk, + None, + )); + + ExecutorBuilder::new() + .inspectors(|stack| { + stack + .cheatcodes(cheats_config) + .trace_mode(self.trace_mode()) + .coverage(self.coverage) + .enable_isolation(self.isolation) + .odyssey(self.odyssey) + .create2_deployer(self.evm_opts.create2_deployer) + }) + .use_zk_vm(self.use_zk) + .spec_id(self.spec_id) + .gas_limit(self.evm_opts.gas_limit()) + .legacy_assertions(self.config.legacy_assertions) + .build(self.env.clone(), db) + } + + fn trace_mode(&self) -> TraceMode { + TraceMode::default() + .with_debug(self.debug) + .with_decode_internal(self.decode_internal) + .with_verbosity(self.evm_opts.verbosity) + .with_state_changes(verbosity() > 4) + } +} + /// Builder used for instantiating the multi-contract runner #[derive(Clone, Debug)] #[must_use = "builders do nothing unless you call `build` on them"] @@ -332,10 +418,8 @@ pub struct MultiContractRunnerBuilder { pub decode_internal: InternalTraceMode, /// Whether to enable call isolation pub isolation: bool, - /// Whether to enable Alphanet features. - pub alphanet: bool, - /// Settings related to fuzz and/or invariant tests - pub test_options: Option, + /// Whether to enable Odyssey features. + pub odyssey: bool, } impl MultiContractRunnerBuilder { @@ -349,9 +433,8 @@ impl MultiContractRunnerBuilder { coverage: Default::default(), debug: Default::default(), isolation: Default::default(), - test_options: Default::default(), decode_internal: Default::default(), - alphanet: Default::default(), + odyssey: Default::default(), } } @@ -375,11 +458,6 @@ impl MultiContractRunnerBuilder { self } - pub fn with_test_options(mut self, test_options: TestOptions) -> Self { - self.test_options = Some(test_options); - self - } - pub fn set_coverage(mut self, enable: bool) -> Self { self.coverage = enable; self @@ -400,17 +478,17 @@ impl MultiContractRunnerBuilder { self } - pub fn alphanet(mut self, enable: bool) -> Self { - self.alphanet = enable; + pub fn odyssey(mut self, enable: bool) -> Self { + self.odyssey = enable; self } /// Given an EVM, proceeds to return a runner which is able to execute all tests /// against that evm - pub fn build( + pub fn build>( self, root: &Path, - output: ProjectCompileOutput, + output: &ProjectCompileOutput, zk_output: Option, env: revm::primitives::Env, evm_opts: EvmOpts, @@ -500,22 +578,27 @@ impl MultiContractRunnerBuilder { Ok(MultiContractRunner { contracts: deployable_contracts, - evm_opts, - env, - evm_spec: self.evm_spec.unwrap_or(SpecId::CANCUN), - sender: self.sender, revert_decoder, - fork: self.fork, - config: self.config, - coverage: self.coverage, - debug: self.debug, - decode_internal: self.decode_internal, - test_options: self.test_options.unwrap_or_default(), - isolation: self.isolation, - alphanet: self.alphanet, known_contracts, libs_to_deploy, libraries, + fork: self.fork, + + tcfg: TestRunnerConfig { + evm_opts, + env, + spec_id: self.evm_spec.unwrap_or_else(|| self.config.evm_spec_id()), + sender: self.sender.unwrap_or(self.config.sender), + + coverage: self.coverage, + debug: self.debug, + decode_internal: self.decode_internal, + inline_config: 
Arc::new(InlineConfig::new_parsed(output, &self.config)?), + isolation: self.isolation, + odyssey: self.odyssey, + + config: self.config, + }, dual_compiled_contracts, use_zk, }) diff --git a/crates/forge/src/result.rs b/crates/forge/src/result.rs index 7f02db577..5b134194f 100644 --- a/crates/forge/src/result.rs +++ b/crates/forge/src/result.rs @@ -467,12 +467,12 @@ impl fmt::Display for TestResult { impl TestResult { /// Creates a new test result starting from test setup results. - pub fn new(setup: TestSetup) -> Self { + pub fn new(setup: &TestSetup) -> Self { Self { - labeled_addresses: setup.labels, - logs: setup.logs, - traces: setup.traces, - coverage: setup.coverage, + labeled_addresses: setup.labels.clone(), + logs: setup.logs.clone(), + traces: setup.traces.clone(), + coverage: setup.coverage.clone(), ..Default::default() } } @@ -482,10 +482,10 @@ impl TestResult { Self { status: TestStatus::Failure, reason: Some(reason), ..Default::default() } } - /// Creates a failed test setup result. - pub fn setup_fail(setup: TestSetup) -> Self { + /// Creates a test setup result. + pub fn setup_result(setup: TestSetup) -> Self { Self { - status: TestStatus::Failure, + status: if setup.skipped { TestStatus::Skipped } else { TestStatus::Failure }, reason: setup.reason, logs: setup.logs, traces: setup.traces, @@ -496,27 +496,25 @@ impl TestResult { } /// Returns the skipped result for single test (used in skipped fuzz test too). - pub fn single_skip(mut self, reason: SkipReason) -> Self { + pub fn single_skip(&mut self, reason: SkipReason) { self.status = TestStatus::Skipped; self.reason = reason.0; - self } /// Returns the failed result with reason for single test. - pub fn single_fail(mut self, reason: Option) -> Self { + pub fn single_fail(&mut self, reason: Option) { self.status = TestStatus::Failure; self.reason = reason; - self } /// Returns the result for single test. Merges execution results (logs, labeled addresses, /// traces and coverages) in initial setup results. pub fn single_result( - mut self, + &mut self, success: bool, reason: Option, raw_call_result: RawCallResult, - ) -> Self { + ) { self.kind = TestKind::Unit { gas: raw_call_result.gas_used.wrapping_sub(raw_call_result.stipend) }; @@ -539,13 +537,11 @@ impl TestResult { self.gas_snapshots = cheatcodes.gas_snapshots; self.deprecated_cheatcodes = cheatcodes.deprecated; } - - self } /// Returns the result for a fuzzed test. Merges fuzz execution results (logs, labeled /// addresses, traces and coverages) in initial setup results. - pub fn fuzz_result(mut self, result: FuzzTestResult) -> Self { + pub fn fuzz_result(&mut self, result: FuzzTestResult) { self.kind = TestKind::Fuzz { median_gas: result.median_gas(false), mean_gas: result.mean_gas(false), @@ -572,26 +568,23 @@ impl TestResult { self.gas_report_traces = result.gas_report_traces.into_iter().map(|t| vec![t]).collect(); self.breakpoints = result.breakpoints.unwrap_or_default(); self.deprecated_cheatcodes = result.deprecated_cheatcodes; - - self } /// Returns the skipped result for invariant test. - pub fn invariant_skip(mut self, reason: SkipReason) -> Self { + pub fn invariant_skip(&mut self, reason: SkipReason) { self.kind = TestKind::Invariant { runs: 1, calls: 1, reverts: 1, metrics: HashMap::default() }; self.status = TestStatus::Skipped; self.reason = reason.0; - self } /// Returns the fail result for replayed invariant test. 
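+ ///
+ /// A hedged usage sketch (hypothetical `name: String` and `seq: Vec<BaseCounterExample>`):
+ /// calling `result.invariant_replay_fail(true, &name, seq)` marks the result as a failure
+ /// and records `CounterExample::Sequence(seq)` as the counterexample.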
pub fn invariant_replay_fail( - mut self, + &mut self, replayed_entirely: bool, invariant_name: &String, call_sequence: Vec, - ) -> Self { + ) { self.kind = TestKind::Invariant { runs: 1, calls: 1, reverts: 1, metrics: HashMap::default() }; self.status = TestStatus::Failure; @@ -601,22 +594,20 @@ impl TestResult { Some(format!("{invariant_name} persisted failure revert")) }; self.counterexample = Some(CounterExample::Sequence(call_sequence)); - self } /// Returns the fail result for invariant test setup. - pub fn invariant_setup_fail(mut self, e: Report) -> Self { + pub fn invariant_setup_fail(&mut self, e: Report) { self.kind = TestKind::Invariant { runs: 0, calls: 0, reverts: 0, metrics: HashMap::default() }; self.status = TestStatus::Failure; self.reason = Some(format!("failed to set up invariant testing environment: {e}")); - self } /// Returns the invariant test result. #[allow(clippy::too_many_arguments)] pub fn invariant_result( - mut self, + &mut self, gas_report_traces: Vec>, success: bool, reason: Option, @@ -624,7 +615,7 @@ impl TestResult { cases: Vec, reverts: usize, metrics: Map, - ) -> Self { + ) { self.kind = TestKind::Invariant { runs: cases.len(), calls: cases.iter().map(|sequence| sequence.cases().len()).sum(), @@ -638,7 +629,6 @@ impl TestResult { self.reason = reason; self.counterexample = counterexample; self.gas_report_traces = gas_report_traces; - self } /// Returns `true` if this is the result of a fuzz test @@ -762,9 +752,13 @@ pub struct TestSetup { pub traces: Traces, /// Coverage info during setup. pub coverage: Option, + /// Addresses of external libraries deployed during setup. + pub deployed_libs: Vec
, /// The reason the setup failed, if it did. pub reason: Option, + /// Whether setup and entire test suite is skipped. + pub skipped: bool, } impl TestSetup { @@ -772,6 +766,10 @@ impl TestSetup { Self { reason: Some(reason), ..Default::default() } } + pub fn skipped(reason: String) -> Self { + Self { reason: Some(reason), skipped: true, ..Default::default() } + } + pub fn extend(&mut self, raw: RawCallResult, trace_kind: TraceKind) { self.logs.extend(raw.logs); self.labels.extend(raw.labels); diff --git a/crates/forge/src/runner.rs b/crates/forge/src/runner.rs index 094ce9e67..697ea3cf3 100644 --- a/crates/forge/src/runner.rs +++ b/crates/forge/src/runner.rs @@ -2,20 +2,17 @@ use crate::{ fuzz::{invariant::BasicTxDetails, BaseCounterExample}, - multi_runner::{is_matching_test, TestContract}, + multi_runner::{is_matching_test, TestContract, TestRunnerConfig}, progress::{start_fuzz_progress, TestsProgress}, result::{SuiteResult, TestResult, TestSetup}, - TestFilter, TestOptions, + MultiContractRunner, TestFilter, }; use alloy_dyn_abi::DynSolValue; use alloy_json_abi::Function; -use alloy_primitives::{address, map::HashMap, Address, Bytes, U256}; +use alloy_primitives::{address, map::HashMap, Address, U256}; use eyre::Result; -use foundry_common::{ - contracts::{ContractsByAddress, ContractsByArtifact}, - TestFunctionExt, TestFunctionKind, -}; -use foundry_config::{FuzzConfig, InvariantConfig}; +use foundry_common::{contracts::ContractsByAddress, TestFunctionExt, TestFunctionKind}; +use foundry_config::Config; use foundry_evm::{ constants::CALLER, decode::RevertDecoder, @@ -33,9 +30,12 @@ use foundry_evm::{ }, traces::{load_contracts, TraceKind, TraceMode}, }; -use proptest::test_runner::TestRunner; +use proptest::test_runner::{ + FailurePersistence, FileFailurePersistence, RngAlgorithm, TestRng, TestRunner, +}; use rayon::prelude::*; -use std::{borrow::Cow, cmp::min, collections::BTreeMap, time::Instant}; +use std::{borrow::Cow, cmp::min, collections::BTreeMap, sync::Arc, time::Instant}; +use tracing::Span; /// When running tests, we deploy all external libraries present in the project. To avoid additional /// libraries affecting nonces of senders used in tests, we are using separate address to @@ -45,45 +45,73 @@ use std::{borrow::Cow, cmp::min, collections::BTreeMap, time::Instant}; pub const LIBRARY_DEPLOYER: Address = address!("1F95D37F27EA0dEA9C252FC09D5A6eaA97647353"); /// A type that executes all tests of a contract -#[derive(Clone, Debug)] pub struct ContractRunner<'a> { /// The name of the contract. - pub name: &'a str, + name: &'a str, /// The data of the contract. - pub contract: &'a TestContract, - /// The libraries that need to be deployed before the contract. - pub libs_to_deploy: &'a Vec, - /// The executor used by the runner. - pub executor: Executor, - /// Revert decoder. Contains all known errors. - pub revert_decoder: &'a RevertDecoder, - /// The initial balance of the test contract. - pub initial_balance: U256, - /// The address which will be used as the `from` field in all EVM calls. - pub sender: Address, - /// Whether debug traces should be generated. - pub debug: bool, + contract: &'a TestContract, + /// The EVM executor. + executor: Executor, /// Overall test run progress. - pub progress: Option<&'a TestsProgress>, + progress: Option<&'a TestsProgress>, /// The handle to the tokio runtime. - pub tokio_handle: &'a tokio::runtime::Handle, + tokio_handle: &'a tokio::runtime::Handle, /// The span of the contract. 
- pub span: tracing::Span, + span: tracing::Span, + /// The contract-level configuration. + tcfg: Cow<'a, TestRunnerConfig>, + /// The parent runner. + mcr: &'a MultiContractRunner, } -impl ContractRunner<'_> { +impl<'a> std::ops::Deref for ContractRunner<'a> { + type Target = Cow<'a, TestRunnerConfig>; + + #[inline(always)] + fn deref(&self) -> &Self::Target { + &self.tcfg + } +} + +impl<'a> ContractRunner<'a> { + pub fn new( + name: &'a str, + contract: &'a TestContract, + executor: Executor, + progress: Option<&'a TestsProgress>, + tokio_handle: &'a tokio::runtime::Handle, + span: Span, + mcr: &'a MultiContractRunner, + ) -> Self { + Self { + name, + contract, + executor, + progress, + tokio_handle, + span, + tcfg: Cow::Borrowed(&mcr.tcfg), + mcr, + } + } + /// Deploys the test contract inside the runner from the sending account, and optionally runs /// the `setUp` function on the test contract. pub fn setup(&mut self, call_setup: bool) -> TestSetup { - match self._setup(call_setup) { - Ok(setup) => setup, - Err(err) => TestSetup::failed(err.to_string()), - } + self._setup(call_setup).unwrap_or_else(|err| { + if err.to_string().contains("skipped") { + TestSetup::skipped(err.to_string()) + } else { + TestSetup::failed(err.to_string()) + } + }) } fn _setup(&mut self, call_setup: bool) -> Result { trace!(call_setup, "setting up"); + self.apply_contract_inline_config()?; + // We max out their balance so that they can deploy and make calls. self.executor.set_balance(self.sender, U256::MAX)?; self.executor.set_balance(CALLER, U256::MAX)?; @@ -95,13 +123,19 @@ impl ContractRunner<'_> { self.executor.set_balance(LIBRARY_DEPLOYER, U256::MAX)?; let mut result = TestSetup::default(); - for code in self.libs_to_deploy.iter() { + for code in self.mcr.libs_to_deploy.iter() { let deploy_result = self.executor.deploy( LIBRARY_DEPLOYER, code.clone(), U256::ZERO, - Some(self.revert_decoder), + Some(&self.mcr.revert_decoder), ); + + // Record deployed library address. + if let Ok(deployed) = &deploy_result { + result.deployed_libs.push(deployed.address); + } + let (raw, reason) = RawCallResult::from_evm_result(deploy_result.map(Into::into))?; result.extend(raw, TraceKind::Deployment); if reason.is_some() { @@ -119,14 +153,14 @@ impl ContractRunner<'_> { // Set the contracts initial balance before deployment, so it is available during // construction - self.executor.set_balance(address, self.initial_balance)?; + self.executor.set_balance(address, self.initial_balance())?; // Deploy the test contract let deploy_result = self.executor.deploy( self.sender, self.contract.bytecode.clone(), U256::ZERO, - Some(self.revert_decoder), + Some(&self.mcr.revert_decoder), ); if let Ok(dr) = &deploy_result { debug_assert_eq!(dr.address, address); @@ -139,9 +173,9 @@ impl ContractRunner<'_> { } // Reset `self.sender`s, `CALLER`s and `LIBRARY_DEPLOYER`'s balance to the initial balance. 
- self.executor.set_balance(self.sender, self.initial_balance)?; - self.executor.set_balance(CALLER, self.initial_balance)?; - self.executor.set_balance(LIBRARY_DEPLOYER, self.initial_balance)?; + self.executor.set_balance(self.sender, self.initial_balance())?; + self.executor.set_balance(CALLER, self.initial_balance())?; + self.executor.set_balance(LIBRARY_DEPLOYER, self.initial_balance())?; self.executor.deploy_create2_deployer()?; @@ -161,7 +195,7 @@ impl ContractRunner<'_> { // Optionally call the `setUp` function if call_setup { trace!("calling setUp"); - let res = self.executor.setup(None, address, Some(self.revert_decoder)); + let res = self.executor.setup(None, address, Some(&self.mcr.revert_decoder)); let (raw, reason) = RawCallResult::from_evm_result(res)?; result.extend(raw, TraceKind::Setup); result.reason = reason; @@ -172,6 +206,31 @@ impl ContractRunner<'_> { Ok(result) } + fn initial_balance(&self) -> U256 { + self.evm_opts.initial_balance + } + + /// Configures this runner with the inline configuration for the contract. + fn apply_contract_inline_config(&mut self) -> Result<()> { + if self.inline_config.contains_contract(self.name) { + let new_config = Arc::new(self.inline_config(None)?); + self.tcfg.to_mut().reconfigure_with(new_config); + let prev_tracer = self.executor.inspector_mut().tracer.take(); + self.tcfg.configure_executor(&mut self.executor); + // Don't set tracer here. + self.executor.inspector_mut().tracer = prev_tracer; + } + Ok(()) + } + + /// Returns the configuration for a contract or function. + fn inline_config(&self, func: Option<&Function>) -> Result { + let function = func.map(|f| f.name.as_str()).unwrap_or(""); + let config = + self.mcr.inline_config.merge(self.name, function, &self.config).extract::()?; + Ok(config) + } + /// Collect fixtures from test contract. 
/// /// Fixtures can be defined: @@ -227,12 +286,7 @@ impl ContractRunner<'_> { } /// Runs all tests for a contract whose names match the provided regular expression - pub fn run_tests( - mut self, - filter: &dyn TestFilter, - test_options: &TestOptions, - known_contracts: ContractsByArtifact, - ) -> SuiteResult { + pub fn run_tests(mut self, filter: &dyn TestFilter) -> SuiteResult { let start = Instant::now(); let mut warnings = Vec::new(); @@ -305,7 +359,7 @@ impl ContractRunner<'_> { // The setup failed, so we return a single test result for `setUp` return SuiteResult::new( start.elapsed(), - [("setUp()".to_string(), TestResult::setup_fail(setup))].into(), + [("setUp()".to_string(), TestResult::setup_result(setup))].into(), warnings, ) } @@ -319,17 +373,16 @@ impl ContractRunner<'_> { .functions() .filter(|func| is_matching_test(func, filter)) .collect::>(); - let find_time = find_timer.elapsed(); debug!( "Found {} test functions out of {} in {:?}", functions.len(), self.contract.abi.functions().count(), - find_time, + find_timer.elapsed(), ); - let identified_contracts = has_invariants - .then(|| load_contracts(setup.traces.iter().map(|(_, t)| &t.arena), &known_contracts)); - + let identified_contracts = has_invariants.then(|| { + load_contracts(setup.traces.iter().map(|(_, t)| &t.arena), &self.mcr.known_contracts) + }); let test_results = functions .par_iter() .map(|&func| { @@ -353,34 +406,12 @@ impl ContractRunner<'_> { ) .entered(); - let setup = setup.clone(); - let mut res = match kind { - TestFunctionKind::UnitTest { should_fail } => { - self.run_unit_test(func, should_fail, setup) - } - TestFunctionKind::FuzzTest { should_fail } => { - let runner = test_options.fuzz_runner(self.name, &func.name); - let fuzz_config = test_options.fuzz_config(self.name, &func.name); - - self.run_fuzz_test(func, should_fail, runner, setup, fuzz_config.clone()) - } - TestFunctionKind::InvariantTest => { - let runner = test_options.invariant_runner(self.name, &func.name); - let invariant_config = test_options.invariant_config(self.name, &func.name); - - self.run_invariant_test( - runner, - setup, - invariant_config.clone(), - func, - call_after_invariant, - &known_contracts, - identified_contracts.as_ref().unwrap(), - ) - } - _ => unreachable!(), - }; - + let mut res = FunctionRunner::new(&self, &setup).run( + func, + kind, + call_after_invariant, + identified_contracts.as_ref(), + ); res.duration = start.elapsed(); (sig, res) @@ -390,6 +421,83 @@ impl ContractRunner<'_> { let duration = start.elapsed(); SuiteResult::new(duration, test_results, warnings) } +} + +/// Executes a single test function, returning a [`TestResult`]. +struct FunctionRunner<'a> { + /// The function-level configuration. + tcfg: Cow<'a, TestRunnerConfig>, + /// The EVM executor. + executor: Cow<'a, Executor>, + /// The parent runner. + cr: &'a ContractRunner<'a>, + /// The address of the test contract. + address: Address, + /// The test setup result. + setup: &'a TestSetup, + /// The test result. Returned after running the test. 
+ result: TestResult, +} + +impl<'a> std::ops::Deref for FunctionRunner<'a> { + type Target = Cow<'a, TestRunnerConfig>; + + #[inline(always)] + fn deref(&self) -> &Self::Target { + &self.tcfg + } +} + +impl<'a> FunctionRunner<'a> { + fn new(cr: &'a ContractRunner<'a>, setup: &'a TestSetup) -> Self { + Self { + tcfg: match &cr.tcfg { + Cow::Borrowed(tcfg) => Cow::Borrowed(tcfg), + Cow::Owned(tcfg) => Cow::Owned(tcfg.clone()), + }, + executor: Cow::Borrowed(&cr.executor), + cr, + address: setup.address, + setup, + result: TestResult::new(setup), + } + } + + fn revert_decoder(&self) -> &'a RevertDecoder { + &self.cr.mcr.revert_decoder + } + + /// Configures this runner with the inline configuration for the contract. + fn apply_function_inline_config(&mut self, func: &Function) -> Result<()> { + if self.inline_config.contains_function(self.cr.name, &func.name) { + let new_config = Arc::new(self.cr.inline_config(Some(func))?); + self.tcfg.to_mut().reconfigure_with(new_config); + self.tcfg.configure_executor(self.executor.to_mut()); + } + Ok(()) + } + + fn run( + mut self, + func: &Function, + kind: TestFunctionKind, + call_after_invariant: bool, + identified_contracts: Option<&ContractsByAddress>, + ) -> TestResult { + if let Err(e) = self.apply_function_inline_config(func) { + self.result.single_fail(Some(e.to_string())); + return self.result; + } + + match kind { + TestFunctionKind::UnitTest { should_fail } => self.run_unit_test(func, should_fail), + TestFunctionKind::FuzzTest { should_fail } => self.run_fuzz_test(func, should_fail), + TestFunctionKind::InvariantTest => { + self.run_invariant_test(func, call_after_invariant, identified_contracts.unwrap()) + } + _ => unreachable!(), + } + } /// Runs a single unit test. /// @@ -399,80 +507,77 @@ impl ContractRunner<'_> { /// (therefore the unit test call will be made on modified state). /// State modifications of before test txes and unit test function call are discarded after /// test ends, similar to `eth_call`. - pub fn run_unit_test( - &self, - func: &Function, - should_fail: bool, - setup: TestSetup, - ) -> TestResult { + fn run_unit_test(mut self, func: &Function, should_fail: bool) -> TestResult { // Prepare unit test execution. - let (executor, test_result, address) = match self.prepare_test(func, setup) { - Ok(res) => res, - Err(res) => return res, - }; + if self.prepare_test(func).is_err() { + return self.result; + } // Run current unit test. 
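+ // `Executor::call` executes the test function without committing state changes, matching the `eth_call`-like semantics described in the doc comment above.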
- let (mut raw_call_result, reason) = match executor.call( + let (mut raw_call_result, reason) = match self.executor.call( self.sender, - address, + self.address, func, &[], U256::ZERO, - Some(self.revert_decoder), + Some(self.revert_decoder()), ) { Ok(res) => (res.raw, None), Err(EvmError::Execution(err)) => (err.raw, Some(err.reason)), - Err(EvmError::Skip(reason)) => return test_result.single_skip(reason), - Err(err) => return test_result.single_fail(Some(err.to_string())), + Err(EvmError::Skip(reason)) => { + self.result.single_skip(reason); + return self.result; + } + Err(err) => { + self.result.single_fail(Some(err.to_string())); + return self.result; + } }; - let success = executor.is_raw_call_mut_success(address, &mut raw_call_result, should_fail); - test_result.single_result(success, reason, raw_call_result) + let success = + self.executor.is_raw_call_mut_success(self.address, &mut raw_call_result, should_fail); + self.result.single_result(success, reason, raw_call_result); + self.result } - #[allow(clippy::too_many_arguments)] - pub fn run_invariant_test( - &self, - runner: TestRunner, - setup: TestSetup, - invariant_config: InvariantConfig, + fn run_invariant_test( + mut self, func: &Function, call_after_invariant: bool, - known_contracts: &ContractsByArtifact, identified_contracts: &ContractsByAddress, ) -> TestResult { - let address = setup.address; - let fuzz_fixtures = setup.fuzz_fixtures.clone(); - let mut test_result = TestResult::new(setup); - // First, run the test normally to see if it needs to be skipped. if let Err(EvmError::Skip(reason)) = self.executor.call( self.sender, - address, + self.address, func, &[], U256::ZERO, - Some(self.revert_decoder), + Some(self.revert_decoder()), ) { - return test_result.invariant_skip(reason); + self.result.invariant_skip(reason); + return self.result; }; + let runner = self.invariant_runner(); + let invariant_config = &self.config.invariant; + let mut evm = InvariantExecutor::new( - self.executor.clone(), + self.clone_executor(), runner, invariant_config.clone(), identified_contracts, - known_contracts, + &self.cr.mcr.known_contracts, ); let invariant_contract = InvariantContract { - address, + address: self.address, invariant_function: func, call_after_invariant, - abi: &self.contract.abi, + abi: &self.cr.contract.abi, }; - let failure_dir = invariant_config.clone().failure_dir(self.name); - let failure_file = failure_dir.join(invariant_contract.invariant_function.clone().name); + let failure_dir = invariant_config.clone().failure_dir(self.cr.name); + let failure_file = failure_dir.join(&invariant_contract.invariant_function.name); // Try to replay recorded failure if any. if let Ok(call_sequence) = @@ -490,7 +595,7 @@ impl ContractRunner<'_> { }) .collect::>(); if let Ok((success, replayed_entirely)) = check_sequence( - self.executor.clone(), + self.clone_executor(), &txes, (0..min(txes.len(), invariant_config.depth as usize)).collect(), invariant_contract.address, @@ -508,34 +613,41 @@ impl ContractRunner<'_> { // exit without executing new runs. 
let _ = replay_run( &invariant_contract, - self.executor.clone(), - known_contracts, + self.clone_executor(), + &self.cr.mcr.known_contracts, identified_contracts.clone(), - &mut test_result.logs, - &mut test_result.traces, - &mut test_result.coverage, - &mut test_result.deprecated_cheatcodes, + &mut self.result.logs, + &mut self.result.traces, + &mut self.result.coverage, + &mut self.result.deprecated_cheatcodes, &txes, ); - return test_result.invariant_replay_fail( + self.result.invariant_replay_fail( replayed_entirely, &invariant_contract.invariant_function.name, call_sequence, - ) + ); + return self.result; } } } let progress = - start_fuzz_progress(self.progress, self.name, &func.name, invariant_config.runs); - let invariant_result = - match evm.invariant_fuzz(invariant_contract.clone(), &fuzz_fixtures, progress.as_ref()) - { - Ok(x) => x, - Err(e) => return test_result.invariant_setup_fail(e), - }; + start_fuzz_progress(self.cr.progress, self.cr.name, &func.name, invariant_config.runs); + let invariant_result = match evm.invariant_fuzz( + invariant_contract.clone(), + &self.setup.fuzz_fixtures, + &self.setup.deployed_libs, + progress.as_ref(), + ) { + Ok(x) => x, + Err(e) => { + self.result.invariant_setup_fail(e); + return self.result; + } + }; // Merge coverage collected during invariant run with test setup coverage. - test_result.merge_coverages(invariant_result.coverage); + self.result.merge_coverages(invariant_result.coverage); let mut counterexample = None; let success = invariant_result.error.is_none(); @@ -551,13 +663,13 @@ impl ContractRunner<'_> { match replay_error( &case_data, &invariant_contract, - self.executor.clone(), - known_contracts, + self.clone_executor(), + &self.cr.mcr.known_contracts, identified_contracts.clone(), - &mut test_result.logs, - &mut test_result.traces, - &mut test_result.coverage, - &mut test_result.deprecated_cheatcodes, + &mut self.result.logs, + &mut self.result.traces, + &mut self.result.coverage, + &mut self.result.deprecated_cheatcodes, progress.as_ref(), ) { Ok(call_sequence) => { @@ -587,13 +699,13 @@ impl ContractRunner<'_> { _ => { if let Err(err) = replay_run( &invariant_contract, - self.executor.clone(), - known_contracts, + self.clone_executor(), + &self.cr.mcr.known_contracts, identified_contracts.clone(), - &mut test_result.logs, - &mut test_result.traces, - &mut test_result.coverage, - &mut test_result.deprecated_cheatcodes, + &mut self.result.logs, + &mut self.result.traces, + &mut self.result.coverage, + &mut self.result.deprecated_cheatcodes, &invariant_result.last_run_inputs, ) { error!(%err, "Failed to replay last invariant run"); @@ -601,7 +713,7 @@ impl ContractRunner<'_> { } } - test_result.invariant_result( + self.result.invariant_result( invariant_result.gas_report_traces, success, reason, @@ -609,7 +721,8 @@ impl ContractRunner<'_> { invariant_result.cases, invariant_result.reverts, invariant_result.metrics, - ) + ); + self.result } /// Runs a fuzzed test. @@ -621,35 +734,32 @@ impl ContractRunner<'_> { /// (therefore the fuzz test will use the modified state). /// State modifications of before test txes and fuzz test are discarded after test ends, /// similar to `eth_call`. 
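+ ///
+ /// The fuzz configuration (runs, seed, failure persistence) is taken from `self.config.fuzz`,
+ /// after any inline configuration for this test function has been merged in.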
- pub fn run_fuzz_test( - &self, - func: &Function, - should_fail: bool, - runner: TestRunner, - setup: TestSetup, - fuzz_config: FuzzConfig, - ) -> TestResult { - let progress = start_fuzz_progress(self.progress, self.name, &func.name, fuzz_config.runs); - + fn run_fuzz_test(mut self, func: &Function, should_fail: bool) -> TestResult { // Prepare fuzz test execution. - let fuzz_fixtures = setup.fuzz_fixtures.clone(); - let (executor, test_result, address) = match self.prepare_test(func, setup) { - Ok(res) => res, - Err(res) => return res, - }; + if self.prepare_test(func).is_err() { + return self.result; + } + + let runner = self.fuzz_runner(); + let fuzz_config = self.config.fuzz.clone(); + + let progress = + start_fuzz_progress(self.cr.progress, self.cr.name, &func.name, fuzz_config.runs); // Run fuzz test. let fuzzed_executor = - FuzzedExecutor::new(executor.into_owned(), runner, self.sender, fuzz_config); + FuzzedExecutor::new(self.executor.into_owned(), runner, self.tcfg.sender, fuzz_config); let result = fuzzed_executor.fuzz( func, - &fuzz_fixtures, - address, + &self.setup.fuzz_fixtures, + &self.setup.deployed_libs, + self.address, should_fail, - self.revert_decoder, + &self.cr.mcr.revert_decoder, progress.as_ref(), ); - test_result.fuzz_result(result) + self.result.fuzz_result(result); + self.result } /// Prepares single unit test and fuzz test execution: @@ -661,20 +771,15 @@ impl ContractRunner<'_> { /// /// Unit tests within same contract (or even current test) are valid options for before test tx /// configuration. Test execution stops if any of before test txes fails. - fn prepare_test( - &self, - func: &Function, - setup: TestSetup, - ) -> Result<(Cow<'_, Executor>, TestResult, Address), TestResult> { - let address = setup.address; - let mut executor = Cow::Borrowed(&self.executor); - let mut test_result = TestResult::new(setup); + fn prepare_test(&mut self, func: &Function) -> Result<(), ()> { + let address = self.setup.address; // Apply before test configured functions (if any). - if self.contract.abi.functions().filter(|func| func.name.is_before_test_setup()).count() == + if self.cr.contract.abi.functions().filter(|func| func.name.is_before_test_setup()).count() == 1 { - for calldata in executor + for calldata in self + .executor .call_sol_default( address, &ITest::beforeTestSetupCall { testSelector: func.selector() }, @@ -682,22 +787,84 @@ impl ContractRunner<'_> { .beforeTestCalldata { // Apply before test configured calldata. - match executor.to_mut().transact_raw(self.sender, address, calldata, U256::ZERO) { + match self.executor.to_mut().transact_raw( + self.tcfg.sender, + address, + calldata, + U256::ZERO, + ) { Ok(call_result) => { let reverted = call_result.reverted; // Merge tx result traces in unit test result. - test_result.extend(call_result); + self.result.extend(call_result); // To continue unit test execution the call should not revert. 
if reverted { - return Err(test_result.single_fail(None)) + self.result.single_fail(None); + return Err(()); } } - Err(_) => return Err(test_result.single_fail(None)), + Err(_) => { + self.result.single_fail(None); + return Err(()); + } } } } - Ok((executor, test_result, address)) + Ok(()) + } + + fn fuzz_runner(&self) -> TestRunner { + let config = &self.config.fuzz; + let failure_persist_path = config + .failure_persist_dir + .as_ref() + .unwrap() + .join(config.failure_persist_file.as_ref().unwrap()) + .into_os_string() + .into_string() + .unwrap(); + fuzzer_with_cases( + config.seed, + config.runs, + config.max_test_rejects, + Some(Box::new(FileFailurePersistence::Direct(failure_persist_path.leak()))), + ) + } + + fn invariant_runner(&self) -> TestRunner { + let config = &self.config.invariant; + fuzzer_with_cases(self.config.fuzz.seed, config.runs, config.max_assume_rejects, None) + } + + fn clone_executor(&self) -> Executor { + self.executor.clone().into_owned() + } +} + +fn fuzzer_with_cases( + seed: Option, + cases: u32, + max_global_rejects: u32, + file_failure_persistence: Option>, +) -> TestRunner { + let config = proptest::test_runner::Config { + failure_persistence: file_failure_persistence, + cases, + max_global_rejects, + // Disable proptest shrink: for fuzz tests we provide single counterexample, + // for invariant tests we shrink outside proptest. + max_shrink_iters: 0, + ..Default::default() + }; + + if let Some(seed) = seed { + trace!(target: "forge::test", %seed, "building deterministic fuzzer"); + let rng = TestRng::from_seed(RngAlgorithm::ChaCha, &seed.to_be_bytes::<32>()); + TestRunner::new_with_rng(config, rng) + } else { + trace!(target: "forge::test", "building stochastic fuzzer"); + TestRunner::new(config) } } diff --git a/crates/forge/tests/cli/build.rs b/crates/forge/tests/cli/build.rs index dfc70e10f..840e591e5 100644 --- a/crates/forge/tests/cli/build.rs +++ b/crates/forge/tests/cli/build.rs @@ -73,15 +73,19 @@ contract Dummy { forgetest!(initcode_size_exceeds_limit, |prj, cmd| { prj.add_source("LargeContract", generate_large_contract(5450).as_str()).unwrap(); - cmd.args(["build", "--sizes"]).assert_failure().stdout_eq(str![ - r#" -... + cmd.args(["build", "--sizes"]).assert_failure().stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! + +╭--------------+------------------+-------------------+--------------------+---------------------╮ | Contract | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) | -|--------------|------------------|-------------------|--------------------|---------------------| -| HugeContract | 194 | 49,344 | 24,382 | -192 | -... -"# - ]); ++================================================================================================+ +| HugeContract | 194 | 49,344 | 24,382 | -192 | +╰--------------+------------------+-------------------+--------------------+---------------------╯ + + +"#]]); cmd.forge_fuse().args(["build", "--sizes", "--json"]).assert_failure().stdout_eq( str![[r#" @@ -100,15 +104,19 @@ forgetest!(initcode_size_exceeds_limit, |prj, cmd| { forgetest!(initcode_size_limit_can_be_ignored, |prj, cmd| { prj.add_source("LargeContract", generate_large_contract(5450).as_str()).unwrap(); - cmd.args(["build", "--sizes", "--ignore-eip-3860"]).assert_success().stdout_eq(str![ - r#" -... 
+ cmd.args(["build", "--sizes", "--ignore-eip-3860"]).assert_success().stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! + +╭--------------+------------------+-------------------+--------------------+---------------------╮ | Contract | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) | -|--------------|------------------|-------------------|--------------------|---------------------| -| HugeContract | 194 | 49,344 | 24,382 | -192 | -... -"# - ]); ++================================================================================================+ +| HugeContract | 194 | 49,344 | 24,382 | -192 | +╰--------------+------------------+-------------------+--------------------+---------------------╯ + + +"#]]); cmd.forge_fuse() .args(["build", "--sizes", "--ignore-eip-3860", "--json"]) @@ -140,26 +148,33 @@ Compiler run successful! // tests build output is as expected forgetest_init!(build_sizes_no_forge_std, |prj, cmd| { - cmd.args(["build", "--sizes"]).assert_success().stdout_eq(str![ - r#" + prj.write_config(Config { + solc: Some(foundry_config::SolcReq::Version(semver::Version::new(0, 8, 27))), + ..Default::default() + }); + + cmd.args(["build", "--sizes"]).assert_success().stdout_eq(str![[r#" ... + +╭----------+------------------+-------------------+--------------------+---------------------╮ | Contract | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) | -|----------|------------------|-------------------|--------------------|---------------------| -| Counter | 236 | 263 | 24,340 | 48,889 | -... -"# - ]); ++============================================================================================+ +| Counter | 236 | 263 | 24,340 | 48,889 | +╰----------+------------------+-------------------+--------------------+---------------------╯ + + +"#]]); cmd.forge_fuse().args(["build", "--sizes", "--json"]).assert_success().stdout_eq( str![[r#" { "Counter": { - "runtime_size": 247, - "init_size": 277, - "runtime_margin": 24329, - "init_margin": 48875 + "runtime_size": 236, + "init_size": 263, + "runtime_margin": 24340, + "init_margin": 48889 } -} +} "#]] .is_json(), ); diff --git a/crates/forge/tests/cli/cmd.rs b/crates/forge/tests/cli/cmd.rs index 6c06d00b2..9a9771975 100644 --- a/crates/forge/tests/cli/cmd.rs +++ b/crates/forge/tests/cli/cmd.rs @@ -68,7 +68,7 @@ Display options: - 2 (-vv): Print logs for all tests. - 3 (-vvv): Print execution traces for failing tests. - 4 (-vvvv): Print execution traces for all tests, and setup traces for failing tests. - - 5 (-vvvvv): Print execution and setup traces for all tests. + - 5 (-vvvvv): Print execution and setup traces for all tests, including storage changes. Find more information in the book: http://book.getfoundry.sh/reference/forge/forge.html @@ -1585,29 +1585,50 @@ forgetest!(gas_report_all_contracts, |prj, cmd| { cmd.forge_fuse().arg("test").arg("--gas-report").assert_success().stdout_eq(str![[r#" ... 
-| src/Contracts.sol:ContractOne contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractOne Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101532 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | foo | 45370 | 45370 | 45370 | 45370 | 1 | +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ - -| src/Contracts.sol:ContractThree contract | | | | | | -|------------------------------------------|-----------------|--------|--------|--------|---------| +╭------------------------------------------+-----------------+--------+--------+--------+---------╮ +| src/Contracts.sol:ContractThree Contract | | | | | | ++=================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| | 101748 | 242 | | | | | -| Function Name | min | avg | median | max | # calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| | | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| | baz | 259210 | 259210 | 259210 | 259210 | 1 | +╰------------------------------------------+-----------------+--------+--------+--------+---------╯ - -| src/Contracts.sol:ContractTwo contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractTwo Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101520 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | bar | 64832 | 64832 | 64832 | 64832 | 1 | -... 
+╰----------------------------------------+-----------------+-------+--------+-------+---------╯ + + +Ran 3 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests) "#]]); cmd.forge_fuse().arg("test").arg("--gas-report").arg("--json").assert_success().stdout_eq( @@ -1669,29 +1690,50 @@ forgetest!(gas_report_all_contracts, |prj, cmd| { prj.write_config(Config { gas_reports: (vec![]), ..Default::default() }); cmd.forge_fuse().arg("test").arg("--gas-report").assert_success().stdout_eq(str![[r#" ... -| src/Contracts.sol:ContractOne contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractOne Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101532 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | foo | 45370 | 45370 | 45370 | 45370 | 1 | +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ - -| src/Contracts.sol:ContractThree contract | | | | | | -|------------------------------------------|-----------------|--------|--------|--------|---------| +╭------------------------------------------+-----------------+--------+--------+--------+---------╮ +| src/Contracts.sol:ContractThree Contract | | | | | | ++=================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| | 101748 | 242 | | | | | -| Function Name | min | avg | median | max | # calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| | | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| | baz | 259210 | 259210 | 259210 | 259210 | 1 | +╰------------------------------------------+-----------------+--------+--------+--------+---------╯ - -| src/Contracts.sol:ContractTwo contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractTwo Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101520 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | 
+|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | bar | 64832 | 64832 | 64832 | 64832 | 1 | -... +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ + + +Ran 3 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests) "#]]); cmd.forge_fuse().arg("test").arg("--gas-report").arg("--json").assert_success().stdout_eq( @@ -1753,29 +1795,50 @@ forgetest!(gas_report_all_contracts, |prj, cmd| { prj.write_config(Config { gas_reports: (vec!["*".to_string()]), ..Default::default() }); cmd.forge_fuse().arg("test").arg("--gas-report").assert_success().stdout_eq(str![[r#" ... -| src/Contracts.sol:ContractOne contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractOne Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101532 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | foo | 45370 | 45370 | 45370 | 45370 | 1 | +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ - -| src/Contracts.sol:ContractThree contract | | | | | | -|------------------------------------------|-----------------|--------|--------|--------|---------| +╭------------------------------------------+-----------------+--------+--------+--------+---------╮ +| src/Contracts.sol:ContractThree Contract | | | | | | ++=================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| | 101748 | 242 | | | | | -| Function Name | min | avg | median | max | # calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| | | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| | baz | 259210 | 259210 | 259210 | 259210 | 1 | +╰------------------------------------------+-----------------+--------+--------+--------+---------╯ - -| src/Contracts.sol:ContractTwo contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractTwo Contract | | | | | | ++=============================================================================================+ | Deployment Cost | 
Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101520 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | bar | 64832 | 64832 | 64832 | 64832 | 1 | -... +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ + + +Ran 3 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests) "#]]); cmd.forge_fuse().arg("test").arg("--gas-report").arg("--json").assert_success().stdout_eq( @@ -1844,29 +1907,50 @@ forgetest!(gas_report_all_contracts, |prj, cmd| { }); cmd.forge_fuse().arg("test").arg("--gas-report").assert_success().stdout_eq(str![[r#" ... -| src/Contracts.sol:ContractOne contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractOne Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101532 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | foo | 45370 | 45370 | 45370 | 45370 | 1 | +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ - -| src/Contracts.sol:ContractThree contract | | | | | | -|------------------------------------------|-----------------|--------|--------|--------|---------| +╭------------------------------------------+-----------------+--------+--------+--------+---------╮ +| src/Contracts.sol:ContractThree Contract | | | | | | ++=================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| | 101748 | 242 | | | | | -| Function Name | min | avg | median | max | # calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| | | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| | baz | 259210 | 259210 | 259210 | 259210 | 1 | +╰------------------------------------------+-----------------+--------+--------+--------+---------╯ - -| src/Contracts.sol:ContractTwo contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| 
+╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractTwo Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101520 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | bar | 64832 | 64832 | 64832 | 64832 | 1 | -... +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ + + +Ran 3 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests) "#]]); cmd.forge_fuse().arg("test").arg("--gas-report").arg("--json").assert_success().stdout_eq( @@ -1935,13 +2019,22 @@ forgetest!(gas_report_some_contracts, |prj, cmd| { cmd.forge_fuse(); cmd.arg("test").arg("--gas-report").assert_success().stdout_eq(str![[r#" ... -| src/Contracts.sol:ContractOne contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractOne Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101532 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | foo | 45370 | 45370 | 45370 | 45370 | 1 | -... +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ + + +Ran 3 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests) "#]]); cmd.forge_fuse().arg("test").arg("--gas-report").arg("--json").assert_success().stdout_eq( @@ -1973,13 +2066,22 @@ forgetest!(gas_report_some_contracts, |prj, cmd| { cmd.forge_fuse(); cmd.arg("test").arg("--gas-report").assert_success().stdout_eq(str![[r#" ... 
-| src/Contracts.sol:ContractTwo contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractTwo Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101520 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | bar | 64832 | 64832 | 64832 | 64832 | 1 | -... +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ + + +Ran 3 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests) "#]]); cmd.forge_fuse().arg("test").arg("--gas-report").arg("--json").assert_success().stdout_eq( @@ -2014,13 +2116,22 @@ forgetest!(gas_report_some_contracts, |prj, cmd| { cmd.forge_fuse(); cmd.arg("test").arg("--gas-report").assert_success().stdout_eq(str![[r#" ... -| src/Contracts.sol:ContractThree contract | | | | | | -|------------------------------------------|-----------------|--------|--------|--------|---------| +╭------------------------------------------+-----------------+--------+--------+--------+---------╮ +| src/Contracts.sol:ContractThree Contract | | | | | | ++=================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| | 101748 | 242 | | | | | -| Function Name | min | avg | median | max | # calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| | | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| | baz | 259210 | 259210 | 259210 | 259210 | 1 | -... +╰------------------------------------------+-----------------+--------+--------+--------+---------╯ + + +Ran 3 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests) "#]]); cmd.forge_fuse().arg("test").arg("--gas-report").arg("--json").assert_success().stdout_eq( @@ -2061,21 +2172,36 @@ forgetest!(gas_report_ignore_some_contracts, |prj, cmd| { cmd.forge_fuse(); cmd.arg("test").arg("--gas-report").assert_success().stdout_eq(str![[r#" ... 
-| src/Contracts.sol:ContractThree contract | | | | | | -|------------------------------------------|-----------------|--------|--------|--------|---------| +╭------------------------------------------+-----------------+--------+--------+--------+---------╮ +| src/Contracts.sol:ContractThree Contract | | | | | | ++=================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| | 101748 | 242 | | | | | -| Function Name | min | avg | median | max | # calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| | | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| | baz | 259210 | 259210 | 259210 | 259210 | 1 | +╰------------------------------------------+-----------------+--------+--------+--------+---------╯ - -| src/Contracts.sol:ContractTwo contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractTwo Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101520 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | bar | 64832 | 64832 | 64832 | 64832 | 1 | -... +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ + + +Ran 3 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests) "#]]); cmd.forge_fuse().arg("test").arg("--gas-report").arg("--json").assert_success().stdout_eq( @@ -2128,21 +2254,36 @@ forgetest!(gas_report_ignore_some_contracts, |prj, cmd| { cmd.forge_fuse(); cmd.arg("test").arg("--gas-report").assert_success().stdout_eq(str![[r#" ... 
-| src/Contracts.sol:ContractOne contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractOne Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101532 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | foo | 45370 | 45370 | 45370 | 45370 | 1 | +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ - -| src/Contracts.sol:ContractThree contract | | | | | | -|------------------------------------------|-----------------|--------|--------|--------|---------| +╭------------------------------------------+-----------------+--------+--------+--------+---------╮ +| src/Contracts.sol:ContractThree Contract | | | | | | ++=================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| | 101748 | 242 | | | | | -| Function Name | min | avg | median | max | # calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| | | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| | baz | 259210 | 259210 | 259210 | 259210 | 1 | -... +╰------------------------------------------+-----------------+--------+--------+--------+---------╯ + + +Ran 3 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests) "#]]); cmd.forge_fuse().arg("test").arg("--gas-report").arg("--json").assert_success().stdout_eq( @@ -2206,29 +2347,51 @@ forgetest!(gas_report_ignore_some_contracts, |prj, cmd| { .assert_success() .stdout_eq(str![[r#" ... 
-| src/Contracts.sol:ContractOne contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractOne Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101532 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | foo | 45370 | 45370 | 45370 | 45370 | 1 | +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ - -| src/Contracts.sol:ContractThree contract | | | | | | -|------------------------------------------|-----------------|--------|--------|--------|---------| +╭------------------------------------------+-----------------+--------+--------+--------+---------╮ +| src/Contracts.sol:ContractThree Contract | | | | | | ++=================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| | 101748 | 242 | | | | | -| Function Name | min | avg | median | max | # calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| | | | | | | +|------------------------------------------+-----------------+--------+--------+--------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|------------------------------------------+-----------------+--------+--------+--------+---------| | baz | 259210 | 259210 | 259210 | 259210 | 1 | +╰------------------------------------------+-----------------+--------+--------+--------+---------╯ - -| src/Contracts.sol:ContractTwo contract | | | | | | -|----------------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Contracts.sol:ContractTwo Contract | | | | | | ++=============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| | 101520 | 241 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------------+-----------------+-------+--------+-------+---------| | bar | 64832 | 64832 | 64832 | 64832 | 1 | -... +╰----------------------------------------+-----------------+-------+--------+-------+---------╯ + + +Ran 3 test suites [ELAPSED]: 3 tests passed, 0 failed, 0 skipped (3 total tests) + "#]]) .stderr_eq(str![[r#" ... 
@@ -2349,16 +2512,29 @@ contract CounterTest is DSTest { cmd.arg("test").arg("--gas-report").assert_success().stdout_eq(str![[r#" ... -| src/Counter.sol:Counter contract | | | | | | -|----------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Counter.sol:Counter Contract | | | | | | ++=======================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------+-----------------+-------+--------+-------+---------| | 99711 | 240 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------+-----------------+-------+--------+-------+---------| | a | 2259 | 2259 | 2259 | 2259 | 1 | +|----------------------------------+-----------------+-------+--------+-------+---------| | b | 2304 | 2304 | 2304 | 2304 | 1 | +|----------------------------------+-----------------+-------+--------+-------+---------| | setNumber(int256) | 23646 | 33602 | 33602 | 43558 | 2 | +|----------------------------------+-----------------+-------+--------+-------+---------| | setNumber(uint256) | 23601 | 33557 | 33557 | 43513 | 2 | -... +╰----------------------------------+-----------------+-------+--------+-------+---------╯ + + +Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests) + "#]]); cmd.forge_fuse().arg("test").arg("--gas-report").arg("--json").assert_success().stdout_eq( str![[r#" @@ -2463,39 +2639,38 @@ contract GasReportFallbackTest is Test { .assert_success() .stdout_eq(str![[r#" ... -Ran 1 test for test/DelegateProxyTest.sol:GasReportFallbackTest -[PASS] test_fallback_gas_report() ([GAS]) -Traces: - [327404] GasReportFallbackTest::test_fallback_gas_report() - ├─ [104475] → new ProxiedContract@[..] - │ └─ ← [Return] 236 bytes of code - ├─ [107054] → new DelegateProxy@[..] - │ └─ ← [Return] 135 bytes of code - ├─ [29384] DelegateProxy::fallback(100) - │ ├─ [3316] ProxiedContract::deposit(100) [delegatecall] - │ │ └─ ← [Stop] - │ └─ ← [Return] - ├─ [21159] DelegateProxy::deposit() - │ └─ ← [Stop] - └─ ← [Stop] - -Suite result: ok. 
1 passed; 0 failed; 0 skipped; [ELAPSED] -| test/DelegateProxyTest.sol:DelegateProxy contract | | | | | | -|---------------------------------------------------|-----------------|-------|--------|-------|---------| +╭---------------------------------------------------+-----------------+-------+--------+-------+---------╮ +| test/DelegateProxyTest.sol:DelegateProxy Contract | | | | | | ++========================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|---------------------------------------------------+-----------------+-------+--------+-------+---------| | 107054 | 300 | | | | | -| Function Name | min | avg | median | max | # calls | +|---------------------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|---------------------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|---------------------------------------------------+-----------------+-------+--------+-------+---------| | deposit | 21159 | 21159 | 21159 | 21159 | 1 | +|---------------------------------------------------+-----------------+-------+--------+-------+---------| | fallback | 29384 | 29384 | 29384 | 29384 | 1 | +╰---------------------------------------------------+-----------------+-------+--------+-------+---------╯ - -| test/DelegateProxyTest.sol:ProxiedContract contract | | | | | | -|-----------------------------------------------------|-----------------|------|--------|------|---------| +╭-----------------------------------------------------+-----------------+------+--------+------+---------╮ +| test/DelegateProxyTest.sol:ProxiedContract Contract | | | | | | ++========================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|-----------------------------------------------------+-----------------+------+--------+------+---------| | 104475 | 263 | | | | | -| Function Name | min | avg | median | max | # calls | +|-----------------------------------------------------+-----------------+------+--------+------+---------| +| | | | | | | +|-----------------------------------------------------+-----------------+------+--------+------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|-----------------------------------------------------+-----------------+------+--------+------+---------| | deposit | 3316 | 3316 | 3316 | 3316 | 1 | -... +╰-----------------------------------------------------+-----------------+------+--------+------+---------╯ + + +Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests) "#]]); @@ -2588,32 +2763,51 @@ contract NestedDeploy is Test { .assert_success() .stdout_eq(str![[r#" ... -Ran 1 test for test/NestedDeployTest.sol:NestedDeploy -[PASS] test_nested_create_gas_report() ([GAS]) -Suite result: ok. 
1 passed; 0 failed; 0 skipped; [ELAPSED] -| test/NestedDeployTest.sol:AnotherChild contract | | | | | | -|-------------------------------------------------|-----------------|-------|--------|-------|---------| +╭-------------------------------------------------+-----------------+-------+--------+-------+---------╮ +| test/NestedDeployTest.sol:AnotherChild Contract | | | | | | ++======================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|-------------------------------------------------+-----------------+-------+--------+-------+---------| | 0 | 124 | | | | | -| Function Name | min | avg | median | max | # calls | +|-------------------------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|-------------------------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|-------------------------------------------------+-----------------+-------+--------+-------+---------| | w | 21161 | 21161 | 21161 | 21161 | 1 | +╰-------------------------------------------------+-----------------+-------+--------+-------+---------╯ - -| test/NestedDeployTest.sol:Child contract | | | | | | -|------------------------------------------|-----------------|-----|--------|-----|---------| +╭------------------------------------------+-----------------+-----+--------+-----+---------╮ +| test/NestedDeployTest.sol:Child Contract | | | | | | ++===========================================================================================+ | Deployment Cost | Deployment Size | | | | | +|------------------------------------------+-----------------+-----+--------+-----+---------| | 0 | 477 | | | | | -| Function Name | min | avg | median | max | # calls | +|------------------------------------------+-----------------+-----+--------+-----+---------| +| | | | | | | +|------------------------------------------+-----------------+-----+--------+-----+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|------------------------------------------+-----------------+-----+--------+-----+---------| | child | 323 | 323 | 323 | 323 | 1 | +╰------------------------------------------+-----------------+-----+--------+-----+---------╯ - -| test/NestedDeployTest.sol:Parent contract | | | | | | -|-------------------------------------------|-----------------|-----|--------|-----|---------| +╭-------------------------------------------+-----------------+-----+--------+-----+---------╮ +| test/NestedDeployTest.sol:Parent Contract | | | | | | ++============================================================================================+ | Deployment Cost | Deployment Size | | | | | +|-------------------------------------------+-----------------+-----+--------+-----+---------| | 251997 | 739 | | | | | -| Function Name | min | avg | median | max | # calls | +|-------------------------------------------+-----------------+-----+--------+-----+---------| +| | | | | | | +|-------------------------------------------+-----------------+-----+--------+-----+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|-------------------------------------------+-----------------+-----+--------+-----+---------| | child | 181 | 181 | 181 | 181 | 1 | -... 
+╰-------------------------------------------+-----------------+-----+--------+-----+---------╯ + + +Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests) + "#]]); cmd.forge_fuse() @@ -2974,12 +3168,12 @@ forgetest_init!(can_build_sizes_repeatedly, |prj, cmd| { prj.clear_cache(); cmd.args(["build", "--sizes"]).assert_success().stdout_eq(str![[r#" -[COMPILING_FILES] with [SOLC_VERSION] -[SOLC_VERSION] [ELAPSED] -Compiler run successful! +... +╭----------+------------------+-------------------+--------------------+---------------------╮ | Contract | Runtime Size (B) | Initcode Size (B) | Runtime Margin (B) | Initcode Margin (B) | -|----------|------------------|-------------------|--------------------|---------------------| -| Counter | 236 | 263 | 24,340 | 48,889 | ++============================================================================================+ +| Counter | 236 | 263 | 24,340 | 48,889 | +╰----------+------------------+-------------------+--------------------+---------------------╯ "#]]); @@ -3045,24 +3239,42 @@ forgetest_init!(gas_report_include_tests, |prj, cmd| { cmd.args(["test", "--mt", "test_Increment", "--gas-report"]).assert_success().stdout_eq(str![ [r#" ... -| src/Counter.sol:Counter contract | | | | | | -|----------------------------------|-----------------|-------|--------|-------|---------| +╭----------------------------------+-----------------+-------+--------+-------+---------╮ +| src/Counter.sol:Counter Contract | | | | | | ++=======================================================================================+ | Deployment Cost | Deployment Size | | | | | +|----------------------------------+-----------------+-------+--------+-------+---------| | 104475 | 263 | | | | | -| Function Name | min | avg | median | max | # calls | +|----------------------------------+-----------------+-------+--------+-------+---------| +| | | | | | | +|----------------------------------+-----------------+-------+--------+-------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | +|----------------------------------+-----------------+-------+--------+-------+---------| | increment | 43401 | 43401 | 43401 | 43401 | 1 | +|----------------------------------+-----------------+-------+--------+-------+---------| | number | 281 | 281 | 281 | 281 | 1 | +|----------------------------------+-----------------+-------+--------+-------+---------| | setNumber | 23579 | 23579 | 23579 | 23579 | 1 | +╰----------------------------------+-----------------+-------+--------+-------+---------╯ - -| test/Counter.t.sol:CounterTest contract | | | | | | -|-----------------------------------------|-----------------|--------|--------|--------|---------| +╭-----------------------------------------+-----------------+--------+--------+--------+---------╮ +| test/Counter.t.sol:CounterTest Contract | | | | | | ++================================================================================================+ | Deployment Cost | Deployment Size | | | | | +|-----------------------------------------+-----------------+--------+--------+--------+---------| | 938190 | 4522 | | | | | -| Function Name | min | avg | median | max | # calls | +|-----------------------------------------+-----------------+--------+--------+--------+---------| +| | | | | | | +|-----------------------------------------+-----------------+--------+--------+--------+---------| +| Function Name | Min | Avg | Median | Max | # Calls | 
+|-----------------------------------------+-----------------+--------+--------+--------+---------| | setUp | 165834 | 165834 | 165834 | 165834 | 1 | +|-----------------------------------------+-----------------+--------+--------+--------+---------| | test_Increment | 52357 | 52357 | 52357 | 52357 | 1 | -... +╰-----------------------------------------+-----------------+--------+--------+--------+---------╯ + + +Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests) "#] ]); diff --git a/crates/forge/tests/cli/config.rs b/crates/forge/tests/cli/config.rs index a45120b26..b80ec8819 100644 --- a/crates/forge/tests/cli/config.rs +++ b/crates/forge/tests/cli/config.rs @@ -29,7 +29,9 @@ forgetest!(can_extract_config_values, |prj, cmd| { // explicitly set all values let input = Config { profile: Config::DEFAULT_PROFILE, - root: Default::default(), + // `profiles` is not serialized. + profiles: vec![], + root: ".".into(), src: "test-src".into(), test: "test-test".into(), script: "test-script".into(), @@ -107,6 +109,7 @@ forgetest!(can_extract_config_values, |prj, cmd| { eth_rpc_url: Some("localhost".to_string()), eth_rpc_jwt: None, eth_rpc_timeout: None, + eth_rpc_headers: None, etherscan_api_key: None, etherscan: Default::default(), verbosity: 4, @@ -143,6 +146,7 @@ forgetest!(can_extract_config_values, |prj, cmd| { isolate: true, unchecked_cheatcode_artifacts: false, create2_library_salt: Config::DEFAULT_CREATE2_LIBRARY_SALT, + create2_deployer: Config::DEFAULT_CREATE2_DEPLOYER, vyper: Default::default(), skip: vec![], dependencies: Default::default(), @@ -152,7 +156,7 @@ forgetest!(can_extract_config_values, |prj, cmd| { legacy_assertions: false, extra_args: vec![], eof_version: None, - alphanet: false, + odyssey: false, transaction_timeout: 120, additional_compiler_profiles: Default::default(), compilation_restrictions: Default::default(), @@ -891,3 +895,36 @@ contract MyScript is BaseScript { pretty_err(&lib_toml_file, fs::write(&lib_toml_file, lib_config.to_string_pretty().unwrap())); cmd.forge_fuse().args(["build"]).assert_success(); }); + +// Tests that project remappings use config paths. +// For `src=src/contracts` config, remapping should be `src/contracts/ = src/contracts/`. +// For `src=src` config, remapping should be `src/ = src/`. +// +forgetest!(test_project_remappings, |prj, cmd| { + foundry_test_utils::util::initialize(prj.root()); + let config = Config { + src: "src/contracts".into(), + remappings: vec![Remapping::from_str("contracts/=src/contracts/").unwrap().into()], + ..Default::default() + }; + prj.write_config(config); + + // Add Counter.sol in `src/contracts` project dir. 
+ let src_dir = &prj.root().join("src/contracts"); + pretty_err(src_dir, fs::create_dir_all(src_dir)); + pretty_err( + src_dir.join("Counter.sol"), + fs::write(src_dir.join("Counter.sol"), "contract Counter{}"), + ); + prj.add_test( + "CounterTest.sol", + r#" +import "contracts/Counter.sol"; + +contract CounterTest { +} + "#, + ) + .unwrap(); + cmd.forge_fuse().args(["build"]).assert_success(); +}); diff --git a/crates/forge/tests/cli/coverage.rs b/crates/forge/tests/cli/coverage.rs index 65900c592..c840a8036 100644 --- a/crates/forge/tests/cli/coverage.rs +++ b/crates/forge/tests/cli/coverage.rs @@ -1,21 +1,188 @@ -use foundry_test_utils::{assert_data_eq, str}; +use foundry_common::fs; +use foundry_test_utils::{ + snapbox::{Data, IntoData}, + TestCommand, TestProject, +}; +use std::path::Path; + +fn basic_base(prj: TestProject, mut cmd: TestCommand) { + cmd.args(["coverage", "--report=lcov", "--report=summary"]).assert_success().stdout_eq(str![[ + r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! +Analysing contracts... +Running tests... + +Ran 2 tests for test/Counter.t.sol:CounterTest +[PASS] testFuzz_SetNumber(uint256) (runs: 256, [AVG_GAS]) +[PASS] test_Increment() ([GAS]) +Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests) +Wrote LCOV report. + +╭----------------------+---------------+---------------+---------------+---------------╮ +| File | % Lines | % Statements | % Branches | % Funcs | ++======================================================================================+ +| script/Counter.s.sol | 0.00% (0/5) | 0.00% (0/3) | 100.00% (0/0) | 0.00% (0/2) | +|----------------------+---------------+---------------+---------------+---------------| +| src/Counter.sol | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | +|----------------------+---------------+---------------+---------------+---------------| +| Total | 44.44% (4/9) | 40.00% (2/5) | 100.00% (0/0) | 50.00% (2/4) | +╰----------------------+---------------+---------------+---------------+---------------╯ + +"# + ]]); + + let lcov = prj.root().join("lcov.info"); + assert!(lcov.exists(), "lcov.info was not created"); + let default_lcov = str![[r#" +TN: +SF:script/Counter.s.sol +DA:10,0 +FN:10,CounterScript.setUp +FNDA:0,CounterScript.setUp +DA:12,0 +FN:12,CounterScript.run +FNDA:0,CounterScript.run +DA:13,0 +DA:15,0 +DA:17,0 +FNF:2 +FNH:0 +LF:5 +LH:0 +BRF:0 +BRH:0 +end_of_record +TN: +SF:src/Counter.sol +DA:7,258 +FN:7,Counter.setNumber +FNDA:258,Counter.setNumber +DA:8,258 +DA:11,1 +FN:11,Counter.increment +FNDA:1,Counter.increment +DA:12,1 +FNF:2 +FNH:2 +LF:4 +LH:4 +BRF:0 +BRH:0 +end_of_record + +"#]]; + assert_data_eq!(Data::read_from(&lcov, None), default_lcov.clone()); + assert_lcov( + cmd.forge_fuse().args(["coverage", "--report=lcov", "--lcov-version=1"]), + default_lcov, + ); + + assert_lcov( + cmd.forge_fuse().args(["coverage", "--report=lcov", "--lcov-version=2"]), + str![[r#" +TN: +SF:script/Counter.s.sol +DA:10,0 +FN:10,10,CounterScript.setUp +FNDA:0,CounterScript.setUp +DA:12,0 +FN:12,18,CounterScript.run +FNDA:0,CounterScript.run +DA:13,0 +DA:15,0 +DA:17,0 +FNF:2 +FNH:0 +LF:5 +LH:0 +BRF:0 +BRH:0 +end_of_record +TN: +SF:src/Counter.sol +DA:7,258 +FN:7,9,Counter.setNumber +FNDA:258,Counter.setNumber +DA:8,258 +DA:11,1 +FN:11,13,Counter.increment +FNDA:1,Counter.increment +DA:12,1 +FNF:2 +FNH:2 +LF:4 +LH:4 +BRF:0 +BRH:0 +end_of_record + +"#]], + ); 
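The expected LCOV snapshots above use the standard record types: `DA:<line>,<hits>` for each instrumented line, `FN`/`FNDA` for function locations and hit counts (with an end line added under `--lcov-version=2`, and `FNL`/`FNA` records under `2.2`), and `LF`/`LH`, `FNF`/`FNH`, `BRF`/`BRH` for the per-file totals. A small standalone sketch (a hypothetical helper, not forge code) that recomputes `LF`/`LH` from the `DA` records of one section:

// Recompute "lines found" (LF) and "lines hit" (LH) from DA:<line>,<hits> records.
fn line_totals(lcov_section: &str) -> (u32, u32) {
    let mut found = 0;
    let mut hit = 0;
    for line in lcov_section.lines() {
        if let Some(rest) = line.strip_prefix("DA:") {
            found += 1;
            // The value after the comma is the execution count; non-zero means covered.
            let hits: u64 = rest.split(',').nth(1).and_then(|n| n.trim().parse().ok()).unwrap_or(0);
            if hits > 0 {
                hit += 1;
            }
        }
    }
    (found, hit)
}

fn main() {
    // Matches the src/Counter.sol section in the snapshot above: LF:4, LH:4.
    let section = "DA:7,258\nDA:8,258\nDA:11,1\nDA:12,1\n";
    assert_eq!(line_totals(section), (4, 4));
}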
-forgetest!(basic_coverage, |_prj, cmd| { - cmd.args(["coverage"]); - cmd.assert_success(); + assert_lcov( + cmd.forge_fuse().args(["coverage", "--report=lcov", "--lcov-version=2.2"]), + str![[r#" +TN: +SF:script/Counter.s.sol +DA:10,0 +FNL:0,10,10 +FNA:0,0,CounterScript.setUp +DA:12,0 +FNL:1,12,18 +FNA:1,0,CounterScript.run +DA:13,0 +DA:15,0 +DA:17,0 +FNF:2 +FNH:0 +LF:5 +LH:0 +BRF:0 +BRH:0 +end_of_record +TN: +SF:src/Counter.sol +DA:7,258 +FNL:2,7,9 +FNA:2,258,Counter.setNumber +DA:8,258 +DA:11,1 +FNL:3,11,13 +FNA:3,1,Counter.increment +DA:12,1 +FNF:2 +FNH:2 +LF:4 +LH:4 +BRF:0 +BRH:0 +end_of_record + +"#]], + ); +} + +forgetest_init!(basic, |prj, cmd| { + basic_base(prj, cmd); }); -forgetest!(report_file_coverage, |prj, cmd| { - cmd.arg("coverage").args([ - "--report".to_string(), - "lcov".to_string(), - "--report-file".to_string(), - prj.root().join("lcov.info").to_str().unwrap().to_string(), - ]); - cmd.assert_success(); +forgetest_init!(basic_crlf, |prj, cmd| { + // Manually replace `\n` with `\r\n` in the source file. + let make_crlf = |path: &Path| { + fs::write(path, fs::read_to_string(path).unwrap().replace('\n', "\r\n")).unwrap() + }; + make_crlf(&prj.paths().sources.join("Counter.sol")); + make_crlf(&prj.paths().scripts.join("Counter.s.sol")); + + // Should have identical stdout and lcov output. + basic_base(prj, cmd); }); -forgetest!(test_setup_coverage, |prj, cmd| { +forgetest!(setup, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -58,17 +225,20 @@ contract AContractTest is DSTest { .unwrap(); // Assert 100% coverage (init function coverage called in setUp is accounted). - cmd.arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq(str![[r#" + cmd.arg("coverage").assert_success().stdout_eq(str![[r#" ... +╭-------------------+---------------+---------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/AContract.sol | 100.00% (2/2) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | -| Total | 100.00% (2/2) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | ++===================================================================================+ +| src/AContract.sol | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | +|-------------------+---------------+---------------+---------------+---------------| +| Total | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | +╰-------------------+---------------+---------------+---------------+---------------╯ "#]]); }); -forgetest!(test_no_match_coverage, |prj, cmd| { +forgetest!(no_match, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -151,23 +321,22 @@ contract BContractTest is DSTest { .unwrap(); // Assert AContract is not included in report. - cmd.arg("coverage") - .args([ - "--no-match-coverage".to_string(), - "AContract".to_string(), // Filter out `AContract` - ]) - .assert_success() - .stdout_eq(str![[r#" + cmd.arg("coverage").arg("--no-match-coverage=AContract").assert_success().stdout_eq(str![[ + r#" ... 
+╭-------------------+---------------+---------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/BContract.sol | 100.00% (2/2) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | -| Total | 100.00% (2/2) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | - -"#]]); ++===================================================================================+ +| src/BContract.sol | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | +|-------------------+---------------+---------------+---------------+---------------| +| Total | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | +╰-------------------+---------------+---------------+---------------+---------------╯ + +"# + ]]); }); -forgetest!(test_assert_coverage, |prj, cmd| { +forgetest!(assert, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -210,47 +379,38 @@ contract AContractTest is DSTest { ) .unwrap(); - // Assert 50% branch coverage for assert failure. - cmd.arg("coverage") - .args(["--mt".to_string(), "testAssertRevertBranch".to_string()]) - .assert_success() - .stdout_eq(str![[r#" -... -| File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|--------------|--------------|--------------|---------------| -| src/AContract.sol | 50.00% (1/2) | 50.00% (1/2) | 50.00% (1/2) | 100.00% (1/1) | -| Total | 50.00% (1/2) | 50.00% (1/2) | 50.00% (1/2) | 100.00% (1/1) | - -"#]]); - - // Assert 50% branch coverage for proper assert. - cmd.forge_fuse() - .arg("coverage") - .args(["--mt".to_string(), "testAssertBranch".to_string()]) - .assert_success() - .stdout_eq(str![[r#" + // Assert 50% statement coverage for assert failure (assert not considered a branch). + cmd.arg("coverage").args(["--mt", "testAssertRevertBranch"]).assert_success().stdout_eq(str![ + [r#" ... -| File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|--------------|---------------| -| src/AContract.sol | 100.00% (2/2) | 100.00% (2/2) | 50.00% (1/2) | 100.00% (1/1) | -| Total | 100.00% (2/2) | 100.00% (2/2) | 50.00% (1/2) | 100.00% (1/1) | - -"#]]); +╭-------------------+--------------+--------------+---------------+---------------╮ +| File | % Lines | % Statements | % Branches | % Funcs | ++=================================================================================+ +| src/AContract.sol | 66.67% (2/3) | 50.00% (1/2) | 100.00% (0/0) | 100.00% (1/1) | +|-------------------+--------------+--------------+---------------+---------------| +| Total | 66.67% (2/3) | 50.00% (1/2) | 100.00% (0/0) | 100.00% (1/1) | +╰-------------------+--------------+--------------+---------------+---------------╯ + +"#] + ]); - // Assert 100% coverage (assert properly covered). - cmd.forge_fuse().arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq( + // Assert 100% statement coverage for proper assert (assert not considered a branch). + cmd.forge_fuse().arg("coverage").args(["--mt", "testAssertBranch"]).assert_success().stdout_eq( str![[r#" ... 
+╭-------------------+---------------+---------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/AContract.sol | 100.00% (2/2) | 100.00% (2/2) | 100.00% (2/2) | 100.00% (1/1) | -| Total | 100.00% (2/2) | 100.00% (2/2) | 100.00% (2/2) | 100.00% (1/1) | ++===================================================================================+ +| src/AContract.sol | 100.00% (3/3) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (1/1) | +|-------------------+---------------+---------------+---------------+---------------| +| Total | 100.00% (3/3) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (1/1) | +╰-------------------+---------------+---------------+---------------+---------------╯ "#]], ); }); -forgetest!(test_require_coverage, |prj, cmd| { +forgetest!(require, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -292,46 +452,50 @@ contract AContractTest is DSTest { .unwrap(); // Assert 50% branch coverage if only revert tested. - cmd.arg("coverage") - .args(["--mt".to_string(), "testRequireRevert".to_string()]) - .assert_success() - .stdout_eq(str![[r#" + cmd.arg("coverage").args(["--mt", "testRequireRevert"]).assert_success().stdout_eq(str![[r#" ... +╭-------------------+---------------+---------------+--------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|--------------|---------------| -| src/AContract.sol | 100.00% (1/1) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) | -| Total | 100.00% (1/1) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) | ++==================================================================================+ +| src/AContract.sol | 100.00% (2/2) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) | +|-------------------+---------------+---------------+--------------+---------------| +| Total | 100.00% (2/2) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) | +╰-------------------+---------------+---------------+--------------+---------------╯ "#]]); // Assert 50% branch coverage if only happy path tested. cmd.forge_fuse() .arg("coverage") - .args(["--mt".to_string(), "testRequireNoRevert".to_string()]) + .args(["--mt", "testRequireNoRevert"]) .assert_success() .stdout_eq(str![[r#" ... +╭-------------------+---------------+---------------+--------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|--------------|---------------| -| src/AContract.sol | 100.00% (1/1) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) | -| Total | 100.00% (1/1) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) | ++==================================================================================+ +| src/AContract.sol | 100.00% (2/2) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) | +|-------------------+---------------+---------------+--------------+---------------| +| Total | 100.00% (2/2) | 100.00% (1/1) | 50.00% (1/2) | 100.00% (1/1) | +╰-------------------+---------------+---------------+--------------+---------------╯ "#]]); // Assert 100% branch coverage. - cmd.forge_fuse().arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq( - str![[r#" + cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#" ... 
+╭-------------------+---------------+---------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/AContract.sol | 100.00% (1/1) | 100.00% (1/1) | 100.00% (2/2) | 100.00% (1/1) | -| Total | 100.00% (1/1) | 100.00% (1/1) | 100.00% (2/2) | 100.00% (1/1) | ++===================================================================================+ +| src/AContract.sol | 100.00% (2/2) | 100.00% (1/1) | 100.00% (2/2) | 100.00% (1/1) | +|-------------------+---------------+---------------+---------------+---------------| +| Total | 100.00% (2/2) | 100.00% (1/1) | 100.00% (2/2) | 100.00% (1/1) | +╰-------------------+---------------+---------------+---------------+---------------╯ -"#]], - ); +"#]]); }); -forgetest!(test_line_hit_not_doubled, |prj, cmd| { +forgetest!(line_hit_not_doubled, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -363,36 +527,29 @@ contract AContractTest is DSTest { ) .unwrap(); - let lcov_info = prj.root().join("lcov.info"); - cmd.arg("coverage").args([ - "--report".to_string(), - "lcov".to_string(), - "--report-file".to_string(), - lcov_info.to_str().unwrap().to_string(), - ]); - cmd.assert_success(); - assert!(lcov_info.exists()); - // We want to make sure DA:8,1 is added only once so line hit is not doubled. - assert_data_eq!( - std::fs::read_to_string(lcov_info).unwrap(), - str![[r#"TN: + assert_lcov( + cmd.arg("coverage"), + str![[r#" +TN: SF:src/AContract.sol +DA:7,1 FN:7,AContract.foo FNDA:1,AContract.foo DA:8,1 FNF:1 FNH:1 -LF:1 -LH:1 +LF:2 +LH:2 BRF:0 BRH:0 -end[..] -"#]] +end_of_record + +"#]], ); }); -forgetest!(test_branch_coverage, |prj, cmd| { +forgetest!(branch, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "Foo.sol", @@ -608,48 +765,52 @@ contract FooTest is DSTest { // Assert no coverage for single path branch. 2 branches (parent and child) not covered. cmd.arg("coverage") - .args([ - "--nmt".to_string(), - "test_single_path_child_branch|test_single_path_parent_branch".to_string(), - ]) + .args(["--nmt", "test_single_path_child_branch|test_single_path_parent_branch"]) .assert_success() .stdout_eq(str![[r#" ... -| File | % Lines | % Statements | % Branches | % Funcs | -|-------------|----------------|----------------|----------------|---------------| -| src/Foo.sol | 88.89% (24/27) | 90.00% (27/30) | 87.50% (14/16) | 100.00% (9/9) | -| Total | 88.89% (24/27) | 90.00% (27/30) | 87.50% (14/16) | 100.00% (9/9) | +╭-------------+----------------+----------------+---------------+---------------╮ +| File | % Lines | % Statements | % Branches | % Funcs | ++===============================================================================+ +| src/Foo.sol | 91.67% (33/36) | 90.00% (27/30) | 80.00% (8/10) | 100.00% (9/9) | +|-------------+----------------+----------------+---------------+---------------| +| Total | 91.67% (33/36) | 90.00% (27/30) | 80.00% (8/10) | 100.00% (9/9) | +╰-------------+----------------+----------------+---------------+---------------╯ "#]]); // Assert no coverage for single path child branch. 1 branch (child) not covered. cmd.forge_fuse() .arg("coverage") - .args(["--nmt".to_string(), "test_single_path_child_branch".to_string()]) + .args(["--nmt", "test_single_path_child_branch"]) .assert_success() .stdout_eq(str![[r#" ... 
-| File | % Lines | % Statements | % Branches | % Funcs | -|-------------|----------------|----------------|----------------|---------------| -| src/Foo.sol | 96.30% (26/27) | 96.67% (29/30) | 93.75% (15/16) | 100.00% (9/9) | -| Total | 96.30% (26/27) | 96.67% (29/30) | 93.75% (15/16) | 100.00% (9/9) | +╭-------------+----------------+----------------+---------------+---------------╮ +| File | % Lines | % Statements | % Branches | % Funcs | ++===============================================================================+ +| src/Foo.sol | 97.22% (35/36) | 96.67% (29/30) | 90.00% (9/10) | 100.00% (9/9) | +|-------------+----------------+----------------+---------------+---------------| +| Total | 97.22% (35/36) | 96.67% (29/30) | 90.00% (9/10) | 100.00% (9/9) | +╰-------------+----------------+----------------+---------------+---------------╯ "#]]); // Assert 100% coverage. - cmd.forge_fuse().arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq( - str![[r#" + cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#" ... +╭-------------+-----------------+-----------------+-----------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------|-----------------|-----------------|-----------------|---------------| -| src/Foo.sol | 100.00% (27/27) | 100.00% (30/30) | 100.00% (16/16) | 100.00% (9/9) | -| Total | 100.00% (27/27) | 100.00% (30/30) | 100.00% (16/16) | 100.00% (9/9) | ++===================================================================================+ +| src/Foo.sol | 100.00% (36/36) | 100.00% (30/30) | 100.00% (10/10) | 100.00% (9/9) | +|-------------+-----------------+-----------------+-----------------+---------------| +| Total | 100.00% (36/36) | 100.00% (30/30) | 100.00% (10/10) | 100.00% (9/9) | +╰-------------+-----------------+-----------------+-----------------+---------------╯ -"#]], - ); +"#]]); }); -forgetest!(test_function_call_coverage, |prj, cmd| { +forgetest!(function_call, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -709,19 +870,21 @@ contract AContractTest is DSTest { ) .unwrap(); - // Assert 100% coverage and only 9 lines reported (comments, type conversions and struct - // constructor calls are not included). - cmd.arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq(str![[r#" + // Assert 100% coverage. + cmd.arg("coverage").assert_success().stdout_eq(str![[r#" ... 
-| File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/AContract.sol | 100.00% (9/9) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (5/5) | -| Total | 100.00% (9/9) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (5/5) | +╭-------------------+-----------------+---------------+---------------+---------------╮ +| File | % Lines | % Statements | % Branches | % Funcs | ++=====================================================================================+ +| src/AContract.sol | 100.00% (14/14) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (5/5) | +|-------------------+-----------------+---------------+---------------+---------------| +| Total | 100.00% (14/14) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (5/5) | +╰-------------------+-----------------+---------------+---------------+---------------╯ "#]]); }); -forgetest!(test_try_catch_coverage, |prj, cmd| { +forgetest!(try_catch, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "Foo.sol", @@ -810,31 +973,33 @@ contract FooTest is DSTest { .unwrap(); // Assert coverage not 100% for happy paths only. - cmd.arg("coverage").args(["--mt".to_string(), "happy".to_string()]).assert_success().stdout_eq( - str![[r#" + cmd.arg("coverage").args(["--mt", "happy"]).assert_success().stdout_eq(str![[r#" ... +╭-------------+----------------+----------------+--------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------|----------------|----------------|--------------|---------------| -| src/Foo.sol | 66.67% (10/15) | 66.67% (14/21) | 83.33% (5/6) | 100.00% (5/5) | -| Total | 66.67% (10/15) | 66.67% (14/21) | 83.33% (5/6) | 100.00% (5/5) | ++==============================================================================+ +| src/Foo.sol | 75.00% (15/20) | 66.67% (14/21) | 75.00% (3/4) | 100.00% (5/5) | +|-------------+----------------+----------------+--------------+---------------| +| Total | 75.00% (15/20) | 66.67% (14/21) | 75.00% (3/4) | 100.00% (5/5) | +╰-------------+----------------+----------------+--------------+---------------╯ -"#]], - ); +"#]]); // Assert 100% branch coverage (including clauses without body). - cmd.forge_fuse().arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq( - str![[r#" + cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#" ... 
+╭-------------+-----------------+-----------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------|-----------------|-----------------|---------------|---------------| -| src/Foo.sol | 100.00% (15/15) | 100.00% (21/21) | 100.00% (6/6) | 100.00% (5/5) | -| Total | 100.00% (15/15) | 100.00% (21/21) | 100.00% (6/6) | 100.00% (5/5) | ++=================================================================================+ +| src/Foo.sol | 100.00% (20/20) | 100.00% (21/21) | 100.00% (4/4) | 100.00% (5/5) | +|-------------+-----------------+-----------------+---------------+---------------| +| Total | 100.00% (20/20) | 100.00% (21/21) | 100.00% (4/4) | 100.00% (5/5) | +╰-------------+-----------------+-----------------+---------------+---------------╯ -"#]], - ); +"#]]); }); -forgetest!(test_yul_coverage, |prj, cmd| { +forgetest!(yul, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "Foo.sol", @@ -927,19 +1092,20 @@ contract FooTest is DSTest { ) .unwrap(); - cmd.forge_fuse().arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq( - str![[r#" + cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#" ... +╭-------------+-----------------+-----------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------|-----------------|-----------------|---------------|---------------| -| src/Foo.sol | 100.00% (23/23) | 100.00% (40/40) | 100.00% (1/1) | 100.00% (7/7) | -| Total | 100.00% (23/23) | 100.00% (40/40) | 100.00% (1/1) | 100.00% (7/7) | ++=================================================================================+ +| src/Foo.sol | 100.00% (30/30) | 100.00% (40/40) | 100.00% (1/1) | 100.00% (7/7) | +|-------------+-----------------+-----------------+---------------+---------------| +| Total | 100.00% (30/30) | 100.00% (40/40) | 100.00% (1/1) | 100.00% (7/7) | +╰-------------+-----------------+-----------------+---------------+---------------╯ -"#]], - ); +"#]]); }); -forgetest!(test_misc_coverage, |prj, cmd| { +forgetest!(misc, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "Foo.sol", @@ -1019,20 +1185,21 @@ contract FooTest is DSTest { ) .unwrap(); - cmd.forge_fuse().arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq( - str![[r#" + cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#" ... 
-| File | % Lines | % Statements | % Branches | % Funcs | -|-------------|---------------|---------------|---------------|---------------| -| src/Foo.sol | 100.00% (8/8) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (4/4) | -| Total | 100.00% (8/8) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (4/4) | +╭-------------+-----------------+---------------+---------------+---------------╮ +| File | % Lines | % Statements | % Branches | % Funcs | ++===============================================================================+ +| src/Foo.sol | 100.00% (12/12) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (4/4) | +|-------------+-----------------+---------------+---------------+---------------| +| Total | 100.00% (12/12) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (4/4) | +╰-------------+-----------------+---------------+---------------+---------------╯ -"#]], - ); +"#]]); }); // https://github.com/foundry-rs/foundry/issues/8605 -forgetest!(test_single_statement_coverage, |prj, cmd| { +forgetest!(single_statement, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -1075,47 +1242,51 @@ contract AContractTest is DSTest { .unwrap(); // Assert 50% coverage for true branches. - cmd.arg("coverage") - .args(["--mt".to_string(), "testTrueCoverage".to_string()]) - .assert_success() - .stdout_eq(str![[r#" + cmd.arg("coverage").args(["--mt", "testTrueCoverage"]).assert_success().stdout_eq(str![[r#" ... +╭-------------------+--------------+--------------+--------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|--------------|--------------|--------------|---------------| -| src/AContract.sol | 50.00% (2/4) | 50.00% (2/4) | 50.00% (2/4) | 100.00% (1/1) | -| Total | 50.00% (2/4) | 50.00% (2/4) | 50.00% (2/4) | 100.00% (1/1) | ++================================================================================+ +| src/AContract.sol | 60.00% (3/5) | 50.00% (2/4) | 50.00% (2/4) | 100.00% (1/1) | +|-------------------+--------------+--------------+--------------+---------------| +| Total | 60.00% (3/5) | 50.00% (2/4) | 50.00% (2/4) | 100.00% (1/1) | +╰-------------------+--------------+--------------+--------------+---------------╯ "#]]); // Assert 50% coverage for false branches. cmd.forge_fuse() .arg("coverage") - .args(["--mt".to_string(), "testFalseCoverage".to_string()]) + .args(["--mt", "testFalseCoverage"]) .assert_success() .stdout_eq(str![[r#" ... +╭-------------------+--------------+--------------+--------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|--------------|--------------|--------------|---------------| -| src/AContract.sol | 50.00% (2/4) | 50.00% (2/4) | 50.00% (2/4) | 100.00% (1/1) | -| Total | 50.00% (2/4) | 50.00% (2/4) | 50.00% (2/4) | 100.00% (1/1) | ++================================================================================+ +| src/AContract.sol | 60.00% (3/5) | 50.00% (2/4) | 50.00% (2/4) | 100.00% (1/1) | +|-------------------+--------------+--------------+--------------+---------------| +| Total | 60.00% (3/5) | 50.00% (2/4) | 50.00% (2/4) | 100.00% (1/1) | +╰-------------------+--------------+--------------+--------------+---------------╯ "#]]); // Assert 100% coverage (true/false branches properly covered). - cmd.forge_fuse().arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq( - str![[r#" + cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#" ... 
+╭-------------------+---------------+---------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/AContract.sol | 100.00% (4/4) | 100.00% (4/4) | 100.00% (4/4) | 100.00% (1/1) | -| Total | 100.00% (4/4) | 100.00% (4/4) | 100.00% (4/4) | 100.00% (1/1) | ++===================================================================================+ +| src/AContract.sol | 100.00% (5/5) | 100.00% (4/4) | 100.00% (4/4) | 100.00% (1/1) | +|-------------------+---------------+---------------+---------------+---------------| +| Total | 100.00% (5/5) | 100.00% (4/4) | 100.00% (4/4) | 100.00% (1/1) | +╰-------------------+---------------+---------------+---------------+---------------╯ -"#]], - ); +"#]]); }); // https://github.com/foundry-rs/foundry/issues/8604 -forgetest!(test_branch_with_calldata_reads, |prj, cmd| { +forgetest!(branch_with_calldata_reads, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -1164,46 +1335,50 @@ contract AContractTest is DSTest { .unwrap(); // Assert 50% coverage for true branches. - cmd.arg("coverage") - .args(["--mt".to_string(), "testTrueCoverage".to_string()]) - .assert_success() - .stdout_eq(str![[r#" + cmd.arg("coverage").args(["--mt", "testTrueCoverage"]).assert_success().stdout_eq(str![[r#" ... +╭-------------------+--------------+--------------+--------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|--------------|--------------|--------------|---------------| -| src/AContract.sol | 75.00% (3/4) | 80.00% (4/5) | 50.00% (1/2) | 100.00% (1/1) | -| Total | 75.00% (3/4) | 80.00% (4/5) | 50.00% (1/2) | 100.00% (1/1) | ++================================================================================+ +| src/AContract.sol | 80.00% (4/5) | 80.00% (4/5) | 50.00% (1/2) | 100.00% (1/1) | +|-------------------+--------------+--------------+--------------+---------------| +| Total | 80.00% (4/5) | 80.00% (4/5) | 50.00% (1/2) | 100.00% (1/1) | +╰-------------------+--------------+--------------+--------------+---------------╯ "#]]); // Assert 50% coverage for false branches. cmd.forge_fuse() .arg("coverage") - .args(["--mt".to_string(), "testFalseCoverage".to_string()]) + .args(["--mt", "testFalseCoverage"]) .assert_success() .stdout_eq(str![[r#" ... +╭-------------------+--------------+--------------+--------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|--------------|--------------|--------------|---------------| -| src/AContract.sol | 50.00% (2/4) | 80.00% (4/5) | 50.00% (1/2) | 100.00% (1/1) | -| Total | 50.00% (2/4) | 80.00% (4/5) | 50.00% (1/2) | 100.00% (1/1) | ++================================================================================+ +| src/AContract.sol | 60.00% (3/5) | 80.00% (4/5) | 50.00% (1/2) | 100.00% (1/1) | +|-------------------+--------------+--------------+--------------+---------------| +| Total | 60.00% (3/5) | 80.00% (4/5) | 50.00% (1/2) | 100.00% (1/1) | +╰-------------------+--------------+--------------+--------------+---------------╯ "#]]); // Assert 100% coverage (true/false branches properly covered). - cmd.forge_fuse().arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq( - str![[r#" + cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#" ... 
+╭-------------------+---------------+---------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/AContract.sol | 100.00% (4/4) | 100.00% (5/5) | 100.00% (2/2) | 100.00% (1/1) | -| Total | 100.00% (4/4) | 100.00% (5/5) | 100.00% (2/2) | 100.00% (1/1) | ++===================================================================================+ +| src/AContract.sol | 100.00% (5/5) | 100.00% (5/5) | 100.00% (2/2) | 100.00% (1/1) | +|-------------------+---------------+---------------+---------------+---------------| +| Total | 100.00% (5/5) | 100.00% (5/5) | 100.00% (2/2) | 100.00% (1/1) | +╰-------------------+---------------+---------------+---------------+---------------╯ -"#]], - ); +"#]]); }); -forgetest!(test_identical_bytecodes, |prj, cmd| { +forgetest!(identical_bytecodes, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -1262,17 +1437,20 @@ contract AContractTest is DSTest { ) .unwrap(); - cmd.arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq(str![[r#" + cmd.arg("coverage").assert_success().stdout_eq(str![[r#" ... -| File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/AContract.sol | 100.00% (9/9) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (3/3) | -| Total | 100.00% (9/9) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (3/3) | +╭-------------------+-----------------+---------------+---------------+---------------╮ +| File | % Lines | % Statements | % Branches | % Funcs | ++=====================================================================================+ +| src/AContract.sol | 100.00% (12/12) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (3/3) | +|-------------------+-----------------+---------------+---------------+---------------| +| Total | 100.00% (12/12) | 100.00% (9/9) | 100.00% (0/0) | 100.00% (3/3) | +╰-------------------+-----------------+---------------+---------------+---------------╯ "#]]); }); -forgetest!(test_constructors_coverage, |prj, cmd| { +forgetest!(constructors, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -1312,19 +1490,24 @@ contract AContractTest is DSTest { ) .unwrap(); - cmd.arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq(str![[r#" + cmd.arg("coverage").assert_success().stdout_eq(str![[r#" ... +╭-------------------+---------------+---------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/AContract.sol | 100.00% (2/2) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | -| Total | 100.00% (2/2) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | ++===================================================================================+ +| src/AContract.sol | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | +|-------------------+---------------+---------------+---------------+---------------| +| Total | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | +╰-------------------+---------------+---------------+---------------+---------------╯ "#]]); }); -// -// Test that constructor with no statements is not counted in functions coverage. 
-forgetest!(test_ignore_empty_constructors_coverage, |prj, cmd| { +// https://github.com/foundry-rs/foundry/issues/9270, https://github.com/foundry-rs/foundry/issues/9444 +// Test that special functions with no statements are not counted. +// TODO: We should support this, but for now just ignore them. +// See TODO in `visit_function_definition`: https://github.com/foundry-rs/foundry/issues/9458 +forgetest!(empty_functions, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -1332,6 +1515,8 @@ forgetest!(test_ignore_empty_constructors_coverage, |prj, cmd| { contract AContract { constructor() {} + receive() external payable {} + function increment() public {} } "#, @@ -1348,25 +1533,49 @@ contract AContractTest is DSTest { function test_constructors() public { AContract a = new AContract(); a.increment(); + (bool success,) = address(a).call{value: 1}(""); + require(success); } } "#, ) .unwrap(); + assert_lcov( + cmd.arg("coverage"), + str![[r#" +TN: +SF:src/AContract.sol +DA:9,1 +FN:9,AContract.increment +FNDA:1,AContract.increment +FNF:1 +FNH:1 +LF:1 +LH:1 +BRF:0 +BRH:0 +end_of_record + +"#]], + ); + // Assert there's only one function (`increment`) reported. - cmd.arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq(str![[r#" + cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#" ... +╭-------------------+---------------+---------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/AContract.sol | 100.00% (0/0) | 100.00% (0/0) | 100.00% (0/0) | 100.00% (1/1) | -| Total | 100.00% (0/0) | 100.00% (0/0) | 100.00% (0/0) | 100.00% (1/1) | ++===================================================================================+ +| src/AContract.sol | 100.00% (1/1) | 100.00% (0/0) | 100.00% (0/0) | 100.00% (1/1) | +|-------------------+---------------+---------------+---------------+---------------| +| Total | 100.00% (1/1) | 100.00% (0/0) | 100.00% (0/0) | 100.00% (1/1) | +╰-------------------+---------------+---------------+---------------+---------------╯ "#]]); }); // Test coverage for `receive` functions. -forgetest!(test_receive_coverage, |prj, cmd| { +forgetest!(receive, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -1403,20 +1612,47 @@ contract AContractTest is DSTest { ) .unwrap(); - // Assert both constructor and receive functions coverage reported. - cmd.arg("coverage").args(["--summary".to_string()]).assert_success().stdout_eq(str![[r#" + // Assert both constructor and receive functions coverage reported and appear in LCOV. + assert_lcov( + cmd.arg("coverage"), + str![[r#" +TN: +SF:src/AContract.sol +DA:7,1 +FN:7,AContract.constructor +FNDA:1,AContract.constructor +DA:8,1 +DA:11,1 +FN:11,AContract.receive +FNDA:1,AContract.receive +DA:12,1 +FNF:2 +FNH:2 +LF:4 +LH:4 +BRF:0 +BRH:0 +end_of_record + +"#]], + ); + + cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#" ... 
+╭-------------------+---------------+---------------+---------------+---------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|---------------|---------------|---------------|---------------| -| src/AContract.sol | 100.00% (2/2) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | -| Total | 100.00% (2/2) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | ++===================================================================================+ +| src/AContract.sol | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | +|-------------------+---------------+---------------+---------------+---------------| +| Total | 100.00% (4/4) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) | +╰-------------------+---------------+---------------+---------------+---------------╯ "#]]); }); -// +// https://github.com/foundry-rs/foundry/issues/9322 // Test coverage with `--ir-minimum` for solidity < 0.8.5. -forgetest!(test_ir_minimum_coverage, |prj, cmd| { +forgetest!(ir_minimum_early, |prj, cmd| { prj.insert_ds_test(); prj.add_source( "AContract.sol", @@ -1440,10 +1676,18 @@ contract AContract { // Assert coverage doesn't fail with `Error: Unknown key "inliner"`. cmd.arg("coverage").arg("--ir-minimum").assert_success().stdout_eq(str![[r#" ... +╭-------------------+-------------+--------------+---------------+-------------╮ | File | % Lines | % Statements | % Branches | % Funcs | -|-------------------|-------------|--------------|---------------|-------------| -| src/AContract.sol | 0.00% (0/4) | 0.00% (0/4) | 100.00% (0/0) | 0.00% (0/1) | -| Total | 0.00% (0/4) | 0.00% (0/4) | 100.00% (0/0) | 0.00% (0/1) | ++==============================================================================+ +| src/AContract.sol | 0.00% (0/5) | 0.00% (0/4) | 100.00% (0/0) | 0.00% (0/1) | +|-------------------+-------------+--------------+---------------+-------------| +| Total | 0.00% (0/5) | 0.00% (0/4) | 100.00% (0/0) | 0.00% (0/1) | +╰-------------------+-------------+--------------+---------------+-------------╯ "#]]); }); + +#[track_caller] +fn assert_lcov(cmd: &mut TestCommand, data: impl IntoData) { + cmd.args(["--report=lcov", "--report-file"]).assert_file(data.into_data()); +} diff --git a/crates/forge/tests/cli/create.rs b/crates/forge/tests/cli/create.rs index ebf8c81db..6a78f8323 100644 --- a/crates/forge/tests/cli/create.rs +++ b/crates/forge/tests/cli/create.rs @@ -9,7 +9,9 @@ use anvil::{spawn, NodeConfig}; use foundry_compilers::artifacts::{remappings::Remapping, BytecodeHash}; use foundry_config::Config; use foundry_test_utils::{ - forgetest, forgetest_async, str, + forgetest, forgetest_async, + snapbox::IntoData, + str, util::{OutputExt, TestCommand, TestProject}, }; use std::str::FromStr; @@ -145,6 +147,7 @@ forgetest_async!(can_create_template_contract, |prj, cmd| { let config = Config { bytecode_hash: BytecodeHash::None, ..Default::default() }; prj.write_config(config); + // Dry-run without the `--broadcast` flag cmd.forge_fuse().args([ "create", format!("./src/{TEMPLATE_CONTRACT}.sol:{TEMPLATE_CONTRACT}").as_str(), @@ -154,20 +157,131 @@ forgetest_async!(can_create_template_contract, |prj, cmd| { pk.as_str(), ]); + // Dry-run cmd.assert().stdout_eq(str![[r#" [COMPILING_FILES] with [SOLC_VERSION] [SOLC_VERSION] [ELAPSED] Compiler run successful! 
-Deployer: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 -Deployed to: 0x5FbDB2315678afecb367f032d93F642f64180aa3 -[TX_HASH] +Contract: Counter +Transaction: { + "from": "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266", + "to": null, + "maxFeePerGas": "0x77359401", + "maxPriorityFeePerGas": "0x1", + "gas": "0x17575", + "input": "[..]", + "nonce": "0x0", + "chainId": "0x7a69" +} +ABI: [ + { + "type": "function", + "name": "increment", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "number", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "uint256", + "internalType": "uint256" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "setNumber", + "inputs": [ + { + "name": "newNumber", + "type": "uint256", + "internalType": "uint256" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + } +] + "#]]); + // Dry-run with `--json` flag + cmd.arg("--json").assert().stdout_eq( + str![[r#" +{ + "contract": "Counter", + "transaction": { + "from": "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266", + "to": null, + "maxFeePerGas": "0x77359401", + "maxPriorityFeePerGas": "0x1", + "gas": "0x17575", + "input": "[..]", + "nonce": "0x0", + "chainId": "0x7a69" + }, + "abi": [ + { + "type": "function", + "name": "increment", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "number", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "uint256", + "internalType": "uint256" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "setNumber", + "inputs": [ + { + "name": "newNumber", + "type": "uint256", + "internalType": "uint256" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + } + ] +} + +"#]] + .is_json(), + ); + + cmd.forge_fuse().args([ + "create", + format!("./src/{TEMPLATE_CONTRACT}.sol:{TEMPLATE_CONTRACT}").as_str(), + "--rpc-url", + rpc.as_str(), + "--private-key", + pk.as_str(), + "--broadcast", + ]); + cmd.assert().stdout_eq(str![[r#" No files changed, compilation skipped Deployer: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 -Deployed to: 0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512 +Deployed to: 0x5FbDB2315678afecb367f032d93F642f64180aa3 [TX_HASH] "#]]); @@ -193,6 +307,7 @@ forgetest_async!(can_create_using_unlocked, |prj, cmd| { "--from", format!("{dev:?}").as_str(), "--unlocked", + "--broadcast", ]); cmd.assert().stdout_eq(str![[r#" @@ -204,6 +319,7 @@ Deployed to: 0x5FbDB2315678afecb367f032d93F642f64180aa3 [TX_HASH] "#]]); + cmd.assert().stdout_eq(str![[r#" No files changed, compilation skipped Deployer: 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 @@ -248,6 +364,7 @@ contract ConstructorContract { rpc.as_str(), "--private-key", pk.as_str(), + "--broadcast", "--constructor-args", "My Constructor", ]) @@ -285,6 +402,7 @@ contract TupleArrayConstructorContract { rpc.as_str(), "--private-key", pk.as_str(), + "--broadcast", "--constructor-args", "[(1,2), (2,3), (3,4)]", ]) @@ -335,6 +453,7 @@ contract UniswapV2Swap { rpc.as_str(), "--private-key", pk.as_str(), + "--broadcast", ]) .assert_success() .stdout_eq(str![[r#" diff --git a/crates/forge/tests/cli/debug.rs b/crates/forge/tests/cli/debug.rs index e8cd08418..c217beeb5 100644 --- a/crates/forge/tests/cli/debug.rs +++ b/crates/forge/tests/cli/debug.rs @@ -3,7 +3,7 @@ use std::path::Path; // Sets up a debuggable test case. // Run with `cargo test-debugger`. 
-forgetest_async!( +forgetest!( #[ignore = "ran manually"] manual_debug_setup, |prj, cmd| { diff --git a/crates/forge/tests/cli/ext_integration.rs b/crates/forge/tests/cli/ext_integration.rs index e9437f04c..2e5e383e5 100644 --- a/crates/forge/tests/cli/ext_integration.rs +++ b/crates/forge/tests/cli/ext_integration.rs @@ -100,6 +100,7 @@ fn lil_web3() { #[test] #[cfg_attr(windows, ignore = "Windows cannot find installed programs")] +#[cfg(not(feature = "isolate-by-default"))] fn snekmate() { ExtTester::new("pcaversaccio", "snekmate", "df226f4a45e86c8f8c3ff1f9fa3443d260002050") .install_command(&["pnpm", "install", "--prefer-offline"]) diff --git a/crates/forge/tests/cli/geiger.rs b/crates/forge/tests/cli/geiger.rs new file mode 100644 index 000000000..fd2165628 --- /dev/null +++ b/crates/forge/tests/cli/geiger.rs @@ -0,0 +1,92 @@ +forgetest!(call, |prj, cmd| { + prj.add_source( + "call.sol", + r#" + contract A is Test { + function do_ffi() public { + string[] memory inputs = new string[](1); + vm.ffi(inputs); + } + } + "#, + ) + .unwrap(); + + cmd.arg("geiger").assert_code(1).stderr_eq(str![[r#" +error: usage of unsafe cheatcode `vm.ffi` + [FILE]:7:20 + | +7 | vm.ffi(inputs); + | ^^^ + | + + +"#]]); +}); + +forgetest!(assignment, |prj, cmd| { + prj.add_source( + "assignment.sol", + r#" + contract A is Test { + function do_ffi() public { + string[] memory inputs = new string[](1); + bytes stuff = vm.ffi(inputs); + } + } + "#, + ) + .unwrap(); + + cmd.arg("geiger").assert_code(1).stderr_eq(str![[r#" +error: usage of unsafe cheatcode `vm.ffi` + [FILE]:7:34 + | +7 | bytes stuff = vm.ffi(inputs); + | ^^^ + | + + +"#]]); +}); + +forgetest!(exit_code, |prj, cmd| { + prj.add_source( + "multiple.sol", + r#" + contract A is Test { + function do_ffi() public { + vm.ffi(inputs); + vm.ffi(inputs); + vm.ffi(inputs); + } + } + "#, + ) + .unwrap(); + + cmd.arg("geiger").assert_code(3).stderr_eq(str![[r#" +error: usage of unsafe cheatcode `vm.ffi` + [FILE]:6:20 + | +6 | vm.ffi(inputs); + | ^^^ + | + +error: usage of unsafe cheatcode `vm.ffi` + [FILE]:7:20 + | +7 | vm.ffi(inputs); + | ^^^ + | + +error: usage of unsafe cheatcode `vm.ffi` + [FILE]:8:20 + | +8 | vm.ffi(inputs); + | ^^^ + | + + +"#]]); +}); diff --git a/crates/forge/tests/cli/inline_config.rs b/crates/forge/tests/cli/inline_config.rs new file mode 100644 index 000000000..5e0273195 --- /dev/null +++ b/crates/forge/tests/cli/inline_config.rs @@ -0,0 +1,272 @@ +forgetest!(runs, |prj, cmd| { + prj.add_test( + "inline.sol", + " + contract Inline { + /** forge-config: default.fuzz.runs = 2 */ + function test1(bool) public {} + + \t///\t forge-config:\tdefault.fuzz.runs=\t3 \t + + function test2(bool) public {} + } + ", + ) + .unwrap(); + + cmd.arg("test").assert_success().stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! + +Ran 2 tests for test/inline.sol:Inline +[PASS] test1(bool) (runs: 2, [AVG_GAS]) +[PASS] test2(bool) (runs: 3, [AVG_GAS]) +Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests) + +"#]]); + + // Make sure inline config is parsed in coverage too. + cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! +Analysing contracts... +Running tests... 
+ +Ran 2 tests for test/inline.sol:Inline +[PASS] test1(bool) (runs: 2, [AVG_GAS]) +[PASS] test2(bool) (runs: 3, [AVG_GAS]) +Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests) + +╭-------+---------------+---------------+---------------+---------------╮ +| File | % Lines | % Statements | % Branches | % Funcs | ++=======================================================================+ +| Total | 100.00% (0/0) | 100.00% (0/0) | 100.00% (0/0) | 100.00% (0/0) | +╰-------+---------------+---------------+---------------+---------------╯ + +"#]]); +}); + +forgetest!(invalid_profile, |prj, cmd| { + prj.add_test( + "inline.sol", + " + /** forge-config: unknown.fuzz.runs = 2 */ + contract Inline { + function test(bool) public {} + } + ", + ) + .unwrap(); + + cmd.arg("test").assert_failure().stderr_eq(str![[r#" +Error: Inline config error at test/inline.sol:0:0:0: invalid profile `unknown.fuzz.runs = 2`; valid profiles: default + +"#]]); +}); + +// TODO: Uncomment once this done for normal config too. +/* +forgetest!(invalid_key, |prj, cmd| { + prj.add_test( + "inline.sol", + " + /** forge-config: default.fuzzz.runs = 2 */ + contract Inline { + function test(bool) public {} + } + ", + ) + .unwrap(); + + cmd.arg("test").assert_failure().stderr_eq(str![[]]).stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! + +Ran 1 test for test/inline.sol:Inline +[FAIL: failed to get inline configuration: unknown config section `default`] test(bool) ([GAS]) +Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests) + +Failing tests: +Encountered 1 failing test in test/inline.sol:Inline +[FAIL: failed to get inline configuration: unknown config section `default`] test(bool) ([GAS]) + +Encountered a total of 1 failing tests, 0 tests succeeded + +"#]]); +}); + +forgetest!(invalid_key_2, |prj, cmd| { + prj.add_test( + "inline.sol", + " +/** forge-config: default.fuzz.runss = 2 */ + contract Inline { + function test(bool) public {} + } + ", + ) + .unwrap(); + + cmd.arg("test").assert_failure().stderr_eq(str![[]]).stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! + +Ran 1 test for test/inline.sol:Inline +[FAIL: failed to get inline configuration: unknown config section `default`] test(bool) ([GAS]) +Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests) + +Failing tests: +Encountered 1 failing test in test/inline.sol:Inline +[FAIL: failed to get inline configuration: unknown config section `default`] test(bool) ([GAS]) + +Encountered a total of 1 failing tests, 0 tests succeeded + +"#]]); +}); +*/ + +forgetest!(invalid_value, |prj, cmd| { + prj.add_test( + "inline.sol", + " + /** forge-config: default.fuzz.runs = [2] */ + contract Inline { + function test(bool) public {} + } + ", + ) + .unwrap(); + + cmd.arg("test").assert_failure().stderr_eq(str![[]]).stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! + +Ran 1 test for test/inline.sol:Inline +[FAIL: invalid type: found sequence, expected u32 for key "default.fuzz.runs" in inline config] setUp() ([GAS]) +Suite result: FAILED. 
0 passed; 1 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests) + +Failing tests: +Encountered 1 failing test in test/inline.sol:Inline +[FAIL: invalid type: found sequence, expected u32 for key "default.fuzz.runs" in inline config] setUp() ([GAS]) + +Encountered a total of 1 failing tests, 0 tests succeeded + +"#]]); +}); + +forgetest!(invalid_value_2, |prj, cmd| { + prj.add_test( + "inline.sol", + " + /** forge-config: default.fuzz.runs = '2' */ + contract Inline { + function test(bool) public {} + } + ", + ) + .unwrap(); + + cmd.arg("test").assert_failure().stderr_eq(str![[]]).stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! + +Ran 1 test for test/inline.sol:Inline +[FAIL: invalid type: found string "2", expected u32 for key "default.fuzz.runs" in inline config] setUp() ([GAS]) +Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 0 tests passed, 1 failed, 0 skipped (1 total tests) + +Failing tests: +Encountered 1 failing test in test/inline.sol:Inline +[FAIL: invalid type: found string "2", expected u32 for key "default.fuzz.runs" in inline config] setUp() ([GAS]) + +Encountered a total of 1 failing tests, 0 tests succeeded + +"#]]); +}); + +forgetest_init!(evm_version, |prj, cmd| { + prj.wipe_contracts(); + prj.add_test( + "inline.sol", + r#" + import {Test} from "forge-std/Test.sol"; + + contract Dummy { + function getBlobBaseFee() public returns (uint256) { + return block.blobbasefee; + } + } + + contract FunctionConfig is Test { + Dummy dummy; + + function setUp() public { + dummy = new Dummy(); + } + + /// forge-config: default.evm_version = "shanghai" + function test_old() public { + vm.expectRevert(); + dummy.getBlobBaseFee(); + } + + function test_new() public { + dummy.getBlobBaseFee(); + } + } + + /// forge-config: default.evm_version = "shanghai" + contract ContractConfig is Test { + Dummy dummy; + + function setUp() public { + dummy = new Dummy(); + } + + function test_old() public { + vm.expectRevert(); + dummy.getBlobBaseFee(); + } + + /// forge-config: default.evm_version = "cancun" + function test_new() public { + dummy.getBlobBaseFee(); + } + } + "#, + ) + .unwrap(); + + cmd.arg("test").arg("--evm-version=cancun").assert_success().stdout_eq(str![[r#" +... +Ran 2 tests for test/inline.sol:FunctionConfig +[PASS] test_new() ([GAS]) +[PASS] test_old() ([GAS]) +Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 2 tests for test/inline.sol:ContractConfig +[PASS] test_new() ([GAS]) +[PASS] test_old() ([GAS]) +Suite result: ok. 
2 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 2 test suites [ELAPSED]: 4 tests passed, 0 failed, 0 skipped (4 total tests) + +"#]]); +}); diff --git a/crates/forge/tests/cli/main.rs b/crates/forge/tests/cli/main.rs index 2698df9fe..fdf69f101 100644 --- a/crates/forge/tests/cli/main.rs +++ b/crates/forge/tests/cli/main.rs @@ -4,7 +4,6 @@ extern crate foundry_test_utils; pub mod constants; pub mod utils; -mod alphanet; mod bind_json; mod build; mod cache; @@ -17,7 +16,10 @@ mod create; mod debug; mod doc; mod eip712; +mod geiger; +mod inline_config; mod multi_script; +mod odyssey; mod script; mod soldeer; mod svm; diff --git a/crates/forge/tests/cli/alphanet.rs b/crates/forge/tests/cli/odyssey.rs similarity index 65% rename from crates/forge/tests/cli/alphanet.rs rename to crates/forge/tests/cli/odyssey.rs index 6e41551ac..49b8c01fc 100644 --- a/crates/forge/tests/cli/alphanet.rs +++ b/crates/forge/tests/cli/odyssey.rs @@ -1,6 +1,10 @@ // Ensure we can run basic counter tests with EOF support. -#[cfg(target_os = "linux")] forgetest_init!(test_eof_flag, |prj, cmd| { + if !has_docker() { + println!("skipping because no docker is available"); + return; + } + cmd.forge_fuse().args(["test", "--eof"]).assert_success().stdout_eq(str![[r#" [COMPILING_FILES] with [SOLC_VERSION] [SOLC_VERSION] [ELAPSED] @@ -17,3 +21,12 @@ Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests) "#]]); }); + +fn has_docker() -> bool { + if !cfg!(target_os = "linux") { + return false; + } + + // `images` will also check for the daemon. + std::process::Command::new("docker").arg("images").output().is_ok_and(|o| o.status.success()) +} diff --git a/crates/forge/tests/cli/script.rs b/crates/forge/tests/cli/script.rs index 82c61ccbc..9cf3e746c 100644 --- a/crates/forge/tests/cli/script.rs +++ b/crates/forge/tests/cli/script.rs @@ -1,11 +1,12 @@ //! Contains various tests related to `forge script`. 
use crate::constants::TEMPLATE_CONTRACT; -use alloy_primitives::{hex, Address, Bytes}; +use alloy_primitives::{address, hex, Address, Bytes}; use anvil::{spawn, NodeConfig}; use forge_script_sequence::ScriptSequence; use foundry_test_utils::{ rpc, + snapbox::IntoData, util::{OTHER_SOLC_VERSION, SOLC_VERSION}, ScriptOutcome, ScriptTester, }; @@ -199,8 +200,7 @@ contract DeployScript is Script { let deploy_contract = deploy_script.display().to_string() + ":DeployScript"; - let node_config = - NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_endpoint())); + let node_config = NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_url())); let (_api, handle) = spawn(node_config).await; let dev = handle.dev_accounts().next().unwrap(); cmd.set_current_dir(prj.root()); @@ -301,8 +301,7 @@ contract DeployScript is Script { let deploy_contract = deploy_script.display().to_string() + ":DeployScript"; - let node_config = - NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_endpoint())); + let node_config = NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_url())); let (_api, handle) = spawn(node_config).await; let private_key = "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80".to_string(); @@ -491,8 +490,7 @@ contract DeployScript is Script { let deploy_contract = deploy_script.display().to_string() + ":DeployScript"; - let node_config = - NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_endpoint())); + let node_config = NodeConfig::test().with_eth_rpc_url(Some(rpc::next_http_archive_rpc_url())); let (_api, handle) = spawn(node_config).await; let private_key = "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80".to_string(); @@ -854,6 +852,70 @@ forgetest_async!(can_deploy_with_create2, |prj, cmd| { .run(ScriptOutcome::ScriptFailed); }); +forgetest_async!(can_deploy_with_custom_create2, |prj, cmd| { + let (api, handle) = spawn(NodeConfig::test()).await; + let mut tester = ScriptTester::new_broadcast(cmd, &handle.http_endpoint(), prj.root()); + let create2 = Address::from_str("0x0000000000000000000000000000000000b4956c").unwrap(); + + // Prepare CREATE2 Deployer + api.anvil_set_code( + create2, + Bytes::from_static(foundry_evm::constants::DEFAULT_CREATE2_DEPLOYER_RUNTIME_CODE), + ) + .await + .unwrap(); + + tester + .add_deployer(0) + .load_private_keys(&[0]) + .await + .add_create2_deployer(create2) + .add_sig("BroadcastTestNoLinking", "deployCreate2(address)") + .arg(&create2.to_string()) + .simulate(ScriptOutcome::OkSimulation) + .broadcast(ScriptOutcome::OkBroadcast) + .assert_nonce_increment(&[(0, 2)]) + .await; +}); + +forgetest_async!(can_deploy_with_custom_create2_notmatched_bytecode, |prj, cmd| { + let (api, handle) = spawn(NodeConfig::test()).await; + let mut tester = ScriptTester::new_broadcast(cmd, &handle.http_endpoint(), prj.root()); + let create2 = Address::from_str("0x0000000000000000000000000000000000b4956c").unwrap(); + + // Prepare CREATE2 Deployer + api.anvil_set_code( + create2, + Bytes::from_static(&hex!("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe03601600081602082378035828234f58015156039578182fd5b8082525050506014600cef")), + ) + .await + .unwrap(); + + tester + .add_deployer(0) + .load_private_keys(&[0]) + .await + .add_create2_deployer(create2) + .add_sig("BroadcastTestNoLinking", "deployCreate2()") + .simulate(ScriptOutcome::ScriptFailed) + .broadcast(ScriptOutcome::ScriptFailed); +}); + +forgetest_async!(canot_deploy_with_nonexist_create2, 
|prj, cmd| { + let (_api, handle) = spawn(NodeConfig::test()).await; + let mut tester = ScriptTester::new_broadcast(cmd, &handle.http_endpoint(), prj.root()); + let create2 = Address::from_str("0x0000000000000000000000000000000000b4956c").unwrap(); + + tester + .add_deployer(0) + .load_private_keys(&[0]) + .await + .add_create2_deployer(create2) + .add_sig("BroadcastTestNoLinking", "deployCreate2()") + .simulate(ScriptOutcome::ScriptFailed) + .broadcast(ScriptOutcome::ScriptFailed); +}); + forgetest_async!(can_deploy_and_simulate_25_txes_concurrently, |prj, cmd| { let (_api, handle) = spawn(NodeConfig::test()).await; let mut tester = ScriptTester::new_broadcast(cmd, &handle.http_endpoint(), prj.root()); @@ -1821,6 +1883,87 @@ Warning: Script contains a transaction to 0x000000000000000000000000000000000000 "#]]); }); +// Asserts that the script runs with expected non-output using `--quiet` flag +forgetest_async!(adheres_to_quiet_flag, |prj, cmd| { + foundry_test_utils::util::initialize(prj.root()); + prj.add_script( + "Foo", + r#" +import "forge-std/Script.sol"; + +contract SimpleScript is Script { + function run() external returns (bool success) { + vm.startBroadcast(); + (success, ) = address(0).call(""); + } +} + "#, + ) + .unwrap(); + + let (_api, handle) = spawn(NodeConfig::test()).await; + + cmd.args([ + "script", + "SimpleScript", + "--fork-url", + &handle.http_endpoint(), + "--sender", + "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + "--broadcast", + "--unlocked", + "--non-interactive", + "--quiet", + ]) + .assert_empty_stdout(); +}); + +// Asserts that the script runs with expected non-output using `--quiet` flag +forgetest_async!(adheres_to_json_flag, |prj, cmd| { + if cfg!(feature = "isolate-by-default") { + return; + } + + foundry_test_utils::util::initialize(prj.root()); + prj.add_script( + "Foo", + r#" +import "forge-std/Script.sol"; + +contract SimpleScript is Script { + function run() external returns (bool success) { + vm.startBroadcast(); + (success, ) = address(0).call(""); + } +} + "#, + ) + .unwrap(); + + let (_api, handle) = spawn(NodeConfig::test()).await; + + cmd.args([ + "script", + "SimpleScript", + "--fork-url", + &handle.http_endpoint(), + "--sender", + "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + "--broadcast", + "--unlocked", + "--non-interactive", + "--json", + ]) + .assert_success() + .stdout_eq(str![[r#" 
+{"logs":[],"returns":{"success":{"internal_type":"bool","value":"true"}},"success":true,"raw_logs":[],"traces":[["Deployment",{"arena":[{"parent":null,"children":[],"idx":0,"trace":{"depth":0,"success":true,"caller":"0x1804c8ab1f12e6bbf3894d4083f33e07309d1f38","address":"0x5b73c5498c1e3b4dba84de0f1833c4a029d90519","maybe_precompile":false,"selfdestruct_address":null,"selfdestruct_refund_target":null,"selfdestruct_transferred_value":null,"kind":"CREATE","value":"0x0","data":"0x6080604052600c805462ff00ff191662010001179055348015601f575f5ffd5b506101568061002d5f395ff3fe608060405234801561000f575f5ffd5b5060043610610034575f3560e01c8063c040622614610038578063f8ccbf4714610054575b5f5ffd5b610040610067565b604051901515815260200160405180910390f35b600c546100409062010000900460ff1681565b5f7f885cb69240a935d632d79c317109709ecfa91a80626ff3989d68f67f5b1dd12d5f1c6001600160a01b0316637fb5297f6040518163ffffffff1660e01b81526004015f604051808303815f87803b1580156100c2575f5ffd5b505af11580156100d4573d5f5f3e3d5ffd5b50506040515f925090508181818181805af19150503d805f8114610113576040519150601f19603f3d011682016040523d82523d5f602084013e610118565b606091505b50909291505056fea264697066735822122060ba6332e526de9b6bc731fb4682b44e42845196324ec33068982984d700cdd964736f6c634300081b0033","output":"0x608060405234801561000f575f5ffd5b5060043610610034575f3560e01c8063c040622614610038578063f8ccbf4714610054575b5f5ffd5b610040610067565b604051901515815260200160405180910390f35b600c546100409062010000900460ff1681565b5f7f885cb69240a935d632d79c317109709ecfa91a80626ff3989d68f67f5b1dd12d5f1c6001600160a01b0316637fb5297f6040518163ffffffff1660e01b81526004015f604051808303815f87803b1580156100c2575f5ffd5b505af11580156100d4573d5f5f3e3d5ffd5b50506040515f925090508181818181805af19150503d805f8114610113576040519150601f19603f3d011682016040523d82523d5f602084013e610118565b606091505b50909291505056fea264697066735822122060ba6332e526de9b6bc731fb4682b44e42845196324ec33068982984d700cdd964736f6c634300081b0033","gas_used":90639,"gas_limit":1073682810,"status":"Return","steps":[],"decoded":{"label":null,"return_data":null,"call_data":null}},"logs":[],"ordering":[]}]}],["Execution",{"arena":[{"parent":null,"children":[1,2],"idx":0,"trace":{"depth":0,"success":true,"caller":"0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266","address":"0x5b73c5498c1e3b4dba84de0f1833c4a029d90519","maybe_precompile":null,"selfdestruct_address":null,"selfdestruct_refund_target":null,"selfdestruct_transferred_value":null,"kind":"CALL","value":"0x0","data":"0xc0406226","output":"0x0000000000000000000000000000000000000000000000000000000000000001","gas_used":3214,"gas_limit":1073720760,"status":"Return","steps":[],"decoded":{"label":null,"return_data":null,"call_data":null}},"logs":[],"ordering":[{"Call":0},{"Call":1}]},{"parent":0,"children":[],"idx":1,"trace":{"depth":1,"success":true,"caller":"0x5b73c5498c1e3b4dba84de0f1833c4a029d90519","address":"0x7109709ecfa91a80626ff3989d68f67f5b1dd12d","maybe_precompile":null,"selfdestruct_address":null,"selfdestruct_refund_target":null,"selfdestruct_transferred_value":null,"kind":"CALL","value":"0x0","data":"0x7fb5297f","output":"0x","gas_used":0,"gas_limit":1056940983,"status":"Return","steps":[],"decoded":{"label":null,"return_data":null,"call_data":null}},"logs":[],"ordering":[]},{"parent":0,"children":[],"idx":2,"trace":{"depth":1,"success":true,"caller":"0x5b73c5498c1e3b4dba84de0f1833c4a029d90519","address":"0x0000000000000000000000000000000000000000","maybe_precompile":null,"selfdestruct_address":null,"selfdestruct_refund_target":null,"selfdestruct_transferred_valu
e":null,"kind":"CALL","value":"0x0","data":"0x","output":"0x","gas_used":0,"gas_limit":1056940820,"status":"Stop","steps":[],"decoded":{"label":null,"return_data":null,"call_data":null}},"logs":[],"ordering":[]}]}]],"gas_used":24278,"labeled_addresses":{},"returned":"0x0000000000000000000000000000000000000000000000000000000000000001","address":null} +{"chain":31337,"estimated_gas_price":"2.000000001","estimated_total_gas_used":29005,"estimated_amount_required":"0.000058010000029005"} +{"chain":"anvil-hardhat","status":"success","tx_hash":"0x4f78afe915fceb282c7625a68eb350bc0bf78acb59ad893e5c62b710a37f3156","contract_address":null,"block_number":1,"gas_used":21000,"gas_price":1000000001} +{"status":"success","transactions":"[..]/broadcast/Foo.sol/31337/run-latest.json","sensitive":"[..]/cache/Foo.sol/31337/run-latest.json"} + +"#]].is_jsonlines()); +}); + // https://github.com/foundry-rs/foundry/pull/7742 forgetest_async!(unlocked_no_sender, |prj, cmd| { foundry_test_utils::util::initialize(prj.root()); @@ -1924,7 +2067,7 @@ contract SimpleScript is Script { ]); cmd.assert_failure().stderr_eq(str![[r#" -Error: script failed: missing CREATE2 deployer +Error: script failed: missing CREATE2 deployer: 0x4e59b44847b379578588920cA78FbF26c0B4956C "#]]); }); @@ -2039,8 +2182,7 @@ forgetest_async!(can_deploy_library_create2_different_sender, |prj, cmd| { // forgetest_async!(test_broadcast_raw_create2_deployer, |prj, cmd| { - let (_api, handle) = - spawn(NodeConfig::test().with_disable_default_create2_deployer(true)).await; + let (api, handle) = spawn(NodeConfig::test().with_disable_default_create2_deployer(true)).await; foundry_test_utils::util::initialize(prj.root()); prj.add_script( @@ -2051,7 +2193,7 @@ import "forge-std/Script.sol"; contract SimpleScript is Script { function run() external { // send funds to create2 factory deployer - vm.broadcast(); + vm.startBroadcast(); payable(0x3fAB184622Dc19b6109349B94811493BF2a45362).transfer(10000000 gwei); // deploy create2 factory vm.broadcastRawTransaction( @@ -2070,6 +2212,7 @@ contract SimpleScript is Script { "--rpc-url", &handle.http_endpoint(), "--broadcast", + "--slow", "SimpleScript", ]); @@ -2104,6 +2247,12 @@ ONCHAIN EXECUTION COMPLETE & SUCCESSFUL. "#]]); + + assert!(!api + .get_code(address!("4e59b44847b379578588920cA78FbF26c0B4956C"), Default::default()) + .await + .unwrap() + .is_empty()); }); forgetest_init!(can_get_script_wallets, |prj, cmd| { @@ -2219,12 +2368,12 @@ contract SimpleScript is Script { [SOLC_VERSION] [ELAPSED] Compiler run successful! Traces: - [103771] SimpleScript::run() + [..] SimpleScript::run() ├─ [0] VM::startBroadcast() │ └─ ← [Return] - ├─ [23273] → new A@0x5b73C5498c1E3b4dbA84de0F1833c4a029d90519 + ├─ [..] → new A@0x5b73C5498c1E3b4dbA84de0F1833c4a029d90519 │ └─ ← [Return] 116 bytes of code - ├─ [13162] → new B@0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496 + ├─ [..] → new B@0x7FA9385bE102ac3EAc297483Dd6233D62b3e1496 │ ├─ [145] A::getValue() [staticcall] │ │ └─ ← [Return] 100 │ └─ ← [Return] 62 bytes of code @@ -2248,33 +2397,6 @@ Simulated On-chain Traces: "#]]); }); -// Tests that chained errors are properly displayed. 
-// -forgetest_init!( - #[ignore] - should_display_evm_chained_error, - |prj, cmd| { - let script = prj - .add_source( - "Foo", - r#" -import "forge-std/Script.sol"; - -contract ContractScript is Script { - function run() public { - } -} - "#, - ) - .unwrap(); - cmd.arg("script").arg(script).args(["--fork-url", "https://public-node.testnet.rsk.co"]).assert_failure().stderr_eq(str![[r#" -Error: Failed to deploy script: -backend: failed while inspecting; header validation error: `prevrandao` not set; `prevrandao` not set; - -"#]]); - } -); - forgetest_async!(should_detect_additional_contracts, |prj, cmd| { let (_api, handle) = spawn(NodeConfig::test()).await; diff --git a/crates/forge/tests/cli/test_cmd.rs b/crates/forge/tests/cli/test_cmd.rs index 8e064c63c..e8da6a490 100644 --- a/crates/forge/tests/cli/test_cmd.rs +++ b/crates/forge/tests/cli/test_cmd.rs @@ -473,7 +473,7 @@ contract Contract { ) .unwrap(); - let endpoint = rpc::next_http_archive_rpc_endpoint(); + let endpoint = rpc::next_http_archive_rpc_url(); prj.add_test( "Contract.t.sol", @@ -545,7 +545,7 @@ forgetest_init!(exit_code_error_on_fail_fast_with_json, |prj, cmd| { forgetest_init!(fork_traces, |prj, cmd| { prj.wipe_contracts(); - let endpoint = rpc::next_http_archive_rpc_endpoint(); + let endpoint = rpc::next_http_archive_rpc_url(); prj.add_test( "Contract.t.sol", @@ -699,7 +699,7 @@ contract TransientTest is Test { forgetest_init!(can_disable_block_gas_limit, |prj, cmd| { prj.wipe_contracts(); - let endpoint = rpc::next_http_archive_rpc_endpoint(); + let endpoint = rpc::next_http_archive_rpc_url(); prj.add_test( "Contract.t.sol", @@ -2026,6 +2026,48 @@ Ran 1 test suite [ELAPSED]: 0 tests passed, 0 failed, 6 skipped (6 total tests) "#]]); }); +forgetest_init!(skip_setup, |prj, cmd| { + prj.add_test( + "Counter.t.sol", + r#" +import "forge-std/Test.sol"; + +contract SkipCounterSetup is Test { + + function setUp() public { + vm.skip(true, "skip counter test"); + } + + function test_require1() public pure { + require(1 > 2); + } + + function test_require2() public pure { + require(1 > 2); + } + + function test_require3() public pure { + require(1 > 2); + } +} + "#, + ) + .unwrap(); + + cmd.args(["test", "--mc", "SkipCounterSetup"]).assert_success().stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! + +Ran 1 test for test/Counter.t.sol:SkipCounterSetup +[SKIP: skipped: skip counter test] setUp() ([GAS]) +Suite result: ok. 0 passed; 0 failed; 1 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 0 tests passed, 0 failed, 1 skipped (1 total tests) + +"#]]); +}); + forgetest_init!(should_generate_junit_xml_report, |prj, cmd| { prj.wipe_contracts(); prj.insert_ds_test(); @@ -2357,10 +2399,10 @@ Compiler run successful! Ran 1 test for test/MetadataTraceTest.t.sol:MetadataTraceTest [PASS] test_proxy_trace() ([GAS]) Traces: - [149783] MetadataTraceTest::test_proxy_trace() - ├─ [47297] → new Counter@0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + [..] MetadataTraceTest::test_proxy_trace() + ├─ [..] → new Counter@0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f │ └─ ← [Return] 236 bytes of code - ├─ [37762] → new Proxy@0x2e234DAe75C793f67A35089C9d99245E1C58470b + ├─ [..] → new Proxy@0x2e234DAe75C793f67A35089C9d99245E1C58470b │ └─ ← [Return] 62 bytes of code └─ ← [Stop] @@ -2382,10 +2424,10 @@ Compiler run successful! 
Ran 1 test for test/MetadataTraceTest.t.sol:MetadataTraceTest [PASS] test_proxy_trace() ([GAS]) Traces: - [128142] MetadataTraceTest::test_proxy_trace() - ├─ [36485] → new Counter@0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + [..] MetadataTraceTest::test_proxy_trace() + ├─ [..] → new Counter@0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f │ └─ ← [Return] 182 bytes of code - ├─ [26959] → new Proxy@0x2e234DAe75C793f67A35089C9d99245E1C58470b + ├─ [..] → new Proxy@0x2e234DAe75C793f67A35089C9d99245E1C58470b │ └─ ← [Return] 8 bytes of code └─ ← [Stop] @@ -2659,9 +2701,109 @@ contract ForkTest is Test { cmd.args(["test", "--mt", "test_fork_err_message"]).assert_failure().stdout_eq(str![[r#" ... Ran 1 test for test/ForkTest.t.sol:ForkTest -[FAIL: vm.createSelectFork: Could not instantiate forked environment with provider eth-mainnet.g.alchemy.com;] test_fork_err_message() ([GAS]) +[FAIL: vm.createSelectFork: Could not instantiate forked environment with provider eth-mainnet.g.alchemy.com] test_fork_err_message() ([GAS]) Suite result: FAILED. 0 passed; 1 failed; 0 skipped; [ELAPSED] ... "#]]); }); + +// Tests that test traces display state changes when running with verbosity. +#[cfg(not(feature = "isolate-by-default"))] +forgetest_init!(should_show_state_changes, |prj, cmd| { + cmd.args(["test", "--mt", "test_Increment", "-vvvvv"]).assert_success().stdout_eq(str![[r#" +... +Ran 1 test for test/Counter.t.sol:CounterTest +[PASS] test_Increment() ([GAS]) +Traces: + [87464] CounterTest::setUp() + ├─ [47297] → new Counter@0x5615dEB798BB3E4dFa0139dFa1b3D433Cc23b72f + │ └─ ← [Return] 236 bytes of code + ├─ [2387] Counter::setNumber(0) + │ └─ ← [Stop] + └─ ← [Stop] + + [31293] CounterTest::test_Increment() + ├─ [22337] Counter::increment() + │ ├─ storage changes: + │ │ @ 0: 0 → 1 + │ └─ ← [Stop] + ├─ [281] Counter::number() [staticcall] + │ └─ ← [Return] 1 + ├─ [0] VM::assertEq(1, 1) [staticcall] + │ └─ ← [Return] + ├─ storage changes: + │ @ 0: 0 → 1 + └─ ← [Stop] + +Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests) + +"#]]); +}); + +// Tests that chained errors are properly displayed. +// +forgetest!(displays_chained_error, |prj, cmd| { + prj.add_test( + "Foo.t.sol", + r#" +contract ContractTest { + function test_anything(uint) public {} +} + "#, + ) + .unwrap(); + + cmd.arg("test").arg("--gas-limit=100").assert_failure().stdout_eq(str![[r#" +... +Failing tests: +Encountered 1 failing test in test/Foo.t.sol:ContractTest +[FAIL: EVM error; transaction validation error: call gas cost exceeds the gas limit] setUp() ([GAS]) + +Encountered a total of 1 failing tests, 0 tests succeeded + +"#]]); +}); + +// Tests that `start/stopAndReturn` debugTraceRecording does not panic when running with +// verbosity > 3. +forgetest_init!(should_not_panic_on_debug_trace_verbose, |prj, cmd| { + prj.add_test( + "DebugTraceRecordingTest.t.sol", + r#" +import "forge-std/Test.sol"; +import {Counter} from "../src/Counter.sol"; + +contract DebugTraceRecordingTest is Test { + function test_start_stop_recording() public { + vm.startDebugTraceRecording(); + Counter counter = new Counter(); + counter.increment(); + vm.stopAndReturnDebugTraceRecording(); + } +} + "#, + ) + .unwrap(); + + cmd.args(["test", "--mt", "test_start_stop_recording", "-vvvv"]).assert_success().stdout_eq( + str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! 
+ +Ran 1 test for test/DebugTraceRecordingTest.t.sol:DebugTraceRecordingTest +[PASS] test_start_stop_recording() ([GAS]) +Traces: + [476338] DebugTraceRecordingTest::test_start_stop_recording() + └─ ← [Stop] + +Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests) + +"#]], + ); +}); diff --git a/crates/forge/tests/cli/verify.rs b/crates/forge/tests/cli/verify.rs index 154c74e30..60a794477 100644 --- a/crates/forge/tests/cli/verify.rs +++ b/crates/forge/tests/cli/verify.rs @@ -75,9 +75,8 @@ contract Verify is Unique { #[allow(clippy::disallowed_macros)] fn parse_verification_result(cmd: &mut TestCommand, retries: u32) -> eyre::Result<()> { - // give etherscan some time to verify the contract - let retry = Retry::new(retries, Some(Duration::from_secs(30))); - retry.run(|| -> eyre::Result<()> { + // Give Etherscan some time to verify the contract. + Retry::new(retries, Duration::from_secs(30)).run(|| -> eyre::Result<()> { let output = cmd.execute(); let out = String::from_utf8_lossy(&output.stdout); println!("{out}"); @@ -94,9 +93,8 @@ fn parse_verification_result(cmd: &mut TestCommand, retries: u32) -> eyre::Resul fn await_verification_response(info: EnvExternalities, mut cmd: TestCommand) { let guid = { - // give etherscan some time to detect the transaction - let retry = Retry::new(5, Some(Duration::from_secs(60))); - retry + // Give Etherscan some time to detect the transaction. + Retry::new(5, Duration::from_secs(60)) .run(|| -> eyre::Result { let output = cmd.execute(); let out = String::from_utf8_lossy(&output.stdout); diff --git a/crates/forge/tests/cli/verify_bytecode.rs b/crates/forge/tests/cli/verify_bytecode.rs index 398ecb52d..6e89f1e94 100644 --- a/crates/forge/tests/cli/verify_bytecode.rs +++ b/crates/forge/tests/cli/verify_bytecode.rs @@ -2,7 +2,7 @@ use foundry_compilers::artifacts::{BytecodeHash, EvmVersion}; use foundry_config::Config; use foundry_test_utils::{ forgetest_async, - rpc::{next_http_archive_rpc_endpoint, next_mainnet_etherscan_api_key}, + rpc::{next_http_archive_rpc_url, next_mainnet_etherscan_api_key}, util::OutputExt, TestCommand, TestProject, }; @@ -20,7 +20,7 @@ fn test_verify_bytecode( expected_matches: (&str, &str), ) { let etherscan_key = next_mainnet_etherscan_api_key(); - let rpc_url = next_http_archive_rpc_endpoint(); + let rpc_url = next_http_archive_rpc_url(); // fetch and flatten source code let source_code = cmd @@ -75,7 +75,7 @@ fn test_verify_bytecode_with_ignore( chain: &str, ) { let etherscan_key = next_mainnet_etherscan_api_key(); - let rpc_url = next_http_archive_rpc_endpoint(); + let rpc_url = next_http_archive_rpc_url(); // fetch and flatten source code let source_code = cmd diff --git a/crates/forge/tests/it/cheats.rs b/crates/forge/tests/it/cheats.rs index 871cda045..11fcdbcfd 100644 --- a/crates/forge/tests/it/cheats.rs +++ b/crates/forge/tests/it/cheats.rs @@ -27,9 +27,9 @@ async fn test_cheats_local(test_data: &ForgeTestData) { filter = filter.exclude_contracts("(LastCallGasDefaultTest|MockFunctionTest|WithSeed)"); } - let mut config = test_data.config.clone(); - config.fs_permissions = FsPermissions::new(vec![PathPermission::read_write("./")]); - let runner = test_data.runner_with_config(config); + let runner = test_data.runner_with(|config| { + config.fs_permissions = FsPermissions::new(vec![PathPermission::read_write("./")]); + }); TestConfig::with_filter(runner, filter).run().await; } @@ -38,9 +38,9 @@ async fn test_cheats_local(test_data: 
&ForgeTestData) { async fn test_cheats_local_isolated(test_data: &ForgeTestData) { let filter = Filter::new(".*", ".*(Isolated)", &format!(".*cheats{RE_PATH_SEPARATOR}*")); - let mut config = test_data.config.clone(); - config.isolate = true; - let runner = test_data.runner_with_config(config); + let runner = test_data.runner_with(|config| { + config.isolate = true; + }); TestConfig::with_filter(runner, filter).run().await; } @@ -49,9 +49,9 @@ async fn test_cheats_local_isolated(test_data: &ForgeTestData) { async fn test_cheats_local_with_seed(test_data: &ForgeTestData) { let filter = Filter::new(".*", ".*(WithSeed)", &format!(".*cheats{RE_PATH_SEPARATOR}*")); - let mut config = test_data.config.clone(); - config.fuzz.seed = Some(U256::from(100)); - let runner = test_data.runner_with_config(config); + let runner = test_data.runner_with(|config| { + config.fuzz.seed = Some(U256::from(100)); + }); TestConfig::with_filter(runner, filter).run().await; } diff --git a/crates/forge/tests/it/config.rs b/crates/forge/tests/it/config.rs index 9cabd998a..655fae4db 100644 --- a/crates/forge/tests/it/config.rs +++ b/crates/forge/tests/it/config.rs @@ -31,8 +31,8 @@ impl TestConfig { Self { runner, should_fail: false, filter } } - pub fn evm_spec(mut self, spec: SpecId) -> Self { - self.runner.evm_spec = spec; + pub fn spec_id(mut self, spec: SpecId) -> Self { + self.runner.spec_id = spec; self } diff --git a/crates/forge/tests/it/core.rs b/crates/forge/tests/it/core.rs index c8a599195..94dc945e5 100644 --- a/crates/forge/tests/it/core.rs +++ b/crates/forge/tests/it/core.rs @@ -742,8 +742,8 @@ async fn test_trace() { assert_eq!( deployment_traces.count(), - 12, - "Test {test_name} did not have exactly 12 deployment trace." + 13, + "Test {test_name} did not have exactly 13 deployment trace." 
); assert!(setup_traces.count() <= 1, "Test {test_name} had more than 1 setup trace."); assert_eq!( @@ -758,9 +758,9 @@ async fn test_trace() { #[tokio::test(flavor = "multi_thread")] async fn test_assertions_revert_false() { let filter = Filter::new(".*", ".*NoAssertionsRevertTest", ".*"); - let mut config = TEST_DATA_DEFAULT.config.clone(); - config.assertions_revert = false; - let mut runner = TEST_DATA_DEFAULT.runner_with_config(config); + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.assertions_revert = false; + }); let results = runner.test_collect(&filter); assert_multiple( @@ -784,9 +784,9 @@ async fn test_assertions_revert_false() { #[tokio::test(flavor = "multi_thread")] async fn test_legacy_assertions() { let filter = Filter::new(".*", ".*LegacyAssertions", ".*"); - let mut config = TEST_DATA_DEFAULT.config.clone(); - config.legacy_assertions = true; - let mut runner = TEST_DATA_DEFAULT.runner_with_config(config); + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.legacy_assertions = true; + }); let results = runner.test_collect(&filter); assert_multiple( diff --git a/crates/forge/tests/it/fork.rs b/crates/forge/tests/it/fork.rs index 8dc637528..d84309275 100644 --- a/crates/forge/tests/it/fork.rs +++ b/crates/forge/tests/it/fork.rs @@ -35,9 +35,9 @@ async fn test_cheats_fork_revert() { /// Executes all non-reverting fork cheatcodes #[tokio::test(flavor = "multi_thread")] async fn test_cheats_fork() { - let mut config = TEST_DATA_PARIS.config.clone(); - config.fs_permissions = FsPermissions::new(vec![PathPermission::read("./fixtures")]); - let runner = TEST_DATA_PARIS.runner_with_config(config); + let runner = TEST_DATA_PARIS.runner_with(|config| { + config.fs_permissions = FsPermissions::new(vec![PathPermission::read("./fixtures")]); + }); let filter = Filter::new(".*", ".*", &format!(".*cheats{RE_PATH_SEPARATOR}Fork")) .exclude_tests(".*Revert"); TestConfig::with_filter(runner, filter).run().await; @@ -46,9 +46,9 @@ async fn test_cheats_fork() { /// Executes eth_getLogs cheatcode #[tokio::test(flavor = "multi_thread")] async fn test_get_logs_fork() { - let mut config = TEST_DATA_DEFAULT.config.clone(); - config.fs_permissions = FsPermissions::new(vec![PathPermission::read("./fixtures")]); - let runner = TEST_DATA_DEFAULT.runner_with_config(config); + let runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.fs_permissions = FsPermissions::new(vec![PathPermission::read("./fixtures")]); + }); let filter = Filter::new("testEthGetLogs", ".*", &format!(".*cheats{RE_PATH_SEPARATOR}Fork")) .exclude_tests(".*Revert"); TestConfig::with_filter(runner, filter).run().await; @@ -57,9 +57,9 @@ async fn test_get_logs_fork() { /// Executes rpc cheatcode #[tokio::test(flavor = "multi_thread")] async fn test_rpc_fork() { - let mut config = TEST_DATA_DEFAULT.config.clone(); - config.fs_permissions = FsPermissions::new(vec![PathPermission::read("./fixtures")]); - let runner = TEST_DATA_DEFAULT.runner_with_config(config); + let runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.fs_permissions = FsPermissions::new(vec![PathPermission::read("./fixtures")]); + }); let filter = Filter::new("testRpc", ".*", &format!(".*cheats{RE_PATH_SEPARATOR}Fork")) .exclude_tests(".*Revert"); TestConfig::with_filter(runner, filter).run().await; @@ -68,7 +68,7 @@ async fn test_rpc_fork() { /// Tests that we can launch in forking mode #[tokio::test(flavor = "multi_thread")] async fn test_launch_fork() { - let rpc_url = 
foundry_test_utils::rpc::next_http_archive_rpc_endpoint(); + let rpc_url = foundry_test_utils::rpc::next_http_archive_rpc_url(); let runner = TEST_DATA_DEFAULT.forked_runner(&rpc_url).await; let filter = Filter::new(".*", ".*", &format!(".*fork{RE_PATH_SEPARATOR}Launch")); TestConfig::with_filter(runner, filter).run().await; @@ -77,7 +77,7 @@ async fn test_launch_fork() { /// Smoke test that forking workings with websockets #[tokio::test(flavor = "multi_thread")] async fn test_launch_fork_ws() { - let rpc_url = foundry_test_utils::rpc::next_ws_archive_rpc_endpoint(); + let rpc_url = foundry_test_utils::rpc::next_ws_archive_rpc_url(); let runner = TEST_DATA_DEFAULT.forked_runner(&rpc_url).await; let filter = Filter::new(".*", ".*", &format!(".*fork{RE_PATH_SEPARATOR}Launch")); TestConfig::with_filter(runner, filter).run().await; @@ -102,25 +102,25 @@ async fn test_create_same_fork() { /// Test that `no_storage_caching` config is properly applied #[tokio::test(flavor = "multi_thread")] async fn test_storage_caching_config() { - // no_storage_caching set to true: storage should not be cached - let mut config = TEST_DATA_DEFAULT.config.clone(); - config.no_storage_caching = true; - let runner = TEST_DATA_DEFAULT.runner_with_config(config); let filter = Filter::new("testStorageCaching", ".*", &format!(".*cheats{RE_PATH_SEPARATOR}Fork")) .exclude_tests(".*Revert"); - TestConfig::with_filter(runner, filter).run().await; + + let runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.no_storage_caching = true; + }); + + // no_storage_caching set to true: storage should not be cached + TestConfig::with_filter(runner, filter.clone()).run().await; let cache_dir = Config::foundry_block_cache_dir(Chain::mainnet(), 19800000).unwrap(); let _ = fs::remove_file(cache_dir); - // no_storage_caching set to false: storage should be cached - let mut config = TEST_DATA_DEFAULT.config.clone(); - config.no_storage_caching = false; - let runner = TEST_DATA_DEFAULT.runner_with_config(config); - let filter = - Filter::new("testStorageCaching", ".*", &format!(".*cheats{RE_PATH_SEPARATOR}Fork")) - .exclude_tests(".*Revert"); + let runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.no_storage_caching = false; + }); TestConfig::with_filter(runner, filter).run().await; + + // no_storage_caching set to false: storage should be cached let cache_dir = Config::foundry_block_cache_dir(Chain::mainnet(), 19800000).unwrap(); assert!(cache_dir.exists()); diff --git a/crates/forge/tests/it/fs.rs b/crates/forge/tests/it/fs.rs index 5bb0b59fb..5733ec584 100644 --- a/crates/forge/tests/it/fs.rs +++ b/crates/forge/tests/it/fs.rs @@ -6,18 +6,18 @@ use foundry_test_utils::Filter; #[tokio::test(flavor = "multi_thread")] async fn test_fs_disabled() { - let mut config = TEST_DATA_DEFAULT.config.clone(); - config.fs_permissions = FsPermissions::new(vec![PathPermission::none("./")]); - let runner = TEST_DATA_DEFAULT.runner_with_config(config); + let runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.fs_permissions = FsPermissions::new(vec![PathPermission::none("./")]); + }); let filter = Filter::new(".*", ".*", ".*fs/Disabled"); TestConfig::with_filter(runner, filter).run().await; } #[tokio::test(flavor = "multi_thread")] async fn test_fs_default() { - let mut config = TEST_DATA_DEFAULT.config.clone(); - config.fs_permissions = FsPermissions::new(vec![PathPermission::read("./fixtures")]); - let runner = TEST_DATA_DEFAULT.runner_with_config(config); + let runner = TEST_DATA_DEFAULT.runner_with(|config| { + 
config.fs_permissions = FsPermissions::new(vec![PathPermission::read("./fixtures")]); + }); let filter = Filter::new(".*", ".*", ".*fs/Default"); TestConfig::with_filter(runner, filter).run().await; } diff --git a/crates/forge/tests/it/fuzz.rs b/crates/forge/tests/it/fuzz.rs index 8972c9bd9..8b49d4acc 100644 --- a/crates/forge/tests/it/fuzz.rs +++ b/crates/forge/tests/it/fuzz.rs @@ -82,11 +82,12 @@ async fn test_successful_fuzz_cases() { #[ignore] async fn test_fuzz_collection() { let filter = Filter::new(".*", ".*", ".*fuzz/FuzzCollection.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.depth = 100; - runner.test_options.invariant.runs = 1000; - runner.test_options.fuzz.runs = 1000; - runner.test_options.fuzz.seed = Some(U256::from(6u32)); + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.depth = 100; + config.invariant.runs = 1000; + config.fuzz.runs = 1000; + config.fuzz.seed = Some(U256::from(6u32)); + }); let results = runner.test_collect(&filter); assert_multiple( @@ -111,11 +112,14 @@ async fn test_fuzz_collection() { #[tokio::test(flavor = "multi_thread")] async fn test_persist_fuzz_failure() { let filter = Filter::new(".*", ".*", ".*fuzz/FuzzFailurePersist.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.fuzz.runs = 1000; - macro_rules! get_failure_result { - () => { + macro_rules! run_fail { + () => { run_fail!(|config| {}) }; + (|$config:ident| $e:expr) => {{ + let mut runner = TEST_DATA_DEFAULT.runner_with(|$config| { + $config.fuzz.runs = 1000; + $e + }); runner .test_collect(&filter) .get("default/fuzz/FuzzFailurePersist.t.sol:FuzzFailurePersistTest") @@ -125,11 +129,11 @@ async fn test_persist_fuzz_failure() { .unwrap() .counterexample .clone() - }; + }}; } // record initial counterexample calldata - let initial_counterexample = get_failure_result!(); + let initial_counterexample = run_fail!(); let initial_calldata = match initial_counterexample { Some(CounterExample::Single(counterexample)) => counterexample.calldata, _ => Bytes::new(), @@ -137,7 +141,7 @@ async fn test_persist_fuzz_failure() { // run several times and compare counterexamples calldata for i in 0..10 { - let new_calldata = match get_failure_result!() { + let new_calldata = match run_fail!() { Some(CounterExample::Single(counterexample)) => counterexample.calldata, _ => Bytes::new(), }; @@ -146,8 +150,9 @@ async fn test_persist_fuzz_failure() { } // write new failure in different file - runner.test_options.fuzz.failure_persist_file = Some("failure1".to_string()); - let new_calldata = match get_failure_result!() { + let new_calldata = match run_fail!(|config| { + config.fuzz.failure_persist_file = Some("failure1".to_string()); + }) { Some(CounterExample::Single(counterexample)) => counterexample.calldata, _ => Bytes::new(), }; @@ -235,3 +240,38 @@ contract InlineMaxRejectsTest is Test { ... "#]]); }); + +// Tests that test timeout config is properly applied. +// If test doesn't timeout after one second, then test will fail with `rejected too many inputs`. 
+forgetest_init!(test_fuzz_timeout, |prj, cmd| { + prj.wipe_contracts(); + + prj.add_test( + "Contract.t.sol", + r#" +import {Test} from "forge-std/Test.sol"; + +contract FuzzTimeoutTest is Test { + /// forge-config: default.fuzz.max-test-rejects = 10000 + /// forge-config: default.fuzz.timeout = 1 + function test_fuzz_bound(uint256 a) public pure { + vm.assume(a == 0); + } +} + "#, + ) + .unwrap(); + + cmd.args(["test"]).assert_success().stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! + +Ran 1 test for test/Contract.t.sol:FuzzTimeoutTest +[PASS] test_fuzz_bound(uint256) (runs: [..], [AVG_GAS]) +Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests) + +"#]]); +}); diff --git a/crates/forge/tests/it/inline.rs b/crates/forge/tests/it/inline.rs index 4448f982d..eab7f9ec1 100644 --- a/crates/forge/tests/it/inline.rs +++ b/crates/forge/tests/it/inline.rs @@ -1,15 +1,13 @@ //! Inline configuration tests. -use crate::test_helpers::{ForgeTestData, ForgeTestProfile, TEST_DATA_DEFAULT}; -use forge::{result::TestKind, TestOptionsBuilder}; -use foundry_config::{FuzzConfig, InvariantConfig}; +use crate::test_helpers::TEST_DATA_DEFAULT; +use forge::result::TestKind; use foundry_test_utils::Filter; #[tokio::test(flavor = "multi_thread")] async fn inline_config_run_fuzz() { let filter = Filter::new(".*", ".*", ".*inline/FuzzInlineConf.t.sol"); - // Fresh runner to make sure there's no persisted failure from previous tests. - let mut runner = ForgeTestData::new(ForgeTestProfile::Default).runner(); + let mut runner = TEST_DATA_DEFAULT.runner(); let result = runner.test_collect(&filter); let results = result .into_iter() @@ -70,31 +68,3 @@ async fn inline_config_run_invariant() { _ => unreachable!(), } } - -#[test] -fn build_test_options() { - let root = &TEST_DATA_DEFAULT.project.paths.root; - let profiles = vec!["default".to_string(), "ci".to_string()]; - let build_result = TestOptionsBuilder::default() - .fuzz(FuzzConfig::default()) - .invariant(InvariantConfig::default()) - .profiles(profiles) - .build(&TEST_DATA_DEFAULT.output, root); - - assert!(build_result.is_ok()); -} - -#[test] -fn build_test_options_just_one_valid_profile() { - let root = &TEST_DATA_DEFAULT.project.root(); - let valid_profiles = vec!["profile-sheldon-cooper".to_string()]; - let build_result = TestOptionsBuilder::default() - .fuzz(FuzzConfig::default()) - .invariant(InvariantConfig::default()) - .profiles(valid_profiles) - .build(&TEST_DATA_DEFAULT.output, root); - - // We expect an error, since COMPILED contains in-line - // per-test configs for "default" and "ci" profiles - assert!(build_result.is_err()); -} diff --git a/crates/forge/tests/it/invariant.rs b/crates/forge/tests/it/invariant.rs index 3e09cd465..ab04ae7b5 100644 --- a/crates/forge/tests/it/invariant.rs +++ b/crates/forge/tests/it/invariant.rs @@ -48,8 +48,9 @@ async fn test_invariant_with_alias() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_filters() { - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.runs = 10; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.runs = 10; + }); // Contracts filter tests. 
assert_multiple( @@ -173,9 +174,10 @@ async fn test_invariant_filters() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_override() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantReentrancy.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.fail_on_revert = false; - runner.test_options.invariant.call_override = true; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.fail_on_revert = false; + config.invariant.call_override = true; + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -189,10 +191,11 @@ async fn test_invariant_override() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_fail_on_revert() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantHandlerFailure.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.fail_on_revert = true; - runner.test_options.invariant.runs = 1; - runner.test_options.invariant.depth = 10; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.fail_on_revert = true; + config.invariant.runs = 1; + config.invariant.depth = 10; + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -213,9 +216,13 @@ async fn test_invariant_fail_on_revert() { #[ignore] async fn test_invariant_storage() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/storage/InvariantStorageTest.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.depth = 100 + (50 * cfg!(windows) as u32); - runner.test_options.fuzz.seed = Some(U256::from(6u32)); + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.depth = 100; + if cfg!(windows) { + config.invariant.depth += 50; + } + config.fuzz.seed = Some(U256::from(6u32)); + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -254,8 +261,9 @@ async fn test_invariant_inner_contract() { #[cfg_attr(windows, ignore = "for some reason there's different rng")] async fn test_invariant_shrink() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantInnerContract.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.fuzz.seed = Some(U256::from(119u32)); + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.fuzz.seed = Some(U256::from(119u32)); + }); match get_counterexample!(runner, &filter) { CounterExample::Single(_) => panic!("CounterExample should be a sequence."), @@ -300,10 +308,11 @@ async fn test_invariant_require_shrink() { async fn check_shrink_sequence(test_pattern: &str, expected_len: usize) { let filter = Filter::new(test_pattern, ".*", ".*fuzz/invariant/common/InvariantShrinkWithAssert.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.fuzz.seed = Some(U256::from(100u32)); - runner.test_options.invariant.runs = 1; - runner.test_options.invariant.depth = 15; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.fuzz.seed = Some(U256::from(100u32)); + config.invariant.runs = 1; + config.invariant.depth = 15; + }); match get_counterexample!(runner, &filter) { CounterExample::Single(_) => panic!("CounterExample should be a sequence."), @@ -318,10 +327,11 @@ async fn check_shrink_sequence(test_pattern: &str, expected_len: usize) { async fn test_shrink_big_sequence() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantShrinkBigSequence.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); 
- runner.test_options.fuzz.seed = Some(U256::from(119u32)); - runner.test_options.invariant.runs = 1; - runner.test_options.invariant.depth = 1000; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.fuzz.seed = Some(U256::from(119u32)); + config.invariant.runs = 1; + config.invariant.depth = 1000; + }); let initial_counterexample = runner .test_collect(&filter) @@ -390,11 +400,12 @@ async fn test_shrink_big_sequence() { async fn test_shrink_fail_on_revert() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantShrinkFailOnRevert.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.fuzz.seed = Some(U256::from(119u32)); - runner.test_options.invariant.fail_on_revert = true; - runner.test_options.invariant.runs = 1; - runner.test_options.invariant.depth = 200; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.fuzz.seed = Some(U256::from(119u32)); + config.invariant.fail_on_revert = true; + config.invariant.runs = 1; + config.invariant.depth = 200; + }); match get_counterexample!(runner, &filter) { CounterExample::Single(_) => panic!("CounterExample should be a sequence."), @@ -408,8 +419,9 @@ async fn test_shrink_fail_on_revert() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_preserve_state() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantPreserveState.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.fail_on_revert = true; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.fail_on_revert = true; + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -452,9 +464,10 @@ async fn test_invariant_with_address_fixture() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_assume_does_not_revert() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantAssume.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - // Should not treat vm.assume as revert. - runner.test_options.invariant.fail_on_revert = true; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + // Should not treat vm.assume as revert. 
+ config.invariant.fail_on_revert = true; + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -468,10 +481,11 @@ async fn test_invariant_assume_does_not_revert() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_assume_respects_restrictions() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantAssume.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.runs = 1; - runner.test_options.invariant.depth = 10; - runner.test_options.invariant.max_assume_rejects = 1; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.runs = 1; + config.invariant.depth = 10; + config.invariant.max_assume_rejects = 1; + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -491,8 +505,9 @@ async fn test_invariant_assume_respects_restrictions() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_decode_custom_error() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantCustomError.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.fail_on_revert = true; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.fail_on_revert = true; + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -512,8 +527,9 @@ async fn test_invariant_decode_custom_error() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_fuzzed_selected_targets() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/target/FuzzedTargetContracts.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.fail_on_revert = true; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.fail_on_revert = true; + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -539,9 +555,10 @@ async fn test_invariant_fuzzed_selected_targets() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_fixtures() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantFixtures.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.runs = 1; - runner.test_options.invariant.depth = 100; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.runs = 1; + config.invariant.depth = 100; + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -592,8 +609,9 @@ async fn test_invariant_scrape_values() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_roll_fork_handler() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantRollFork.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.fuzz.seed = Some(U256::from(119u32)); + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.fuzz.seed = Some(U256::from(119u32)); + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -625,8 +643,9 @@ async fn test_invariant_roll_fork_handler() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_excluded_senders() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantExcludedSenders.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.fail_on_revert = true; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.fail_on_revert = true; + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -670,10 +689,11 @@ async fn 
test_invariant_after_invariant() { #[tokio::test(flavor = "multi_thread")] async fn test_invariant_selectors_weight() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantSelectorsWeight.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.fuzz.seed = Some(U256::from(119u32)); - runner.test_options.invariant.runs = 1; - runner.test_options.invariant.depth = 10; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.fuzz.seed = Some(U256::from(119u32)); + config.invariant.runs = 1; + config.invariant.depth = 10; + }); let results = runner.test_collect(&filter); assert_multiple( &results, @@ -688,10 +708,11 @@ async fn test_invariant_selectors_weight() { async fn test_no_reverts_in_counterexample() { let filter = Filter::new(".*", ".*", ".*fuzz/invariant/common/InvariantSequenceNoReverts.t.sol"); - let mut runner = TEST_DATA_DEFAULT.runner(); - runner.test_options.invariant.fail_on_revert = false; - // Use original counterexample to test sequence len. - runner.test_options.invariant.shrink_run_limit = 0; + let mut runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.invariant.fail_on_revert = false; + // Use original counterexample to test sequence len. + config.invariant.shrink_run_limit = 0; + }); match get_counterexample!(runner, &filter) { CounterExample::Single(_) => panic!("CounterExample should be a sequence."), @@ -908,23 +929,87 @@ contract AnotherCounterHandler is Test { cmd.args(["test", "--mt", "invariant_"]).assert_success().stdout_eq(str![[r#" ... -Ran 2 tests for test/SelectorMetricsTest.t.sol:CounterTest [PASS] invariant_counter() (runs: 10, calls: 5000, reverts: [..]) + +╭-----------------------+----------------+-------+---------+----------╮ | Contract | Selector | Calls | Reverts | Discards | -|-----------------------|----------------|-------|---------|----------| -| AnotherCounterHandler | doWork | [..] | [..] | [..] | -| AnotherCounterHandler | doWorkThing | [..] | [..] | [..] | -| CounterHandler | doAnotherThing | [..] | [..] | [..] | -| CounterHandler | doSomething | [..] | [..] | [..] | ++=====================================================================+ +| AnotherCounterHandler | doWork | [..] | [..] | [..] | +|-----------------------+----------------+-------+---------+----------| +| AnotherCounterHandler | doWorkThing | [..] | [..] | [..] | +|-----------------------+----------------+-------+---------+----------| +| CounterHandler | doAnotherThing | [..] | [..] | [..] | +|-----------------------+----------------+-------+---------+----------| +| CounterHandler | doSomething | [..] | [..] | [..] | +╰-----------------------+----------------+-------+---------+----------╯ [PASS] invariant_counter2() (runs: 10, calls: 5000, reverts: [..]) + +╭-----------------------+----------------+-------+---------+----------╮ | Contract | Selector | Calls | Reverts | Discards | -|-----------------------|----------------|-------|---------|----------| -| AnotherCounterHandler | doWork | [..] | [..] | [..] | -| AnotherCounterHandler | doWorkThing | [..] | [..] | [..] | -| CounterHandler | doAnotherThing | [..] | [..] | [..] | -| CounterHandler | doSomething | [..] | [..] | [..] | ++=====================================================================+ +| AnotherCounterHandler | doWork | [..] | [..] | [..] | +|-----------------------+----------------+-------+---------+----------| +| AnotherCounterHandler | doWorkThing | [..] | [..] | [..] 
| +|-----------------------+----------------+-------+---------+----------| +| CounterHandler | doAnotherThing | [..] | [..] | [..] | +|-----------------------+----------------+-------+---------+----------| +| CounterHandler | doSomething | [..] | [..] | [..] | +╰-----------------------+----------------+-------+---------+----------╯ + +Suite result: ok. 2 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests) + +"#]]); +}); + +// Tests that invariant exists with success after configured timeout. +forgetest_init!(should_apply_configured_timeout, |prj, cmd| { + // Add initial test that breaks invariant. + prj.add_test( + "TimeoutTest.t.sol", + r#" +import {Test} from "forge-std/Test.sol"; + +contract TimeoutHandler is Test { + uint256 public count; + + function increment() public { + count++; + } +} + +contract TimeoutTest is Test { + TimeoutHandler handler; + + function setUp() public { + handler = new TimeoutHandler(); + } + + /// forge-config: default.invariant.runs = 10000 + /// forge-config: default.invariant.depth = 20000 + /// forge-config: default.invariant.timeout = 1 + function invariant_counter_timeout() public view { + // Invariant will fail if more than 10000 increments. + // Make sure test timeouts after one second and remaining runs are canceled. + require(handler.count() < 10000); + } +} + "#, + ) + .unwrap(); + + cmd.args(["test", "--mt", "invariant_counter_timeout"]).assert_success().stdout_eq(str![[r#" +[COMPILING_FILES] with [SOLC_VERSION] +[SOLC_VERSION] [ELAPSED] +Compiler run successful! + +Ran 1 test for test/TimeoutTest.t.sol:TimeoutTest +[PASS] invariant_counter_timeout() (runs: 0, calls: 0, reverts: 0) +Suite result: ok. 1 passed; 0 failed; 0 skipped; [ELAPSED] + +Ran 1 test suite [ELAPSED]: 1 tests passed, 0 failed, 0 skipped (1 total tests) -... "#]]); }); diff --git a/crates/forge/tests/it/repros.rs b/crates/forge/tests/it/repros.rs index f3c623b38..fb7114af3 100644 --- a/crates/forge/tests/it/repros.rs +++ b/crates/forge/tests/it/repros.rs @@ -1,11 +1,6 @@ //! Regression tests for previous issues. -use std::sync::Arc; - -use crate::{ - config::*, - test_helpers::{ForgeTestData, TEST_DATA_DEFAULT}, -}; +use crate::{config::*, test_helpers::TEST_DATA_DEFAULT}; use alloy_dyn_abi::{DecodedEvent, DynSolValue, EventExt}; use alloy_json_abi::Event; use alloy_primitives::{address, b256, Address, U256}; @@ -17,6 +12,7 @@ use foundry_config::{fs_permissions::PathPermission, Config, FsPermissions}; use foundry_evm::traces::{CallKind, CallTraceDecoder, DecodedCallData, TraceKind}; use foundry_evm_abi::HARDHAT_CONSOLE_ADDRESS; use foundry_test_utils::Filter; +use std::sync::Arc; /// Creates a test that runs `testdata/repros/Issue{issue}.t.sol`. macro_rules! test_repro { @@ -31,7 +27,7 @@ macro_rules! test_repro { #[tokio::test(flavor = "multi_thread")] $(#[$attr])* async fn [< issue_ $issue_number >]() { - repro_config($issue_number, $should_fail, $sender.into(), &*TEST_DATA_DEFAULT).await.run().await; + repro_config($issue_number, $should_fail, $sender.into()).await.run().await; } } }; @@ -40,7 +36,7 @@ macro_rules! test_repro { #[tokio::test(flavor = "multi_thread")] $(#[$attr])* async fn [< issue_ $issue_number >]() { - let mut $res = repro_config($issue_number, $should_fail, $sender.into(), &*TEST_DATA_DEFAULT).await.test(); + let mut $res = repro_config($issue_number, $should_fail, $sender.into()).await.test(); $e } } @@ -50,7 +46,7 @@ macro_rules! 
test_repro { #[tokio::test(flavor = "multi_thread")] $(#[$attr])* async fn [< issue_ $issue_number >]() { - let mut $config = repro_config($issue_number, false, None, &*TEST_DATA_DEFAULT).await; + let mut $config = repro_config($issue_number, false, None).await; $e $config.run().await; } @@ -59,23 +55,19 @@ macro_rules! test_repro { } pub(crate) use test_repro; -async fn repro_config( - issue: usize, - should_fail: bool, - sender: Option
, - test_data: &ForgeTestData, -) -> TestConfig { +async fn repro_config(issue: usize, should_fail: bool, sender: Option
) -> TestConfig { foundry_test_utils::init_tracing(); let filter = Filter::path(&format!(".*repros/Issue{issue}.t.sol")); - let mut config = test_data.config.clone(); - config.fs_permissions = - FsPermissions::new(vec![PathPermission::read("./fixtures"), PathPermission::read("out")]); - if let Some(sender) = sender { - config.sender = sender; - } - - let runner = TEST_DATA_DEFAULT.runner_with_config(config); + let runner = TEST_DATA_DEFAULT.runner_with(|config| { + config.fs_permissions = FsPermissions::new(vec![ + PathPermission::read("./fixtures"), + PathPermission::read("out"), + ]); + if let Some(sender) = sender { + config.sender = sender; + } + }); TestConfig::with_filter(runner, filter).set_should_fail(should_fail) } @@ -187,6 +179,9 @@ test_repro!(3753); // https://github.com/foundry-rs/foundry/issues/3792 test_repro!(3792); +// https://github.com/foundry-rs/foundry/issues/4232 +test_repro!(4232); + // https://github.com/foundry-rs/foundry/issues/4402 test_repro!(4402); @@ -306,7 +301,7 @@ test_repro!(6538); // https://github.com/foundry-rs/foundry/issues/6554 test_repro!(6554; |config| { - let path = config.runner.config.root.0.join("out/default/Issue6554.t.sol"); + let path = config.runner.config.root.join("out/default/Issue6554.t.sol"); let mut prj_config = Config::clone(&config.runner.config); prj_config.fs_permissions.add(PathPermission::read_write(path)); @@ -390,3 +385,6 @@ test_repro!(8971; |config| { prj_config.isolate = true; config.runner.config = Arc::new(prj_config); }); + +// https://github.com/foundry-rs/foundry/issues/8639 +test_repro!(8639); diff --git a/crates/forge/tests/it/spec.rs b/crates/forge/tests/it/spec.rs index aed2063a0..52e581c33 100644 --- a/crates/forge/tests/it/spec.rs +++ b/crates/forge/tests/it/spec.rs @@ -7,8 +7,5 @@ use foundry_test_utils::Filter; #[tokio::test(flavor = "multi_thread")] async fn test_shanghai_compat() { let filter = Filter::new("", "ShanghaiCompat", ".*spec"); - TestConfig::with_filter(TEST_DATA_PARIS.runner(), filter) - .evm_spec(SpecId::SHANGHAI) - .run() - .await; + TestConfig::with_filter(TEST_DATA_PARIS.runner(), filter).spec_id(SpecId::SHANGHAI).run().await; } diff --git a/crates/forge/tests/it/test_helpers.rs b/crates/forge/tests/it/test_helpers.rs index fe06d785f..bf5b61966 100644 --- a/crates/forge/tests/it/test_helpers.rs +++ b/crates/forge/tests/it/test_helpers.rs @@ -2,12 +2,10 @@ use alloy_chains::NamedChain; use alloy_primitives::U256; -use forge::{ - revm::primitives::SpecId, MultiContractRunner, MultiContractRunnerBuilder, TestOptions, - TestOptionsBuilder, -}; +use forge::{revm::primitives::SpecId, MultiContractRunner, MultiContractRunnerBuilder}; use foundry_compilers::{ artifacts::{EvmVersion, Libraries, Settings}, + compilers::multi::MultiCompiler, utils::RuntimeOrHandle, zksolc::ZkSolcCompiler, zksync::{ @@ -87,10 +85,6 @@ impl ForgeTestProfile { SolcConfig { settings } } - pub fn project(&self) -> Project { - self.config().project().expect("Failed to build project") - } - pub fn zk_project(&self) -> ZkProject { let zk_config = self.zk_config(); let mut zk_project = @@ -107,67 +101,6 @@ impl ForgeTestProfile { zk_project } - pub fn test_opts(&self, output: &ProjectCompileOutput) -> TestOptions { - TestOptionsBuilder::default() - .fuzz(FuzzConfig { - runs: 256, - max_test_rejects: 65536, - seed: None, - dictionary: FuzzDictionaryConfig { - include_storage: true, - include_push_bytes: true, - dictionary_weight: 40, - max_fuzz_dictionary_addresses: 10_000, - max_fuzz_dictionary_values: 10_000, - }, - 
gas_report_samples: 256, - failure_persist_dir: Some(tempfile::tempdir().unwrap().into_path()), - failure_persist_file: Some("testfailure".to_string()), - no_zksync_reserved_addresses: false, - show_logs: false, - }) - .invariant(InvariantConfig { - runs: 256, - depth: 15, - fail_on_revert: false, - call_override: false, - dictionary: FuzzDictionaryConfig { - dictionary_weight: 80, - include_storage: true, - include_push_bytes: true, - max_fuzz_dictionary_addresses: 10_000, - max_fuzz_dictionary_values: 10_000, - }, - shrink_run_limit: 5000, - max_assume_rejects: 65536, - gas_report_samples: 256, - failure_persist_dir: Some(tempfile::tempdir().unwrap().into_path()), - show_metrics: false, - no_zksync_reserved_addresses: false, - }) - .build(output, Path::new(self.project().root())) - .expect("Config loaded") - } - - pub fn evm_opts(&self) -> EvmOpts { - EvmOpts { - env: Env { - gas_limit: u64::MAX, - chain_id: None, - tx_origin: CALLER, - block_number: 1, - block_timestamp: 1, - ..Default::default() - }, - sender: CALLER, - initial_balance: U256::MAX, - ffi: true, - verbosity: 3, - memory_limit: 1 << 26, - ..Default::default() - } - } - /// Build [Config] for test profile. /// /// Project source files are read from testdata/{profile_name} @@ -186,11 +119,68 @@ impl ForgeTestProfile { "fork/Fork.t.sol:DssExecLib:0xfD88CeE74f7D78697775aBDAE53f9Da1559728E4".to_string(), ]; + config.prompt_timeout = 0; + + config.gas_limit = u64::MAX.into(); + config.chain = None; + config.tx_origin = CALLER; + config.block_number = 1; + config.block_timestamp = 1; + + config.sender = CALLER; + config.initial_balance = U256::MAX; + config.ffi = true; + config.verbosity = 3; + config.memory_limit = 1 << 26; + if self.is_paris() { config.evm_version = EvmVersion::Paris; } - config + config.fuzz = FuzzConfig { + runs: 256, + max_test_rejects: 65536, + seed: None, + dictionary: FuzzDictionaryConfig { + include_storage: true, + include_push_bytes: true, + dictionary_weight: 40, + max_fuzz_dictionary_addresses: 10_000, + max_fuzz_dictionary_values: 10_000, + }, + gas_report_samples: 256, + failure_persist_dir: Some(tempfile::tempdir().unwrap().into_path()), + failure_persist_file: Some("testfailure".to_string()), + show_logs: false, + timeout: None, + }; + config.invariant = InvariantConfig { + runs: 256, + depth: 15, + fail_on_revert: false, + call_override: false, + dictionary: FuzzDictionaryConfig { + dictionary_weight: 80, + include_storage: true, + include_push_bytes: true, + max_fuzz_dictionary_addresses: 10_000, + max_fuzz_dictionary_values: 10_000, + }, + shrink_run_limit: 5000, + max_assume_rejects: 65536, + gas_report_samples: 256, + failure_persist_dir: Some( + tempfile::Builder::new() + .prefix(&format!("foundry-{self}")) + .tempdir() + .unwrap() + .into_path(), + ), + show_metrics: false, + timeout: None, + }; + + config.sanitized() } /// Build [Config] for zksync test profile. @@ -233,9 +223,7 @@ pub struct ZkTestData { pub struct ForgeTestData { pub project: Project, pub output: ProjectCompileOutput, - pub test_opts: TestOptions, - pub evm_opts: EvmOpts, - pub config: Config, + pub config: Arc, pub profile: ForgeTestProfile, pub zk_test_data: ZkTestData, } @@ -246,16 +234,15 @@ impl ForgeTestData { /// Uses [get_compiled] to lazily compile the project. pub fn new(profile: ForgeTestProfile) -> Self { init_tracing(); +<<<<<<< HEAD // NOTE(zk): We need to manually install the crypto provider as zksync-era uses `aws-lc-rs` // provider, while foundry uses the `ring` provider. 
As a result, rustls cannot // disambiguate between the two while selecting a default provider. let _ = rustls::crypto::ring::default_provider().install_default(); - let mut project = profile.project(); + let config = Arc::new(profile.config()); + let mut project = config.project().unwrap(); let output = get_compiled(&mut project); - let test_opts = profile.test_opts(&output); - let config = profile.config(); - let evm_opts = profile.evm_opts(); let zk_test_data = { let zk_config = profile.zk_config(); @@ -269,28 +256,23 @@ impl ForgeTestData { ZkTestData { dual_compiled_contracts, zk_config, zk_project, output, zk_output } }; - Self { project, output, test_opts, evm_opts, config, profile, zk_test_data } + Self { project, output, config, profile, zk_test_data } } /// Builds a base runner pub fn base_runner(&self) -> MultiContractRunnerBuilder { init_tracing(); - let mut runner = MultiContractRunnerBuilder::new(Arc::new(self.config.clone())) - .sender(self.evm_opts.sender) - .with_test_options(self.test_opts.clone()); + let config = self.config.clone(); + let mut runner = MultiContractRunnerBuilder::new(config).sender(self.config.sender); if self.profile.is_paris() { runner = runner.evm_spec(SpecId::MERGE); } - runner } /// Builds a non-tracing runner pub fn runner(&self) -> MultiContractRunner { - let mut config = self.config.clone(); - config.fs_permissions = - FsPermissions::new(vec![PathPermission::read_write(manifest_root())]); - self.runner_with_config(config) + self.runner_with(|_| {}) } /// Builds a non-tracing zksync runner @@ -303,32 +285,31 @@ impl ForgeTestData { } /// Builds a non-tracing runner - pub fn runner_with_config(&self, mut config: Config) -> MultiContractRunner { + pub fn runner_with(&self, modify: impl FnOnce(&mut Config)) -> MultiContractRunner { + let mut config = (*self.config).clone(); + modify(&mut config); + self.runner_with_config(config) + } + + fn runner_with_config(&self, mut config: Config) -> MultiContractRunner { config.rpc_endpoints = rpc_endpoints(); config.allow_paths.push(manifest_root().to_path_buf()); - // no prompt testing - config.prompt_timeout = 0; - - let root = self.project.root(); - let mut opts = self.evm_opts.clone(); - - if config.isolate { - opts.isolate = true; + if config.fs_permissions.is_empty() { + config.fs_permissions = + FsPermissions::new(vec![PathPermission::read_write(manifest_root())]); } - let env = opts.local_evm_env(); - let output = self.output.clone(); - - let sender = config.sender; + let opts = config_evm_opts(&config); let mut builder = self.base_runner(); - builder.config = Arc::new(config); + let config = Arc::new(config); + let root = self.project.root(); + builder.config = config.clone(); builder .enable_isolation(opts.isolate) - .sender(sender) - .with_test_options(self.test_opts.clone()) - .build(root, output, None, env, opts, Default::default()) + .sender(config.sender) + .build::(root, &self.output, opts.local_evm_env(), opts) .unwrap() } @@ -368,23 +349,16 @@ impl ForgeTestData { /// Builds a tracing runner pub fn tracing_runner(&self) -> MultiContractRunner { - let mut opts = self.evm_opts.clone(); + let mut opts = config_evm_opts(&self.config); opts.verbosity = 5; self.base_runner() - .build( - self.project.root(), - self.output.clone(), - None, - opts.local_evm_env(), - opts, - Default::default(), - ) + .build::(self.project.root(), &self.output, opts.local_evm_env(), opts) .unwrap() } /// Builds a runner that runs against forked state pub async fn forked_runner(&self, rpc: &str) -> MultiContractRunner { - 
let mut opts = self.evm_opts.clone(); + let mut opts = config_evm_opts(&self.config); opts.env.chain_id = None; // clear chain id so the correct one gets fetched from the RPC opts.fork_url = Some(rpc.to_string()); @@ -394,7 +368,7 @@ impl ForgeTestData { self.base_runner() .with_fork(fork) - .build(self.project.root(), self.output.clone(), None, env, opts, Default::default()) + .build::(self.project.root(), &self.output, env, opts) .unwrap() } } @@ -551,6 +525,7 @@ pub fn rpc_endpoints() -> RpcEndpoints { ("arbitrum", RpcEndpoint::Url(next_rpc_endpoint(NamedChain::Arbitrum))), ("polygon", RpcEndpoint::Url(next_rpc_endpoint(NamedChain::Polygon))), ("avaxTestnet", RpcEndpoint::Url("https://api.avax-test.network/ext/bc/C/rpc".into())), + ("moonbeam", RpcEndpoint::Url("https://moonbeam-rpc.publicnode.com".into())), ("rpcEnvAlias", RpcEndpoint::Env("${RPC_ENV_ALIAS}".into())), ]) } @@ -676,3 +651,7 @@ pub fn deploy_zk_contract( Err(format!("Deployment failed. Stdout: {stdout}\nStderr: {stderr}")) } } + +fn config_evm_opts(config: &Config) -> EvmOpts { + config.to_figment(foundry_config::FigmentProviders::None).extract().unwrap() +} diff --git a/crates/macros/src/cheatcodes.rs b/crates/macros/src/cheatcodes.rs index d9c2d2c91..4d0f260c2 100644 --- a/crates/macros/src/cheatcodes.rs +++ b/crates/macros/src/cheatcodes.rs @@ -58,6 +58,17 @@ fn derive_call(name: &Ident, data: &DataStruct, attrs: &[Attribute]) -> Result(); if required_addresses.contains(&Config::DEFAULT_SENDER) { @@ -463,8 +468,10 @@ impl BundledState { seq_progress.inner.write().finish(); } - sh_println!("\n\n==========================")?; - sh_println!("\nONCHAIN EXECUTION COMPLETE & SUCCESSFUL.")?; + if !shell::is_json() { + sh_println!("\n\n==========================")?; + sh_println!("\nONCHAIN EXECUTION COMPLETE & SUCCESSFUL.")?; + } Ok(BroadcastedState { args: self.args, diff --git a/crates/script/src/build.rs b/crates/script/src/build.rs index 3bb0da6a7..4ea94b298 100644 --- a/crates/script/src/build.rs +++ b/crates/script/src/build.rs @@ -22,7 +22,7 @@ use foundry_compilers::{ zksync::compile::output::ProjectCompileOutput as ZkProjectCompileOutput, ArtifactId, ProjectCompileOutput, }; -use foundry_evm::{constants::DEFAULT_CREATE2_DEPLOYER, traces::debug::ContractSources}; +use foundry_evm::traces::debug::ContractSources; use foundry_linking::Linker; use foundry_zksync_compiler::DualCompiledContracts; use std::{collections::BTreeMap, path::PathBuf, str::FromStr, sync::Arc}; @@ -49,9 +49,10 @@ impl BuildData { /// Links contracts. Uses CREATE2 linking when possible, otherwise falls back to /// default linking with sender nonce and address. 
pub async fn link(self, script_config: &ScriptConfig) -> Result { + let create2_deployer = script_config.evm_opts.create2_deployer; let can_use_create2 = if let Some(fork_url) = &script_config.evm_opts.fork_url { let provider = try_get_http_provider(fork_url)?; - let deployer_code = provider.get_code_at(DEFAULT_CREATE2_DEPLOYER).await?; + let deployer_code = provider.get_code_at(create2_deployer).await?; !deployer_code.is_empty() } else { @@ -66,7 +67,7 @@ impl BuildData { self.get_linker() .link_with_create2( known_libraries.clone(), - DEFAULT_CREATE2_DEPLOYER, + create2_deployer, script_config.config.create2_library_salt, &self.target, ) @@ -263,8 +264,8 @@ impl PreprocessedState { if id.name != *name { continue; } - } else if contract.abi.as_ref().map_or(true, |abi| abi.is_empty()) || - contract.bytecode.as_ref().map_or(true, |b| match &b.object { + } else if contract.abi.as_ref().is_none_or(|abi| abi.is_empty()) || + contract.bytecode.as_ref().is_none_or(|b| match &b.object { BytecodeObject::Bytecode(b) => b.is_empty(), BytecodeObject::Unlinked(_) => false, }) diff --git a/crates/script/src/execute.rs b/crates/script/src/execute.rs index c47c8ac4c..f4a57c07a 100644 --- a/crates/script/src/execute.rs +++ b/crates/script/src/execute.rs @@ -12,7 +12,6 @@ use alloy_primitives::{ }; use alloy_provider::Provider; use alloy_rpc_types::TransactionInput; -use async_recursion::async_recursion; use eyre::{OptionExt, Result}; use foundry_cheatcodes::Wallets; use foundry_cli::utils::{ensure_clean_constructor, needs_setup}; @@ -102,7 +101,6 @@ pub struct PreExecutionState { impl PreExecutionState { /// Executes the script and returns the state after execution. /// Might require executing script twice in cases when we determine sender from execution. - #[async_recursion] pub async fn execute(mut self) -> Result { let mut runner = self .script_config @@ -129,7 +127,7 @@ impl PreExecutionState { build_data: self.build_data.build_data, }; - return state.link().await?.prepare_execution().await?.execute().await; + return Box::pin(state.link().await?.prepare_execution().await?.execute()).await; } Ok(ExecutedState { @@ -191,7 +189,7 @@ impl PreExecutionState { if let Some(txs) = transactions { // If the user passed a `--sender` don't check anything. if self.build_data.predeploy_libraries.libraries_count() > 0 && - self.args.evm_opts.sender.is_none() + self.args.evm_args.sender.is_none() { for tx in txs.iter() { if tx.transaction.to().is_none() { diff --git a/crates/script/src/lib.rs b/crates/script/src/lib.rs index ea6a0545b..03c9dbd10 100644 --- a/crates/script/src/lib.rs +++ b/crates/script/src/lib.rs @@ -47,7 +47,6 @@ use foundry_config::{ }; use foundry_evm::{ backend::Backend, - constants::DEFAULT_CREATE2_DEPLOYER, executors::ExecutorBuilder, inspectors::{ cheatcodes::{BroadcastableTransactions, Wallets}, @@ -76,7 +75,7 @@ mod transaction; mod verify; // Loads project's figment and merges the build cli arguments into it -foundry_config::merge_impl_figment_convert!(ScriptArgs, opts, evm_opts); +foundry_config::merge_impl_figment_convert!(ScriptArgs, opts, evm_args); /// CLI arguments for `forge script`. 
#[derive(Clone, Debug, Default, Parser)] @@ -213,7 +212,7 @@ pub struct ScriptArgs { pub wallets: MultiWalletOpts, #[command(flatten)] - pub evm_opts: EvmArgs, + pub evm_args: EvmArgs, #[command(flatten)] pub verifier: forge_verify::VerifierArgs, @@ -225,7 +224,7 @@ pub struct ScriptArgs { impl ScriptArgs { pub async fn preprocess(self) -> Result { let script_wallets = - Wallets::new(self.wallets.get_multi_wallet().await?, self.evm_opts.sender); + Wallets::new(self.wallets.get_multi_wallet().await?, self.evm_args.sender); let (config, mut evm_opts) = self.load_config_and_evm_opts_emit_warnings()?; @@ -242,7 +241,9 @@ impl ScriptArgs { pub async fn run_script(self) -> Result<()> { trace!(target: "script", "executing script command"); - let compiled = self.preprocess().await?.compile()?; + let state = self.preprocess().await?; + let create2_deployer = state.script_config.evm_opts.create2_deployer; + let compiled = state.compile()?; // Move from `CompiledState` to `BundledState` either by resuming or executing and // simulating script. @@ -280,20 +281,24 @@ impl ScriptArgs { .execution_result .transactions .as_ref() - .map_or(true, |txs| txs.is_empty()) + .is_none_or(|txs| txs.is_empty()) { return Ok(()); } // Check if there are any missing RPCs and exit early to avoid hard error. if pre_simulation.execution_artifacts.rpc_data.missing_rpc { - sh_println!("\nIf you wish to simulate on-chain transactions pass a RPC URL.")?; + if !shell::is_json() { + sh_println!("\nIf you wish to simulate on-chain transactions pass a RPC URL.")?; + } + return Ok(()); } pre_simulation.args.check_contract_sizes( &pre_simulation.execution_result, &pre_simulation.build_data.known_contracts, + create2_deployer, )?; pre_simulation.fill_metadata().await?.bundle().await? @@ -301,7 +306,9 @@ impl ScriptArgs { // Exit early in case user didn't provide any broadcast/verify related flags. if !bundled.args.should_broadcast() { - sh_println!("\nSIMULATION COMPLETE. To broadcast these transactions, add --broadcast and wallet configuration(s) to the previous command. See forge script --help for more.")?; + if !shell::is_json() { + sh_println!("\nSIMULATION COMPLETE. To broadcast these transactions, add --broadcast and wallet configuration(s) to the previous command. See forge script --help for more.")?; + } return Ok(()); } @@ -382,6 +389,7 @@ impl ScriptArgs { &self, result: &ScriptResult, known_contracts: &ContractsByArtifact, + create2_deployer: Address, ) -> Result<()> { //TODO: zk mode contract size check @@ -411,7 +419,7 @@ impl ScriptArgs { } let mut prompt_user = false; - let max_size = match self.evm_opts.env.code_size_limit { + let max_size = match self.evm_args.env.code_size_limit { Some(size) => size, None => CONTRACT_MAX_SIZE, }; @@ -428,7 +436,7 @@ impl ScriptArgs { // Find if it's a CREATE or CREATE2. Otherwise, skip transaction. if let Some(TxKind::Call(to)) = to { - if to == DEFAULT_CREATE2_DEPLOYER { + if to == create2_deployer { // Size of the salt prefix. 
offset = 32; } else { @@ -553,6 +561,7 @@ impl ScriptConfig { // dapptools compatibility 1 }; + Ok(Self { config, evm_opts, sender_nonce, backends: HashMap::default() }) } @@ -616,9 +625,10 @@ impl ScriptConfig { .inspectors(|stack| { stack .trace_mode(if debug { TraceMode::Debug } else { TraceMode::Call }) - .alphanet(self.evm_opts.alphanet) + .odyssey(self.evm_opts.odyssey) + .create2_deployer(self.evm_opts.create2_deployer) }) - .spec(self.config.evm_spec_id()) + .spec_id(self.config.evm_spec_id()) .gas_limit(self.evm_opts.gas_limit()) .legacy_assertions(self.config.legacy_assertions); @@ -769,7 +779,7 @@ mod tests { "--code-size-limit", "50000", ]); - assert_eq!(args.evm_opts.env.code_size_limit, Some(50000)); + assert_eq!(args.evm_args.env.code_size_limit, Some(50000)); } #[test] diff --git a/crates/script/src/multi_sequence.rs b/crates/script/src/multi_sequence.rs index ec2f03ae9..e0fd4d1bc 100644 --- a/crates/script/src/multi_sequence.rs +++ b/crates/script/src/multi_sequence.rs @@ -2,7 +2,7 @@ use eyre::{ContextCompat, Result, WrapErr}; use forge_script_sequence::{ now, sig_to_file_name, ScriptSequence, SensitiveScriptSequence, DRY_RUN_DIR, }; -use foundry_common::fs; +use foundry_common::{fs, shell}; use foundry_compilers::ArtifactId; use foundry_config::Config; use serde::{Deserialize, Serialize}; @@ -146,8 +146,19 @@ impl MultiChainSequence { } if !silent { - sh_println!("\nTransactions saved to: {}\n", self.path.display())?; - sh_println!("Sensitive details saved to: {}\n", self.sensitive_path.display())?; + if shell::is_json() { + sh_println!( + "{}", + serde_json::json!({ + "status": "success", + "transactions": self.path.display().to_string(), + "sensitive": self.sensitive_path.display().to_string(), + }) + )?; + } else { + sh_println!("\nTransactions saved to: {}\n", self.path.display())?; + sh_println!("Sensitive details saved to: {}\n", self.sensitive_path.display())?; + } } Ok(()) diff --git a/crates/script/src/progress.rs b/crates/script/src/progress.rs index 050ce4002..26d6a54af 100644 --- a/crates/script/src/progress.rs +++ b/crates/script/src/progress.rs @@ -7,7 +7,7 @@ use alloy_primitives::{ use eyre::Result; use forge_script_sequence::ScriptSequence; use foundry_cli::utils::init_progress; -use foundry_common::provider::RetryProvider; +use foundry_common::{provider::RetryProvider, shell}; use futures::StreamExt; use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; use parking_lot::RwLock; @@ -31,33 +31,42 @@ pub struct SequenceProgressState { impl SequenceProgressState { pub fn new(sequence_idx: usize, sequence: &ScriptSequence, multi: MultiProgress) -> Self { - let mut template = "{spinner:.green}".to_string(); - write!(template, " Sequence #{} on {}", sequence_idx + 1, Chain::from(sequence.chain)) - .unwrap(); - template.push_str("{msg}"); - - let top_spinner = ProgressBar::new_spinner() - .with_style(ProgressStyle::with_template(&template).unwrap().tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈✅")); - let top_spinner = multi.add(top_spinner); - - let txs = multi.insert_after( - &top_spinner, - init_progress(sequence.transactions.len() as u64, "txes").with_prefix(" "), - ); - - let receipts = multi.insert_after( - &txs, - init_progress(sequence.transactions.len() as u64, "receipts").with_prefix(" "), - ); - - top_spinner.enable_steady_tick(Duration::from_millis(100)); - txs.enable_steady_tick(Duration::from_millis(1000)); - receipts.enable_steady_tick(Duration::from_millis(1000)); - - txs.set_position(sequence.receipts.len() as u64); - receipts.set_position(sequence.receipts.len() 
as u64); - - let mut state = Self { top_spinner, txs, receipts, tx_spinners: Default::default(), multi }; + let mut state = if shell::is_quiet() || shell::is_json() { + let top_spinner = ProgressBar::hidden(); + let txs = ProgressBar::hidden(); + let receipts = ProgressBar::hidden(); + + Self { top_spinner, txs, receipts, tx_spinners: Default::default(), multi } + } else { + let mut template = "{spinner:.green}".to_string(); + write!(template, " Sequence #{} on {}", sequence_idx + 1, Chain::from(sequence.chain)) + .unwrap(); + template.push_str("{msg}"); + + let top_spinner = ProgressBar::new_spinner().with_style( + ProgressStyle::with_template(&template).unwrap().tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈✅"), + ); + let top_spinner = multi.add(top_spinner); + + let txs = multi.insert_after( + &top_spinner, + init_progress(sequence.transactions.len() as u64, "txes").with_prefix(" "), + ); + + let receipts = multi.insert_after( + &txs, + init_progress(sequence.transactions.len() as u64, "receipts").with_prefix(" "), + ); + + top_spinner.enable_steady_tick(Duration::from_millis(100)); + txs.enable_steady_tick(Duration::from_millis(1000)); + receipts.enable_steady_tick(Duration::from_millis(1000)); + + txs.set_position(sequence.receipts.len() as u64); + receipts.set_position(sequence.receipts.len() as u64); + + Self { top_spinner, txs, receipts, tx_spinners: Default::default(), multi } + }; for tx_hash in sequence.pending.iter() { state.tx_sent(*tx_hash); @@ -71,16 +80,21 @@ impl SequenceProgressState { pub fn tx_sent(&mut self, tx_hash: B256) { // Avoid showing more than 10 spinners. if self.tx_spinners.len() < 10 { - let spinner = ProgressBar::new_spinner() - .with_style( - ProgressStyle::with_template(" {spinner:.green} {msg}") - .unwrap() - .tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈"), - ) - .with_message(format!("{} {}", "[Pending]".yellow(), tx_hash)); - - let spinner = self.multi.insert_before(&self.txs, spinner); - spinner.enable_steady_tick(Duration::from_millis(100)); + let spinner = if shell::is_quiet() || shell::is_json() { + ProgressBar::hidden() + } else { + let spinner = ProgressBar::new_spinner() + .with_style( + ProgressStyle::with_template(" {spinner:.green} {msg}") + .unwrap() + .tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈"), + ) + .with_message(format!("{} {}", "[Pending]".yellow(), tx_hash)); + + let spinner = self.multi.insert_before(&self.txs, spinner); + spinner.enable_steady_tick(Duration::from_millis(100)); + spinner + }; self.tx_spinners.insert(tx_hash, spinner); } @@ -98,7 +112,10 @@ impl SequenceProgressState { /// Same as finish_tx_spinner but also prints a message to stdout above all other progress bars. 
pub fn finish_tx_spinner_with_msg(&mut self, tx_hash: B256, msg: &str) -> std::io::Result<()> { self.finish_tx_spinner(tx_hash); - self.multi.println(msg)?; + + if !(shell::is_quiet() || shell::is_json()) { + self.multi.println(msg)?; + } Ok(()) } diff --git a/crates/script/src/receipts.rs b/crates/script/src/receipts.rs index c11fdd71a..cff893b55 100644 --- a/crates/script/src/receipts.rs +++ b/crates/script/src/receipts.rs @@ -1,9 +1,9 @@ use alloy_chains::Chain; +use alloy_network::AnyTransactionReceipt; use alloy_primitives::{utils::format_units, TxHash, U256}; use alloy_provider::{PendingTransactionBuilder, PendingTransactionError, Provider, WatchTxError}; -use alloy_rpc_types::AnyTransactionReceipt; use eyre::Result; -use foundry_common::provider::RetryProvider; +use foundry_common::{provider::RetryProvider, shell}; use std::time::Duration; /// Convenience enum for internal signalling of transaction status @@ -71,31 +71,51 @@ pub async fn check_tx_status( pub fn format_receipt(chain: Chain, receipt: &AnyTransactionReceipt) -> String { let gas_used = receipt.gas_used; let gas_price = receipt.effective_gas_price; - format!( - "\n##### {chain}\n{status} Hash: {tx_hash:?}{caddr}\nBlock: {bn}\n{gas}\n\n", - status = if !receipt.inner.inner.inner.receipt.status.coerce_status() { - "❌ [Failed]" - } else { - "✅ [Success]" - }, - tx_hash = receipt.transaction_hash, - caddr = if let Some(addr) = &receipt.contract_address { - format!("\nContract Address: {}", addr.to_checksum(None)) - } else { - String::new() - }, - bn = receipt.block_number.unwrap_or_default(), - gas = if gas_price == 0 { - format!("Gas Used: {gas_used}") - } else { - let paid = format_units(gas_used.saturating_mul(gas_price), 18) - .unwrap_or_else(|_| "N/A".into()); - let gas_price = format_units(U256::from(gas_price), 9).unwrap_or_else(|_| "N/A".into()); - format!( - "Paid: {} ETH ({gas_used} gas * {} gwei)", - paid.trim_end_matches('0'), - gas_price.trim_end_matches('0').trim_end_matches('.') - ) - }, - ) + let block_number = receipt.block_number.unwrap_or_default(); + let success = receipt.inner.inner.inner.receipt.status.coerce_status(); + + if shell::is_json() { + let _ = sh_println!( + "{}", + serde_json::json!({ + "chain": chain, + "status": if success { + "success" + } else { + "failed" + }, + "tx_hash": receipt.transaction_hash, + "contract_address": receipt.contract_address.map(|addr| addr.to_string()), + "block_number": block_number, + "gas_used": gas_used, + "gas_price": gas_price, + }) + ); + + String::new() + } else { + format!( + "\n##### {chain}\n{status} Hash: {tx_hash:?}{contract_address}\nBlock: {block_number}\n{gas}\n\n", + status = if success { "✅ [Success]" } else { "❌ [Failed]" }, + tx_hash = receipt.transaction_hash, + contract_address = if let Some(addr) = &receipt.contract_address { + format!("\nContract Address: {}", addr.to_checksum(None)) + } else { + String::new() + }, + gas = if gas_price == 0 { + format!("Gas Used: {gas_used}") + } else { + let paid = format_units(gas_used.saturating_mul(gas_price), 18) + .unwrap_or_else(|_| "N/A".into()); + let gas_price = + format_units(U256::from(gas_price), 9).unwrap_or_else(|_| "N/A".into()); + format!( + "Paid: {} ETH ({gas_used} gas * {} gwei)", + paid.trim_end_matches('0'), + gas_price.trim_end_matches('0').trim_end_matches('.') + ) + }, + ) + } } diff --git a/crates/script/src/runner.rs b/crates/script/src/runner.rs index 34cc54e28..ca21d37e9 100644 --- a/crates/script/src/runner.rs +++ b/crates/script/src/runner.rs @@ -7,7 +7,7 @@ use eyre::Result; 
use foundry_cheatcodes::BroadcastableTransaction; use foundry_config::Config; use foundry_evm::{ - constants::{CALLER, DEFAULT_CREATE2_DEPLOYER}, + constants::CALLER, executors::{DeployResult, EvmError, ExecutionErr, Executor, RawCallResult}, opts::EvmOpts, revm::interpreter::{return_ok, InstructionResult}, @@ -85,9 +85,9 @@ impl ScriptRunner { }) }), ScriptPredeployLibraries::Create2(libraries, salt) => { + let create2_deployer = self.executor.create2_deployer(); for library in libraries { - let address = - DEFAULT_CREATE2_DEPLOYER.create2_from_code(salt, library.as_ref()); + let address = create2_deployer.create2_from_code(salt, library.as_ref()); // Skip if already deployed if !self.executor.is_empty_code(address)? { continue; @@ -97,7 +97,7 @@ impl ScriptRunner { .executor .transact_raw( self.evm_opts.sender, - DEFAULT_CREATE2_DEPLOYER, + create2_deployer, calldata.clone().into(), U256::from(0), ) @@ -113,7 +113,7 @@ impl ScriptRunner { from: Some(self.evm_opts.sender), input: calldata.into(), nonce: Some(sender_nonce + library_transactions.len() as u64), - to: Some(TxKind::Call(DEFAULT_CREATE2_DEPLOYER)), + to: Some(TxKind::Call(create2_deployer)), ..Default::default() } .into(), diff --git a/crates/script/src/simulate.rs b/crates/script/src/simulate.rs index 2b6012b10..848510ac1 100644 --- a/crates/script/src/simulate.rs +++ b/crates/script/src/simulate.rs @@ -16,7 +16,7 @@ use eyre::{Context, Result}; use forge_script_sequence::{ScriptSequence, TransactionWithMetadata}; use foundry_cheatcodes::Wallets; use foundry_cli::utils::{has_different_gas_calc, now}; -use foundry_common::ContractData; +use foundry_common::{shell, ContractData}; use foundry_evm::traces::{decode_trace_arena, render_trace_arena}; use futures::future::{join_all, try_join_all}; use parking_lot::RwLock; @@ -64,7 +64,11 @@ impl PreSimulationState { let mut builder = ScriptTransactionBuilder::new(tx.transaction, rpc, tx.zk_tx); if let Some(TxKind::Call(_)) = to { - builder.set_call(&address_to_abi, &self.execution_artifacts.decoder)?; + builder.set_call( + &address_to_abi, + &self.execution_artifacts.decoder, + self.script_config.evm_opts.create2_deployer, + )?; } else { builder.set_create(false, sender.create(nonce), &address_to_abi)?; } @@ -153,7 +157,7 @@ impl PreSimulationState { }) .collect::>(); - if self.script_config.evm_opts.verbosity > 3 { + if !shell::is_json() && self.script_config.evm_opts.verbosity > 3 { sh_println!("==========================")?; sh_println!("Simulated On-chain Traces:\n")?; } @@ -222,9 +226,11 @@ impl PreSimulationState { async fn build_runners(&self) -> Result> { let rpcs = self.execution_artifacts.rpc_data.total_rpcs.clone(); - let n = rpcs.len(); - let s = if n != 1 { "s" } else { "" }; - sh_println!("\n## Setting up {n} EVM{s}.")?; + if !shell::is_json() { + let n = rpcs.len(); + let s = if n != 1 { "s" } else { "" }; + sh_println!("\n## Setting up {n} EVM{s}.")?; + } let futs = rpcs.into_iter().map(|rpc| async move { let mut script_config = self.script_config.clone(); @@ -350,24 +356,34 @@ impl FilledTransactionsState { provider_info.gas_price()? 
}; - sh_println!("\n==========================")?; - sh_println!("\nChain {}", provider_info.chain)?; - - sh_println!( - "\nEstimated gas price: {} gwei", - format_units(per_gas, 9) - .unwrap_or_else(|_| "[Could not calculate]".to_string()) - .trim_end_matches('0') - .trim_end_matches('.') - )?; - sh_println!("\nEstimated total gas used for script: {total_gas}")?; - sh_println!( - "\nEstimated amount required: {} ETH", - format_units(total_gas.saturating_mul(per_gas), 18) - .unwrap_or_else(|_| "[Could not calculate]".to_string()) - .trim_end_matches('0') - )?; - sh_println!("\n==========================")?; + let estimated_gas_price_raw = format_units(per_gas, 9) + .unwrap_or_else(|_| "[Could not calculate]".to_string()); + let estimated_gas_price = + estimated_gas_price_raw.trim_end_matches('0').trim_end_matches('.'); + + let estimated_amount_raw = format_units(total_gas.saturating_mul(per_gas), 18) + .unwrap_or_else(|_| "[Could not calculate]".to_string()); + let estimated_amount = estimated_amount_raw.trim_end_matches('0'); + + if !shell::is_json() { + sh_println!("\n==========================")?; + sh_println!("\nChain {}", provider_info.chain)?; + + sh_println!("\nEstimated gas price: {} gwei", estimated_gas_price)?; + sh_println!("\nEstimated total gas used for script: {total_gas}")?; + sh_println!("\nEstimated amount required: {estimated_amount} ETH",)?; + sh_println!("\n==========================")?; + } else { + sh_println!( + "{}", + serde_json::json!({ + "chain": provider_info.chain, + "estimated_gas_price": estimated_gas_price, + "estimated_total_gas_used": total_gas, + "estimated_amount_required": estimated_amount, + }) + )?; + } } } @@ -413,7 +429,7 @@ impl FilledTransactionsState { )?) }; - let commit = get_commit_hash(&self.script_config.config.root.0); + let commit = get_commit_hash(&self.script_config.config.root); let libraries = self .build_data diff --git a/crates/script/src/transaction.rs b/crates/script/src/transaction.rs index 4bef28d57..f59e4b45c 100644 --- a/crates/script/src/transaction.rs +++ b/crates/script/src/transaction.rs @@ -4,7 +4,7 @@ use alloy_primitives::{hex, Address, TxKind, B256}; use eyre::Result; use forge_script_sequence::TransactionWithMetadata; use foundry_common::{fmt::format_token_raw, ContractData, TransactionMaybeSigned, SELECTOR_LEN}; -use foundry_evm::{constants::DEFAULT_CREATE2_DEPLOYER, traces::CallTraceDecoder}; +use foundry_evm::traces::CallTraceDecoder; use foundry_zksync_core::ZkTransactionMetadata; use itertools::Itertools; use revm_inspectors::tracing::types::CallKind; @@ -35,16 +35,16 @@ impl ScriptTransactionBuilder { &mut self, local_contracts: &BTreeMap, decoder: &CallTraceDecoder, + create2_deployer: Address, ) -> Result<()> { if let Some(TxKind::Call(to)) = self.transaction.transaction.to() { - if to == DEFAULT_CREATE2_DEPLOYER { + if to == create2_deployer { if let Some(input) = self.transaction.transaction.input() { let (salt, init_code) = input.split_at(32); self.set_create( true, - DEFAULT_CREATE2_DEPLOYER - .create2_from_code(B256::from_slice(salt), init_code), + create2_deployer.create2_from_code(B256::from_slice(salt), init_code), local_contracts, )?; } diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml index 5dd41a29f..ababc423b 100644 --- a/crates/test-utils/Cargo.toml +++ b/crates/test-utils/Cargo.toml @@ -32,6 +32,7 @@ tracing-subscriber = { workspace = true, features = ["env-filter"] } rand.workspace = true snapbox = { version = "0.6", features = ["json", "regex"] } tokio.workspace = true 
+tempfile.workspace = true # zk zksync_types.workspace = true diff --git a/crates/test-utils/src/filter.rs index 003b0170f..1ba905d27 100644 --- a/crates/test-utils/src/filter.rs +++ b/crates/test-utils/src/filter.rs @@ -2,6 +2,7 @@ use foundry_common::TestFilter; use regex::Regex; use std::path::Path; +#[derive(Clone, Debug)] pub struct Filter { test_regex: Regex, contract_regex: Regex, diff --git a/crates/test-utils/src/lib.rs index e51f5e831..eb37979cb 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -4,6 +4,10 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +#![allow(clippy::disallowed_macros)] + +#[macro_use] +extern crate foundry_common; diff --git a/crates/test-utils/src/rpc.rs index 361bf56c2..ed0dfaa3c 100644 --- a/crates/test-utils/src/rpc.rs +++ b/crates/test-utils/src/rpc.rs @@ -101,53 +101,109 @@ fn next<T>(list: &[T]) -> &T { &list[next_idx() % list.len()] } -/// Returns the next _mainnet_ rpc endpoint in inline +/// Returns the next _mainnet_ rpc URL in inline /// /// This will rotate all available rpc endpoints pub fn next_http_rpc_endpoint() -> String { next_rpc_endpoint(NamedChain::Mainnet) } -/// Returns the next _mainnet_ rpc endpoint in inline +/// Returns the next _mainnet_ rpc URL in inline /// /// This will rotate all available rpc endpoints pub fn next_ws_rpc_endpoint() -> String { next_ws_endpoint(NamedChain::Mainnet) } -/// Returns the next HTTP RPC endpoint. +/// Returns the next HTTP RPC URL. pub fn next_rpc_endpoint(chain: NamedChain) -> String { next_url(false, chain) } -/// Returns the next WS RPC endpoint. +/// Returns the next WS RPC URL. pub fn next_ws_endpoint(chain: NamedChain) -> String { next_url(true, chain) } -/// Returns endpoint that has access to archive state -pub fn next_http_archive_rpc_endpoint() -> String { - next_archive_endpoint(false) +/// Returns an HTTP URL that has access to archive state +pub fn next_http_archive_rpc_url() -> String { + next_archive_url(false) } -/// Returns endpoint that has access to archive state -pub fn next_ws_archive_rpc_endpoint() -> String { - next_archive_endpoint(true) +/// Returns a websocket URL that has access to archive state +pub fn next_ws_archive_rpc_url() -> String { + next_archive_url(true) } -/// Returns endpoint that has access to archive state, http or ws. -/// Use env vars (comma separated urls) or default inline keys (Alchemy for ws, Infura for http). -fn next_archive_endpoint(is_ws: bool) -> String { - let env_urls = if is_ws { ENV_WS_ARCHIVE_ENDPOINTS } else { ENV_HTTP_ARCHIVE_ENDPOINTS }; - - let rpc_env_vars = env::var(env_urls).unwrap_or_default(); - if !rpc_env_vars.is_empty() { - let urls = rpc_env_vars.split(',').collect::<Vec<_>>(); - next(&urls).to_string() - } else if is_ws { - format!("wss://eth-mainnet.g.alchemy.com/v2/{}", next(&ALCHEMY_KEYS)) +/// Returns a URL that has access to archive state. +/// +/// Uses either environment variables (comma separated urls) or default keys.
+fn next_archive_url(is_ws: bool) -> String { + let urls = archive_urls(is_ws); + let url = if env_archive_urls(is_ws).is_empty() { + next(urls) } else { - format!("https://eth-mainnet.g.alchemy.com/v2/{}", next(&ALCHEMY_KEYS)) + urls.choose_weighted(&mut rand::thread_rng(), |url| { + if url.contains("reth") { + 2usize + } else { + 1usize + } + }) + .unwrap() + }; + eprintln!("--- next_archive_url(is_ws={is_ws}) = {url} ---"); + url.clone() +} + +fn archive_urls(is_ws: bool) -> &'static [String] { + static WS: LazyLock<Vec<String>> = LazyLock::new(|| get(true)); + static HTTP: LazyLock<Vec<String>> = LazyLock::new(|| get(false)); + + fn get(is_ws: bool) -> Vec<String> { + let env_urls = env_archive_urls(is_ws); + if !env_urls.is_empty() { + let mut urls = env_urls.to_vec(); + urls.shuffle(&mut rand::thread_rng()); + return urls; + } + + let mut urls = Vec::new(); + for &key in ALCHEMY_KEYS.iter() { + if is_ws { + urls.push(format!("wss://eth-mainnet.g.alchemy.com/v2/{key}")); + } else { + urls.push(format!("https://eth-mainnet.g.alchemy.com/v2/{key}")); + } + } + urls + } + + if is_ws { + &WS + } else { + &HTTP + } +} + +fn env_archive_urls(is_ws: bool) -> &'static [String] { + static WS: LazyLock<Vec<String>> = LazyLock::new(|| get(true)); + static HTTP: LazyLock<Vec<String>> = LazyLock::new(|| get(false)); + + fn get(is_ws: bool) -> Vec<String> { + let env = if is_ws { ENV_WS_ARCHIVE_ENDPOINTS } else { ENV_HTTP_ARCHIVE_ENDPOINTS }; + let env = env::var(env).unwrap_or_default(); + let env = env.trim(); + if env.is_empty() { + return vec![]; + } + env.split(',').map(str::trim).filter(|s| !s.is_empty()).map(ToString::to_string).collect() + } + + if is_ws { + &WS + } else { + &HTTP } } @@ -162,7 +218,9 @@ pub fn next_etherscan_api_key(chain: NamedChain) -> String { Optimism => &ETHERSCAN_OPTIMISM_KEYS, _ => &ETHERSCAN_MAINNET_KEYS, }; - next(keys).to_string() + let key = next(keys).to_string(); + eprintln!("--- next_etherscan_api_key(chain={chain:?}) = {key} ---"); + key } fn next_url(is_ws: bool, chain: NamedChain) -> String { @@ -206,12 +264,14 @@ fn next_url(is_ws: bool, chain: NamedChain) -> String { }; let full = if prefix.is_empty() { network.to_string() } else { format!("{prefix}-{network}") }; - match (is_ws, is_infura) { + let url = match (is_ws, is_infura) { (false, true) => format!("https://{full}.infura.io/v3/{key}"), (true, true) => format!("wss://{full}.infura.io/ws/v3/{key}"), (false, false) => format!("https://{full}.g.alchemy.com/v2/{key}"), (true, false) => format!("wss://{full}.g.alchemy.com/v2/{key}"), - } + }; + eprintln!("--- next_url(is_ws={is_ws}, chain={chain:?}) = {url} ---"); + url } #[cfg(test)] diff --git a/crates/test-utils/src/script.rs index f15e91d5a..b82126d2d 100644 --- a/crates/test-utils/src/script.rs +++ b/crates/test-utils/src/script.rs @@ -171,6 +171,10 @@ impl ScriptTester { self.args(&["--tc", contract_name, "--sig", sig]) } + pub fn add_create2_deployer(&mut self, create2_deployer: Address) -> &mut Self { + self.args(&["--create2-deployer", create2_deployer.to_string().as_str()]) + } + /// Adds the `--unlocked` flag pub fn unlocked(&mut self) -> &mut Self { self.arg("--unlocked") } diff --git a/crates/test-utils/src/util.rs index 8d7f6cbb5..e7410ed60 100644 --- a/crates/test-utils/src/util.rs +++ b/crates/test-utils/src/util.rs @@ -1,6 +1,7 @@ use crate::init_tracing; use eyre::{Result, WrapErr}; use foundry_compilers::{ + artifacts::Contract, cache::CompilerCache, compilers::multi::MultiCompiler, error::Result as SolcResult, @@ -11,7 +12,7 @@ use
foundry_compilers::{ use foundry_config::Config; use parking_lot::Mutex; use regex::Regex; -use snapbox::{assert_data_eq, cmd::OutputAssert, str, IntoData}; +use snapbox::{assert_data_eq, cmd::OutputAssert, Data, IntoData}; use std::{ env, ffi::OsStr, @@ -202,7 +203,7 @@ impl ExtTester { test_cmd.envs(self.envs.iter().map(|(k, v)| (k, v))); if let Some(fork_block) = self.fork_block { - test_cmd.env("FOUNDRY_ETH_RPC_URL", crate::rpc::next_http_archive_rpc_endpoint()); + test_cmd.env("FOUNDRY_ETH_RPC_URL", crate::rpc::next_http_archive_rpc_url()); test_cmd.env("FOUNDRY_FORK_BLOCK_NUMBER", fork_block.to_string()); } test_cmd.env("FOUNDRY_INVARIANT_DEPTH", "15"); @@ -420,7 +421,9 @@ pub fn setup_cast_project(test: TestProject) -> (TestProject, TestCommand) { /// /// Test projects are created from a global atomic counter to avoid duplicates. #[derive(Clone, Debug)] -pub struct TestProject { +pub struct TestProject< + T: ArtifactOutput + Default = ConfigurableArtifacts, +> { /// The directory in which this test executable is running. exe_root: PathBuf, /// The project in which the test should run. @@ -883,7 +886,7 @@ impl TestCommand { let assert = OutputAssert::new(self.execute()); if self.redact_output { return assert.with_assert(test_assert()); - }; + } assert } @@ -902,10 +905,10 @@ impl TestCommand { assert_data_eq!(actual, expected); } - /// Runs the command and asserts that it **failed** nothing was printed to stdout. + /// Runs the command and asserts that it **succeeded** nothing was printed to stdout. #[track_caller] pub fn assert_empty_stdout(&mut self) { - self.assert_success().stdout_eq(str![[r#""#]]); + self.assert_success().stdout_eq(Data::new()); } /// Runs the command and asserts that it failed. @@ -914,10 +917,32 @@ impl TestCommand { self.assert().failure() } + /// Runs the command and asserts that the exit code is `expected`. + #[track_caller] + pub fn assert_code(&mut self, expected: i32) -> OutputAssert { + self.assert().code(expected) + } + /// Runs the command and asserts that it **failed** nothing was printed to stderr. #[track_caller] pub fn assert_empty_stderr(&mut self) { - self.assert_failure().stderr_eq(str![[r#""#]]); + self.assert_failure().stderr_eq(Data::new()); + } + + /// Runs the command with a temporary file argument and asserts that the contents of the file + /// match the given data. + #[track_caller] + pub fn assert_file(&mut self, data: impl IntoData) { + self.assert_file_with(|this, path| _ = this.arg(path).assert_success(), data); + } + + /// Creates a temporary file, passes it to `f`, then asserts that the contents of the file match + /// the given data. + #[track_caller] + pub fn assert_file_with(&mut self, f: impl FnOnce(&mut Self, &Path), data: impl IntoData) { + let file = tempfile::NamedTempFile::new().expect("couldn't create temporary file"); + f(self, file.path()); + assert_data_eq!(Data::read_from(file.path(), None), data); } /// Does not apply [`snapbox`] redactions to the command output. 
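Note on the archive-URL rotation added in crates/test-utils/src/rpc.rs above: the candidate endpoints are built once behind a `LazyLock`, and when they come from the environment one is drawn per call with `choose_weighted`, giving hosts whose URL mentions "reth" twice the weight of the others. A minimal, self-contained sketch of that selection pattern (the `ARCHIVE_URLS` variable name and the `pick_archive_url` helper are illustrative, not part of the repository):

use rand::seq::SliceRandom;
use std::sync::LazyLock;

// Candidate endpoints, parsed once from a comma-separated env var (illustrative name).
static URLS: LazyLock<Vec<String>> = LazyLock::new(|| {
    std::env::var("ARCHIVE_URLS")
        .unwrap_or_default()
        .split(',')
        .map(str::trim)
        .filter(|s| !s.is_empty())
        .map(ToString::to_string)
        .collect()
});

// Draw one URL per call, favoring reth-backed endpoints with weight 2 vs. 1.
fn pick_archive_url() -> Option<String> {
    URLS.choose_weighted(&mut rand::thread_rng(), |url| {
        if url.contains("reth") { 2usize } else { 1usize }
    })
    .ok()
    .cloned()
}

The weighting is a cheap way to bias test traffic toward the endpoints that tolerate it best while still exercising every configured provider.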
diff --git a/crates/verify/src/etherscan/mod.rs b/crates/verify/src/etherscan/mod.rs index c19862504..c4f0e2f9c 100644 --- a/crates/verify/src/etherscan/mod.rs +++ b/crates/verify/src/etherscan/mod.rs @@ -16,14 +16,10 @@ use foundry_block_explorers::{ Client, }; use foundry_cli::utils::{get_provider, read_constructor_args_file, LoadConfig}; -use foundry_common::{ - abi::encode_function_args, - retry::{Retry, RetryError}, -}; +use foundry_common::{abi::encode_function_args, retry::RetryError}; use foundry_compilers::{artifacts::BytecodeObject, Artifact}; use foundry_config::{Chain, Config}; use foundry_evm::constants::DEFAULT_CREATE2_DEPLOYER; -use futures::FutureExt; use regex::Regex; use semver::{BuildMetadata, Version}; use std::{fmt::Debug, sync::LazyLock}; @@ -88,8 +84,9 @@ impl VerificationProvider for EtherscanVerificationProvider { trace!(?verify_args, "submitting verification request"); - let retry: Retry = args.retry.into(); - let resp = retry + let resp = args + .retry + .into_retry() .run_async(|| async { sh_println!( "\nSubmitting verification for [{}] {}.", @@ -150,7 +147,6 @@ impl VerificationProvider for EtherscanVerificationProvider { retry: RETRY_CHECK_ON_VERIFY, verifier: args.verifier, }; - // return check_args.run().await return self.check(check_args).await } } else { @@ -169,52 +165,45 @@ impl VerificationProvider for EtherscanVerificationProvider { args.etherscan.key().as_deref(), &config, )?; - let retry: Retry = args.retry.into(); - retry - .run_async_until_break(|| { - async { - let resp = etherscan - .check_contract_verification_status(args.id.clone()) - .await - .wrap_err("Failed to request verification status") - .map_err(RetryError::Retry)?; - - trace!(?resp, "Received verification response"); - - let _ = sh_println!( - "Contract verification status:\nResponse: `{}`\nDetails: `{}`", - resp.message, - resp.result - ); + args.retry + .into_retry() + .run_async_until_break(|| async { + let resp = etherscan + .check_contract_verification_status(args.id.clone()) + .await + .wrap_err("Failed to request verification status") + .map_err(RetryError::Retry)?; - if resp.result == "Pending in queue" { - return Err(RetryError::Retry(eyre!("Verification is still pending...",))) - } + trace!(?resp, "Received verification response"); - if resp.result == "In progress" { - return Err(RetryError::Retry(eyre!("Verification is in progress...",))) - } + let _ = sh_println!( + "Contract verification status:\nResponse: `{}`\nDetails: `{}`", + resp.message, + resp.result + ); - if resp.result == "Unable to verify" { - return Err(RetryError::Retry(eyre!("Unable to verify.",))) - } + if resp.result == "Pending in queue" { + return Err(RetryError::Retry(eyre!("Verification is still pending..."))) + } - if resp.result == "Already Verified" { - let _ = sh_println!("Contract source code already verified"); - return Ok(()) - } + if resp.result == "Unable to verify" { + return Err(RetryError::Retry(eyre!("Unable to verify."))) + } - if resp.status == "0" { - return Err(RetryError::Break(eyre!("Contract failed to verify.",))) - } + if resp.result == "Already Verified" { + let _ = sh_println!("Contract source code already verified"); + return Ok(()) + } - if resp.result == "Pass - Verified" { - let _ = sh_println!("Contract successfully verified"); - } + if resp.status == "0" { + return Err(RetryError::Break(eyre!("Contract failed to verify."))) + } - Ok(()) + if resp.result == "Pass - Verified" { + let _ = sh_println!("Contract successfully verified"); } - .boxed() + + Ok(()) }) .await 
.wrap_err("Checking verification result failed") diff --git a/crates/verify/src/provider.rs b/crates/verify/src/provider.rs index 4aa95e7ba..dd1a62dcc 100644 --- a/crates/verify/src/provider.rs +++ b/crates/verify/src/provider.rs @@ -184,7 +184,7 @@ impl VerificationProviderType { pub fn client(&self, key: &Option) -> Result> { match self { Self::Etherscan => { - if key.as_ref().map_or(true, |key| key.is_empty()) { + if key.as_ref().is_none_or(|key| key.is_empty()) { eyre::bail!("ETHERSCAN_API_KEY must be set") } Ok(Box::::default()) diff --git a/crates/verify/src/retry.rs b/crates/verify/src/retry.rs index 6067d9d85..a01b1c945 100644 --- a/crates/verify/src/retry.rs +++ b/crates/verify/src/retry.rs @@ -35,9 +35,10 @@ impl Default for RetryArgs { } } -impl From for Retry { - fn from(r: RetryArgs) -> Self { - Self::new(r.retries, Some(Duration::from_secs(r.delay as u64))) +impl RetryArgs { + /// Converts the arguments into a `Retry` instance. + pub fn into_retry(self) -> Retry { + Retry::new(self.retries, Duration::from_secs(self.delay as u64)) } } diff --git a/crates/verify/src/sourcify.rs b/crates/verify/src/sourcify.rs index 0cb4443b4..b50522929 100644 --- a/crates/verify/src/sourcify.rs +++ b/crates/verify/src/sourcify.rs @@ -6,7 +6,7 @@ use crate::{ use alloy_primitives::map::HashMap; use async_trait::async_trait; use eyre::Result; -use foundry_common::{fs, retry::Retry}; +use foundry_common::fs; use futures::FutureExt; use reqwest::Url; use serde::{Deserialize, Serialize}; @@ -41,8 +41,9 @@ impl VerificationProvider for SourcifyVerificationProvider { let client = reqwest::Client::new(); - let retry: Retry = args.retry.into(); - let resp = retry + let resp = args + .retry + .into_retry() .run_async(|| { async { sh_println!( @@ -61,7 +62,9 @@ impl VerificationProvider for SourcifyVerificationProvider { if !status.is_success() { let error: serde_json::Value = response.json().await?; eyre::bail!( - "Sourcify verification request for address ({}) failed with status code {status}\nDetails: {error:#}", + "Sourcify verification request for address ({}) \ + failed with status code {status}\n\ + Details: {error:#}", args.address, ); } @@ -77,8 +80,9 @@ impl VerificationProvider for SourcifyVerificationProvider { } async fn check(&self, args: VerifyCheckArgs) -> Result<()> { - let retry: Retry = args.retry.into(); - let resp = retry + let resp = args + .retry + .into_retry() .run_async(|| { async { let url = Url::from_str( diff --git a/crates/verify/src/utils.rs b/crates/verify/src/utils.rs index a14d6af6d..6165fa718 100644 --- a/crates/verify/src/utils.rs +++ b/crates/verify/src/utils.rs @@ -12,7 +12,10 @@ use foundry_block_explorers::{ use foundry_common::{abi::encode_args, compile::ProjectCompiler, provider::RetryProvider, shell}; use foundry_compilers::artifacts::{BytecodeHash, CompactContractBytecode, EvmVersion}; use foundry_config::Config; -use foundry_evm::{constants::DEFAULT_CREATE2_DEPLOYER, executors::TracingExecutor, opts::EvmOpts}; +use foundry_evm::{ + constants::DEFAULT_CREATE2_DEPLOYER, executors::TracingExecutor, opts::EvmOpts, + traces::TraceMode, +}; use reqwest::Url; use revm_primitives::{ db::Database, @@ -325,16 +328,17 @@ pub async fn get_tracing_executor( fork_config.fork_block_number = Some(fork_blk_num); fork_config.evm_version = evm_version; - let (env, fork, _chain, is_alphanet) = + let create2_deployer = evm_opts.create2_deployer; + let (env, fork, _chain, is_odyssey) = TracingExecutor::get_fork_material(fork_config, evm_opts).await?; let executor = 
TracingExecutor::new( env.clone(), fork, Some(fork_config.evm_version), - false, - false, - is_alphanet, + TraceMode::Call, + is_odyssey, + create2_deployer, ); Ok((env, executor)) diff --git a/crates/verify/src/verify.rs b/crates/verify/src/verify.rs index 784261eeb..d35ec90ce 100644 --- a/crates/verify/src/verify.rs +++ b/crates/verify/src/verify.rs @@ -21,6 +21,7 @@ use foundry_config::{figment, impl_figment_convert, impl_figment_convert_cast, C use itertools::Itertools; use reqwest::Url; use revm_primitives::HashSet; +use semver::BuildMetadata; use std::path::PathBuf; /// Verification provider arguments @@ -49,7 +50,7 @@ impl Default for VerifierArgs { } } -/// CLI arguments for `forge verify`. +/// CLI arguments for `forge verify-contract`. #[derive(Clone, Debug, Parser)] pub struct VerifyArgs { /// The address of the contract to verify. @@ -227,6 +228,17 @@ impl VerifyArgs { let verifier_url = self.verifier.verifier_url.clone(); sh_println!("Start verifying contract `{}` deployed on {chain}", self.address)?; + if let Some(version) = &self.compiler_version { + sh_println!("Compiler version: {version}")?; + } + if let Some(optimizations) = &self.num_of_optimizations { + sh_println!("Optimizations: {optimizations}")? + } + if let Some(args) = &self.constructor_args { + if !args.is_empty() { + sh_println!("Constructor args: {args}")? + } + } self.verifier.verifier.client(&self.etherscan.key())?.verify(self, context).await.map_err(|err| { if let Some(verifier_url) = verifier_url { match Url::parse(&verifier_url) { @@ -270,8 +282,7 @@ impl VerifyArgs { }; let cache = project.read_cache_file().ok(); - - let version = if let Some(ref version) = self.compiler_version { + let mut version = if let Some(ref version) = self.compiler_version { version.trim_start_matches('v').parse()? } else if let Some(ref solc) = config.solc { match solc { @@ -301,7 +312,6 @@ impl VerifyArgs { } else { eyre::bail!("If cache is disabled, compiler version must be either provided with `--compiler-version` option or set in foundry.toml") }; - let settings = if let Some(profile) = &self.compilation_profile { if profile == "default" { &project.settings @@ -317,7 +327,21 @@ impl VerifyArgs { let profiles = entry .artifacts .get(&contract.name) - .and_then(|artifacts| artifacts.get(&version)) + .and_then(|artifacts| { + let mut cached_artifacts = artifacts.get(&version); + // If we try to verify with specific build version and no cached artifacts + // found, then check if we have artifacts cached for same version but + // without any build metadata. + // This could happen when artifacts are built / cached + // with a version like `0.8.20` but verify is using a compiler-version arg + // as `0.8.20+commit.a1b79de6`. + // See . + if cached_artifacts.is_none() && version.build != BuildMetadata::EMPTY { + version.build = BuildMetadata::EMPTY; + cached_artifacts = artifacts.get(&version); + } + cached_artifacts + }) .map(|artifacts| artifacts.keys().collect::>()) .unwrap_or_default(); @@ -351,7 +375,7 @@ impl VerifyArgs { let provider = utils::get_provider(&config)?; let code = provider.get_code_at(self.address).await?; - let output = ProjectCompiler::new().compile(&project)?; + let output = ProjectCompiler::new().zksync_compile(&project)?; let contracts = ContractsByArtifact::new( output.artifact_ids().map(|(id, artifact)| (id, artifact.clone().into())), ); @@ -393,7 +417,7 @@ impl VerifyArgs { project.find_contract_path(&contract.name)? 
}; - let version = if let Some(ref version) = self.compiler_version { + let mut version = if let Some(ref version) = self.compiler_version { version.trim_start_matches('v').parse()? } else if let Some(ref solc) = config.solc { match solc { diff --git a/crates/zksync/core/src/lib.rs b/crates/zksync/core/src/lib.rs index 7218ff60e..7f3d82c39 100644 --- a/crates/zksync/core/src/lib.rs +++ b/crates/zksync/core/src/lib.rs @@ -23,7 +23,7 @@ use alloy_primitives::{address, hex, keccak256, Address, Bytes, U256 as rU256}; use alloy_transport::Transport; use alloy_zksync::{ network::transaction_request::TransactionRequest as ZkTransactionRequest, - provider::ZksyncProvider, + provider::ZkyncProvider, }; use convert::{ConvertAddress, ConvertH160, ConvertH256, ConvertRU256, ConvertU256}; use eyre::eyre; diff --git a/foundryup-zksync/install b/foundryup-zksync/install index c2c6ec802..e80760bf6 100755 --- a/foundryup-zksync/install +++ b/foundryup-zksync/install @@ -54,7 +54,7 @@ if [[ ":$PATH:" != *":${FOUNDRY_BIN_DIR}:"* ]]; then fi # Warn MacOS users that they may need to manually install libusb via Homebrew: -if [[ "$OSTYPE" =~ ^darwin ]] && [[ ! -f /usr/local/opt/libusb/lib/libusb-1.0.0.dylib && ! -f /opt/homebrew/opt/libusb/lib/libusb-1.0.0.dylib ]]; then +if [[ "$OSTYPE" =~ ^darwin ]] && [[ ! -f /usr/local/opt/libusb/lib/libusb-1.0.0.dylib ]] && [[ ! -f /opt/homebrew/opt/libusb/lib/libusb-1.0.0.dylib ]]; then echo && echo "warning: libusb not found. You may need to install it manually on MacOS via Homebrew (brew install libusb)." fi diff --git a/rust-toolchain b/rust-toolchain index 0aa4e7d35..94176b8f3 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2024-09-01" +channel = "nightly-2024-09-05" components = [ "rustfmt", "clippy" ] diff --git a/testdata/cheats/Vm.sol b/testdata/cheats/Vm.sol index a34eac3d0..bdbb68e37 100644 --- a/testdata/cheats/Vm.sol +++ b/testdata/cheats/Vm.sol @@ -149,7 +149,7 @@ interface Vm { function assertTrue(bool condition, string calldata error) external pure; function assume(bool condition) external pure; function assumeNoRevert() external pure; - function attachDelegation(SignedDelegation memory signedDelegation) external; + function attachDelegation(SignedDelegation calldata signedDelegation) external; function blobBaseFee(uint256 newBlobBaseFee) external; function blobhashes(bytes32[] calldata hashes) external; function breakpoint(string calldata char) external pure; @@ -224,7 +224,7 @@ interface Vm { function envUint(string calldata name) external view returns (uint256 value); function envUint(string calldata name, string calldata delim) external view returns (uint256[] memory value); function etch(address target, bytes calldata newRuntimeBytecode) external; - function eth_getLogs(uint256 fromBlock, uint256 toBlock, address target, bytes32[] memory topics) external returns (EthGetLogs[] memory logs); + function eth_getLogs(uint256 fromBlock, uint256 toBlock, address target, bytes32[] calldata topics) external returns (EthGetLogs[] memory logs); function exists(string calldata path) external view returns (bool result); function expectCallMinGas(address callee, uint256 msgValue, uint64 minGas, bytes calldata data) external; function expectCallMinGas(address callee, uint256 msgValue, uint64 minGas, bytes calldata data, uint64 count) external; @@ -246,10 +246,16 @@ interface Vm { function expectPartialRevert(bytes4 revertData, address reverter) external; function expectRevert() external; function expectRevert(bytes4 
revertData) external; + function expectRevert(bytes4 revertData, address reverter, uint64 count) external; + function expectRevert(bytes calldata revertData, address reverter, uint64 count) external; function expectRevert(bytes calldata revertData) external; function expectRevert(address reverter) external; function expectRevert(bytes4 revertData, address reverter) external; function expectRevert(bytes calldata revertData, address reverter) external; + function expectRevert(uint64 count) external; + function expectRevert(bytes4 revertData, uint64 count) external; + function expectRevert(bytes calldata revertData, uint64 count) external; + function expectRevert(address reverter, uint64 count) external; function expectSafeMemory(uint64 min, uint64 max) external; function expectSafeMemoryCall(uint64 min, uint64 max) external; function fee(uint256 newBasefee) external; @@ -261,14 +267,14 @@ interface Vm { function getBlobhashes() external view returns (bytes32[] memory hashes); function getBlockNumber() external view returns (uint256 height); function getBlockTimestamp() external view returns (uint256 timestamp); - function getBroadcast(string memory contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary memory); - function getBroadcasts(string memory contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary[] memory); - function getBroadcasts(string memory contractName, uint64 chainId) external view returns (BroadcastTxSummary[] memory); + function getBroadcast(string calldata contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary memory); + function getBroadcasts(string calldata contractName, uint64 chainId, BroadcastTxType txType) external view returns (BroadcastTxSummary[] memory); + function getBroadcasts(string calldata contractName, uint64 chainId) external view returns (BroadcastTxSummary[] memory); function getCode(string calldata artifactPath) external view returns (bytes memory creationBytecode); function getDeployedCode(string calldata artifactPath) external view returns (bytes memory runtimeBytecode); - function getDeployment(string memory contractName) external view returns (address deployedAddress); - function getDeployment(string memory contractName, uint64 chainId) external view returns (address deployedAddress); - function getDeployments(string memory contractName, uint64 chainId) external view returns (address[] memory deployedAddresses); + function getDeployment(string calldata contractName) external view returns (address deployedAddress); + function getDeployment(string calldata contractName, uint64 chainId) external view returns (address deployedAddress); + function getDeployments(string calldata contractName, uint64 chainId) external view returns (address[] memory deployedAddresses); function getFoundryVersion() external view returns (string memory version); function getLabel(address account) external view returns (string memory currentLabel); function getMappingKeyAndParentOf(address target, bytes32 elementSlot) external returns (bool found, bytes32 key, bytes32 parent); @@ -277,6 +283,8 @@ interface Vm { function getNonce(address account) external view returns (uint64 nonce); function getNonce(Wallet calldata wallet) external returns (uint64 nonce); function getRecordedLogs() external returns (Log[] memory logs); + function getStateDiff() external view returns (string memory diff); + function getStateDiffJson() external view returns (string memory 
diff); function getWallets() external returns (address[] memory wallets); function indexOf(string calldata input, string calldata key) external pure returns (uint256); function isContext(ForgeContext context) external view returns (bool result); @@ -424,8 +432,8 @@ interface Vm { function serializeInt(string calldata objectKey, string calldata valueKey, int256 value) external returns (string memory json); function serializeInt(string calldata objectKey, string calldata valueKey, int256[] calldata values) external returns (string memory json); function serializeJson(string calldata objectKey, string calldata value) external returns (string memory json); - function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json); - function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json); + function serializeJsonType(string calldata typeDescription, bytes calldata value) external pure returns (string memory json); + function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes calldata value) external returns (string memory json); function serializeString(string calldata objectKey, string calldata valueKey, string calldata value) external returns (string memory json); function serializeString(string calldata objectKey, string calldata valueKey, string[] calldata values) external returns (string memory json); function serializeUintToHex(string calldata objectKey, string calldata valueKey, uint256 value) external returns (string memory json); @@ -505,9 +513,4 @@ interface Vm { function writeLine(string calldata path, string calldata data) external; function writeToml(string calldata json, string calldata path) external; function writeToml(string calldata json, string calldata path, string calldata valueKey) external; - function zkRegisterContract(string calldata name, bytes32 evmBytecodeHash, bytes calldata evmDeployedBytecode, bytes calldata evmBytecode, bytes32 zkBytecodeHash, bytes calldata zkDeployedBytecode) external pure; - function zkUseFactoryDep(string calldata name) external pure; - function zkUsePaymaster(address paymaster_address, bytes calldata paymaster_input) external pure; - function zkVm(bool enable) external pure; - function zkVmSkip() external pure; } diff --git a/testdata/default/cheats/Broadcast.t.sol b/testdata/default/cheats/Broadcast.t.sol index bca8cc2ee..97b9d5275 100644 --- a/testdata/default/cheats/Broadcast.t.sol +++ b/testdata/default/cheats/Broadcast.t.sol @@ -219,6 +219,32 @@ contract BroadcastTestNoLinking is DSTest { vm.stopBroadcast(); } + function deployCreate2(address deployer) public { + vm.startBroadcast(); + bytes32 salt = bytes32(uint256(1338)); + NoLink test_c2 = new NoLink{salt: salt}(); + assert(test_c2.view_me() == 1337); + + address expectedAddress = address( + uint160( + uint256( + keccak256( + abi.encodePacked( + bytes1(0xff), + deployer, + salt, + keccak256(abi.encodePacked(type(NoLink).creationCode, abi.encode())) + ) + ) + ) + ) + ); + require(address(test_c2) == expectedAddress, "Create2 address mismatch"); + + NoLink test2 = new NoLink(); + vm.stopBroadcast(); + } + function errorStaticCall() public { vm.broadcast(); NoLink test11 = new NoLink(); diff --git a/testdata/default/cheats/ExpectRevert.t.sol b/testdata/default/cheats/ExpectRevert.t.sol index 18a90bac6..fef4ebaf5 100644 --- 
a/testdata/default/cheats/ExpectRevert.t.sol +++ b/testdata/default/cheats/ExpectRevert.t.sol @@ -30,6 +30,10 @@ contract Reverter { revert(message); } + function callThenNoRevert(Dummy dummy) public pure { + dummy.callMe(); + } + function revertWithoutReason() public pure { revert(); } @@ -188,7 +192,7 @@ contract ExpectRevertTest is DSTest { } function testexpectCheatcodeRevert() public { - vm._expectCheatcodeRevert("JSON value at \".a\" is not an object"); + vm._expectCheatcodeRevert('JSON value at ".a" is not an object'); vm.parseJsonKeys('{"a": "b"}', ".a"); } @@ -351,3 +355,211 @@ contract ExpectRevertWithReverterTest is DSTest { aContract.callAndRevert(); } } + +contract ExpectRevertCount is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testRevertCountAny() public { + uint64 count = 3; + Reverter reverter = new Reverter(); + vm.expectRevert(count); + reverter.revertWithMessage("revert"); + reverter.revertWithMessage("revert2"); + reverter.revertWithMessage("revert3"); + + vm.expectRevert("revert"); + reverter.revertWithMessage("revert"); + } + + function testFailRevertCountAny() public { + uint64 count = 3; + Reverter reverter = new Reverter(); + vm.expectRevert(count); + reverter.revertWithMessage("revert"); + reverter.revertWithMessage("revert2"); + } + + function testNoRevert() public { + uint64 count = 0; + Reverter reverter = new Reverter(); + vm.expectRevert(count); + reverter.doNotRevert(); + } + + function testFailNoRevert() public { + uint64 count = 0; + Reverter reverter = new Reverter(); + vm.expectRevert(count); + reverter.revertWithMessage("revert"); + } + + function testRevertCountSpecific() public { + uint64 count = 2; + Reverter reverter = new Reverter(); + vm.expectRevert("revert", count); + reverter.revertWithMessage("revert"); + reverter.revertWithMessage("revert"); + } + + function testFailReverCountSpecifc() public { + uint64 count = 2; + Reverter reverter = new Reverter(); + vm.expectRevert("revert", count); + reverter.revertWithMessage("revert"); + reverter.revertWithMessage("second-revert"); + } + + function testNoRevertSpecific() public { + uint64 count = 0; + Reverter reverter = new Reverter(); + vm.expectRevert("revert", count); + reverter.doNotRevert(); + } + + function testFailNoRevertSpecific() public { + uint64 count = 0; + Reverter reverter = new Reverter(); + vm.expectRevert("revert", count); + reverter.revertWithMessage("revert"); + } + + function testNoRevertSpecificButDiffRevert() public { + uint64 count = 0; + Reverter reverter = new Reverter(); + vm.expectRevert("revert", count); + reverter.revertWithMessage("revert2"); + } + + function testRevertCountWithConstructor() public { + uint64 count = 1; + vm.expectRevert("constructor revert", count); + new ConstructorReverter("constructor revert"); + } + + function testNoRevertWithConstructor() public { + uint64 count = 0; + vm.expectRevert("constructor revert", count); + new CContract(); + } + + function testRevertCountNestedSpecific() public { + uint64 count = 2; + Reverter reverter = new Reverter(); + Reverter inner = new Reverter(); + + vm.expectRevert("nested revert", count); + reverter.revertWithMessage("nested revert"); + reverter.nestedRevert(inner, "nested revert"); + + vm.expectRevert("nested revert", count); + reverter.nestedRevert(inner, "nested revert"); + reverter.nestedRevert(inner, "nested revert"); + } + + function testRevertCountCallsThenReverts() public { + uint64 count = 2; + Reverter reverter = new Reverter(); + Dummy dummy = new Dummy(); + + vm.expectRevert("called a 
function and then reverted", count); + reverter.callThenRevert(dummy, "called a function and then reverted"); + reverter.callThenRevert(dummy, "called a function and then reverted"); + } + + function testFailRevertCountCallsThenReverts() public { + uint64 count = 2; + Reverter reverter = new Reverter(); + Dummy dummy = new Dummy(); + + vm.expectRevert("called a function and then reverted", count); + reverter.callThenRevert(dummy, "called a function and then reverted"); + reverter.callThenRevert(dummy, "wrong revert"); + } + + function testNoRevertCall() public { + uint64 count = 0; + Reverter reverter = new Reverter(); + Dummy dummy = new Dummy(); + + vm.expectRevert("called a function and then reverted", count); + reverter.callThenNoRevert(dummy); + } +} + +contract ExpectRevertCountWithReverter is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testRevertCountWithReverter() public { + uint64 count = 2; + Reverter reverter = new Reverter(); + vm.expectRevert(address(reverter), count); + reverter.revertWithMessage("revert"); + reverter.revertWithMessage("revert"); + } + + function testFailRevertCountWithReverter() public { + uint64 count = 2; + Reverter reverter = new Reverter(); + Reverter reverter2 = new Reverter(); + vm.expectRevert(address(reverter), count); + reverter.revertWithMessage("revert"); + reverter2.revertWithMessage("revert"); + } + + function testNoRevertWithReverter() public { + uint64 count = 0; + Reverter reverter = new Reverter(); + vm.expectRevert(address(reverter), count); + reverter.doNotRevert(); + } + + function testNoRevertWithWrongReverter() public { + uint64 count = 0; + Reverter reverter = new Reverter(); + Reverter reverter2 = new Reverter(); + vm.expectRevert(address(reverter), count); + reverter2.revertWithMessage("revert"); // revert from wrong reverter + } + + function testFailNoRevertWithReverter() public { + uint64 count = 0; + Reverter reverter = new Reverter(); + vm.expectRevert(address(reverter), count); + reverter.revertWithMessage("revert"); + } + + function testReverterCountWithData() public { + uint64 count = 2; + Reverter reverter = new Reverter(); + vm.expectRevert("revert", address(reverter), count); + reverter.revertWithMessage("revert"); + reverter.revertWithMessage("revert"); + } + + function testFailReverterCountWithWrongData() public { + uint64 count = 2; + Reverter reverter = new Reverter(); + vm.expectRevert("revert", address(reverter), count); + reverter.revertWithMessage("revert"); + reverter.revertWithMessage("wrong revert"); + } + + function testFailWrongReverterCountWithData() public { + uint64 count = 2; + Reverter reverter = new Reverter(); + Reverter reverter2 = new Reverter(); + vm.expectRevert("revert", address(reverter), count); + reverter.revertWithMessage("revert"); + reverter2.revertWithMessage("revert"); + } + + function testNoReverterCountWithData() public { + uint64 count = 0; + Reverter reverter = new Reverter(); + vm.expectRevert("revert", address(reverter), count); + reverter.doNotRevert(); + + vm.expectRevert("revert", address(reverter), count); + reverter.revertWithMessage("revert2"); + } +} diff --git a/testdata/default/cheats/RecordAccountAccesses.t.sol b/testdata/default/cheats/RecordAccountAccesses.t.sol index 98b5843b2..8de7bcdc5 100644 --- a/testdata/default/cheats/RecordAccountAccesses.t.sol +++ b/testdata/default/cheats/RecordAccountAccesses.t.sol @@ -3,6 +3,7 @@ pragma solidity ^0.8.18; import "ds-test/test.sol"; import "cheats/Vm.sol"; +import "../logs/console.sol"; /// @notice Helper contract 
with a construction that makes a call to itself then /// optionally reverts if zero-length data is passed @@ -261,6 +262,16 @@ contract RecordAccountAccessesTest is DSTest { two.write(bytes32(uint256(5678)), bytes32(uint256(123469))); two.write(bytes32(uint256(5678)), bytes32(uint256(1234))); + string memory diffs = cheats.getStateDiff(); + assertEq( + "0x5991A2dF15A8F6A256D3Ec51E99254Cd3fb576A9\n- state diff:\n@ 0x00000000000000000000000000000000000000000000000000000000000004d3: 0x0000000000000000000000000000000000000000000000000000000000000000 \xE2\x86\x92 0x000000000000000000000000000000000000000000000000000000000000162e\n\n0xc7183455a4C133Ae270771860664b6B7ec320bB1\n- state diff:\n@ 0x000000000000000000000000000000000000000000000000000000000000162e: 0x0000000000000000000000000000000000000000000000000000000000000000 \xE2\x86\x92 0x00000000000000000000000000000000000000000000000000000000000004d2\n\n", + diffs + ); + string memory diffsJson = cheats.getStateDiffJson(); + assertEq( + "{\"0x5991a2df15a8f6a256d3ec51e99254cd3fb576a9\":{\"label\":null,\"balanceDiff\":null,\"stateDiff\":{\"0x00000000000000000000000000000000000000000000000000000000000004d3\":{\"previousValue\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"newValue\":\"0x000000000000000000000000000000000000000000000000000000000000162e\"}}},\"0xc7183455a4c133ae270771860664b6b7ec320bb1\":{\"label\":null,\"balanceDiff\":null,\"stateDiff\":{\"0x000000000000000000000000000000000000000000000000000000000000162e\":{\"previousValue\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"newValue\":\"0x00000000000000000000000000000000000000000000000000000000000004d2\"}}}}", + diffsJson + ); Vm.AccountAccess[] memory called = filterExtcodesizeForLegacyTests(cheats.stopAndReturnStateDiff()); assertEq(called.length, 4, "incorrect length"); @@ -332,6 +343,15 @@ contract RecordAccountAccessesTest is DSTest { // contract calls to self in constructor SelfCaller caller = new SelfCaller{value: 2 ether}("hello2 world2"); + assertEq( + "0x000000000000000000000000000000000000162e\n- balance diff: 0 \xE2\x86\x92 1000000000000000000\n\n0x1d1499e622D69689cdf9004d05Ec547d650Ff211\n- balance diff: 0 \xE2\x86\x92 2000000000000000000\n\n", + cheats.getStateDiff() + ); + assertEq( + "{\"0x000000000000000000000000000000000000162e\":{\"label\":null,\"balanceDiff\":{\"previousValue\":\"0x0\",\"newValue\":\"0xde0b6b3a7640000\"},\"stateDiff\":{}},\"0x1d1499e622d69689cdf9004d05ec547d650ff211\":{\"label\":null,\"balanceDiff\":{\"previousValue\":\"0x0\",\"newValue\":\"0x1bc16d674ec80000\"},\"stateDiff\":{}}}", + cheats.getStateDiffJson() + ); + Vm.AccountAccess[] memory called = filterExtcodesizeForLegacyTests(cheats.stopAndReturnStateDiff()); assertEq(called.length, 6); assertEq( @@ -451,6 +471,14 @@ contract RecordAccountAccessesTest is DSTest { uint256 initBalance = address(this).balance; cheats.startStateDiffRecording(); try this.revertingCall{value: 1 ether}(address(1234), "") {} catch {} + assertEq( + "0x00000000000000000000000000000000000004d2\n- balance diff: 0 \xE2\x86\x92 100000000000000000\n\n", + cheats.getStateDiff() + ); + assertEq( + "{\"0x00000000000000000000000000000000000004d2\":{\"label\":null,\"balanceDiff\":{\"previousValue\":\"0x0\",\"newValue\":\"0x16345785d8a0000\"},\"stateDiff\":{}}}", + cheats.getStateDiffJson() + ); Vm.AccountAccess[] memory called = filterExtcodesizeForLegacyTests(cheats.stopAndReturnStateDiff()); assertEq(called.length, 2); assertEq( @@ -768,6 +796,15 @@ contract 
RecordAccountAccessesTest is DSTest { function testNestedStorage() public { cheats.startStateDiffRecording(); nestedStorer.run(); + cheats.label(address(nestedStorer), "NestedStorer"); + assertEq( + "0x2e234DAe75C793f67A35089C9d99245E1C58470b\nlabel: NestedStorer\n- state diff:\n@ 0x4566fa0cd03218c55bba914d793f5e6b9113172c1f684bb5f464c08c867e8977: 0x0000000000000000000000000000000000000000000000000000000000000000 \xE2\x86\x92 0x0000000000000000000000000000000000000000000000000000000000000001\n@ 0xbf57896b60daefa2c41de2feffecfc11debd98ea8c913a5170f60e53959ac00a: 0x0000000000000000000000000000000000000000000000000000000000000000 \xE2\x86\x92 0x0000000000000000000000000000000000000000000000000000000000000001\n@ 0xc664893a982d78bbeab379feef216ff517b7ea73626b280723be1ace370364cd: 0x0000000000000000000000000000000000000000000000000000000000000000 \xE2\x86\x92 0x0000000000000000000000000000000000000000000000000000000000000001\n@ 0xdc5330afa9872081253545dca3f448752688ff1b098b38c1abe4c4cdff4b0b0e: 0x0000000000000000000000000000000000000000000000000000000000000000 \xE2\x86\x92 0x0000000000000000000000000000000000000000000000000000000000000001\n\n", + cheats.getStateDiff() + ); + assertEq( + "{\"0x2e234dae75c793f67a35089c9d99245e1c58470b\":{\"label\":\"NestedStorer\",\"balanceDiff\":null,\"stateDiff\":{\"0x4566fa0cd03218c55bba914d793f5e6b9113172c1f684bb5f464c08c867e8977\":{\"previousValue\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"newValue\":\"0x0000000000000000000000000000000000000000000000000000000000000001\"},\"0xbf57896b60daefa2c41de2feffecfc11debd98ea8c913a5170f60e53959ac00a\":{\"previousValue\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"newValue\":\"0x0000000000000000000000000000000000000000000000000000000000000001\"},\"0xc664893a982d78bbeab379feef216ff517b7ea73626b280723be1ace370364cd\":{\"previousValue\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"newValue\":\"0x0000000000000000000000000000000000000000000000000000000000000001\"},\"0xdc5330afa9872081253545dca3f448752688ff1b098b38c1abe4c4cdff4b0b0e\":{\"previousValue\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"newValue\":\"0x0000000000000000000000000000000000000000000000000000000000000001\"}}}}", + cheats.getStateDiffJson() + ); Vm.AccountAccess[] memory called = filterExtcodesizeForLegacyTests(cheats.stopAndReturnStateDiff()); assertEq(called.length, 3, "incorrect account access length"); diff --git a/testdata/default/repros/Issue4232.t.sol b/testdata/default/repros/Issue4232.t.sol new file mode 100644 index 000000000..0ac6a77c7 --- /dev/null +++ b/testdata/default/repros/Issue4232.t.sol @@ -0,0 +1,31 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; +import "cheats/Vm.sol"; + +// https://github.com/foundry-rs/foundry/issues/4232 +contract Issue4232Test is DSTest { + Vm constant vm = Vm(HEVM_ADDRESS); + + function testFork() public { + // Smoke test, worked previously as well + vm.createSelectFork("sepolia", 7215400); + vm.assertFalse(block.prevrandao == 0); + + // Would previously fail with: + // [FAIL: backend: failed while inspecting; header validation error: `prevrandao` not set; `prevrandao` not set; ] setUp() (gas: 0) + // + // Related fix: + // Moonbeam | Moonbase | Moonriver | MoonbeamDev => { + // if env.block.prevrandao.is_none() { + // // + // env.block.prevrandao = Some(B256::random()); + // } + // } + // + // Note: public RPC node used for `moonbeam` 
discards state quickly so we need to fork against the latest block + vm.createSelectFork("moonbeam"); + vm.assertFalse(block.prevrandao == 0); + } +} diff --git a/testdata/default/repros/Issue8639.t.sol b/testdata/default/repros/Issue8639.t.sol new file mode 100644 index 000000000..6f0a7b526 --- /dev/null +++ b/testdata/default/repros/Issue8639.t.sol @@ -0,0 +1,43 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity ^0.8.18; + +import "ds-test/test.sol"; + +library ExternalLibrary { + function doWork(uint256 a) public returns (uint256) { + return a++; + } +} + +contract Counter { + uint256 public number; + + function setNumber(uint256 newNumber) public { + ExternalLibrary.doWork(1); + } + + function increment() public {} +} + +// https://github.com/foundry-rs/foundry/issues/8639 +contract Issue8639Test is DSTest { + Counter counter; + + function setUp() public { + counter = new Counter(); + } + + /// forge-config: default.fuzz.runs = 1000 + /// forge-config: default.fuzz.seed = '100' + function test_external_library_address(address test) public { + require(test != address(ExternalLibrary)); + } +} + +contract Issue8639AnotherTest is DSTest { + /// forge-config: default.fuzz.runs = 1000 + /// forge-config: default.fuzz.seed = '100' + function test_another_external_library_address(address test) public { + require(test != address(ExternalLibrary)); + } +}
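A closing note on the configurable CREATE2 deployer that threads through the script, runner, and verify changes above: the address asserted in the new `deployCreate2` broadcast test follows the standard keccak256(0xff ++ deployer ++ salt ++ keccak256(init_code)) derivation, which the Rust side computes with `Address::create2_from_code`. A small sketch under stated assumptions (the deployer shown is the well-known default deployer Forge uses when none is configured; the init code bytes are placeholders):

use alloy_primitives::{address, keccak256, Address, B256, U256};

fn main() {
    // Default CREATE2 deployer used when no --create2-deployer override is given.
    let deployer: Address = address!("4e59b44847b379578588920ca78fbf26c0b4956c");
    let salt = B256::from(U256::from(1338u64)); // same salt as the Solidity test
    let init_code: &[u8] = &[0x60, 0x80, 0x60, 0x40]; // placeholder bytes only

    // keccak256(0xff ++ deployer ++ salt ++ keccak256(init_code)), last 20 bytes.
    let predicted = deployer.create2_from_code(salt, init_code);
    let manual = deployer.create2(salt, keccak256(init_code));
    assert_eq!(predicted, manual);
    println!("predicted CREATE2 address: {predicted}");
}

The same helper is what `ScriptRunner` now calls on the configured `create2_deployer` instead of the hard-coded `DEFAULT_CREATE2_DEPLOYER` constant.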