diff --git a/Cargo.lock b/Cargo.lock index aaa5f5ef207a..56af557d64da 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3569,6 +3569,7 @@ dependencies = [ "k256", "p256", "parking_lot", + "proptest", "rand", "revm", "rustc-hash 2.0.0", diff --git a/Cargo.toml b/Cargo.toml index ac7bee7e0f3d..a64a6688cbfa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -261,3 +261,5 @@ tower = "0.4" tower-http = "0.5" # soldeer soldeer = "0.2.17" + +proptest = "1" diff --git a/crates/cheatcodes/Cargo.toml b/crates/cheatcodes/Cargo.toml index 09cd319b8d7d..f04254b1812b 100644 --- a/crates/cheatcodes/Cargo.toml +++ b/crates/cheatcodes/Cargo.toml @@ -53,3 +53,6 @@ semver.workspace = true rustc-hash.workspace = true dialoguer = "0.11.0" rand = "0.8" + +[dev-dependencies] +proptest.workspace = true \ No newline at end of file diff --git a/crates/cheatcodes/assets/cheatcodes.json b/crates/cheatcodes/assets/cheatcodes.json index f4fe4f91d643..9c7e36965be0 100644 --- a/crates/cheatcodes/assets/cheatcodes.json +++ b/crates/cheatcodes/assets/cheatcodes.json @@ -5791,6 +5791,66 @@ "status": "stable", "safety": "safe" }, + { + "func": { + "id": "parseJsonTypeArray", + "description": "Parses a string of JSON data at `key` and coerces it to type array corresponding to `typeDescription`.", + "declaration": "function parseJsonTypeArray(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);", + "visibility": "external", + "mutability": "pure", + "signature": "parseJsonTypeArray(string,string,string)", + "selector": "0x0175d535", + "selectorBytes": [ + 1, + 117, + 213, + 53 + ] + }, + "group": "json", + "status": "stable", + "safety": "safe" + }, + { + "func": { + "id": "parseJsonType_0", + "description": "Parses a string of JSON data and coerces it to type corresponding to `typeDescription`.", + "declaration": "function parseJsonType(string calldata json, string calldata typeDescription) external pure returns (bytes memory);", + "visibility": 
"external", + "mutability": "pure", + "signature": "parseJsonType(string,string)", + "selector": "0xa9da313b", + "selectorBytes": [ + 169, + 218, + 49, + 59 + ] + }, + "group": "json", + "status": "stable", + "safety": "safe" + }, + { + "func": { + "id": "parseJsonType_1", + "description": "Parses a string of JSON data at `key` and coerces it to type corresponding to `typeDescription`.", + "declaration": "function parseJsonType(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory);", + "visibility": "external", + "mutability": "pure", + "signature": "parseJsonType(string,string,string)", + "selector": "0xe3f5ae33", + "selectorBytes": [ + 227, + 245, + 174, + 51 + ] + }, + "group": "json", + "status": "stable", + "safety": "safe" + }, { "func": { "id": "parseJsonUint", @@ -7351,6 +7411,46 @@ "status": "stable", "safety": "safe" }, + { + "func": { + "id": "serializeJsonType_0", + "description": "See `serializeJson`.", + "declaration": "function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json);", + "visibility": "external", + "mutability": "pure", + "signature": "serializeJsonType(string,bytes)", + "selector": "0x6d4f96a6", + "selectorBytes": [ + 109, + 79, + 150, + 166 + ] + }, + "group": "json", + "status": "stable", + "safety": "safe" + }, + { + "func": { + "id": "serializeJsonType_1", + "description": "See `serializeJson`.", + "declaration": "function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json);", + "visibility": "external", + "mutability": "", + "signature": "serializeJsonType(string,string,string,bytes)", + "selector": "0x6f93bccb", + "selectorBytes": [ + 111, + 147, + 188, + 203 + ] + }, + "group": "json", + "status": "stable", + "safety": "safe" + }, { "func": { "id": "serializeString_0", diff --git 
a/crates/cheatcodes/spec/src/vm.rs b/crates/cheatcodes/spec/src/vm.rs index 5bb36619d3de..54f1fe8fe77f 100644 --- a/crates/cheatcodes/spec/src/vm.rs +++ b/crates/cheatcodes/spec/src/vm.rs @@ -1878,6 +1878,19 @@ interface Vm { pure returns (bytes32[] memory); + /// Parses a string of JSON data and coerces it to type corresponding to `typeDescription`. + #[cheatcode(group = Json)] + function parseJsonType(string calldata json, string calldata typeDescription) external pure returns (bytes memory); + /// Parses a string of JSON data at `key` and coerces it to type corresponding to `typeDescription`. + #[cheatcode(group = Json)] + function parseJsonType(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory); + /// Parses a string of JSON data at `key` and coerces it to type array corresponding to `typeDescription`. + #[cheatcode(group = Json)] + function parseJsonTypeArray(string calldata json, string calldata key, string calldata typeDescription) + external + pure + returns (bytes memory); + /// Returns an array of all the keys in a JSON object. #[cheatcode(group = Json)] function parseJsonKeys(string calldata json, string calldata key) external pure returns (string[] memory keys); @@ -1968,6 +1981,17 @@ interface Vm { function serializeBytes(string calldata objectKey, string calldata valueKey, bytes[] calldata values) external returns (string memory json); + /// See `serializeJson`. + #[cheatcode(group = Json)] + function serializeJsonType(string calldata typeDescription, bytes memory value) + external + pure + returns (string memory json); + /// See `serializeJson`. + #[cheatcode(group = Json)] + function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) + external + returns (string memory json); // NOTE: Please read https://book.getfoundry.sh/cheatcodes/write-json to understand how // to use the JSON writing cheats. 
diff --git a/crates/cheatcodes/src/error.rs b/crates/cheatcodes/src/error.rs index 5509efa2d162..26aba7348088 100644 --- a/crates/cheatcodes/src/error.rs +++ b/crates/cheatcodes/src/error.rs @@ -286,6 +286,7 @@ macro_rules! impl_from { impl_from!( alloy_sol_types::Error, + alloy_dyn_abi::Error, alloy_primitives::SignatureError, FsPathError, hex::FromHexError, diff --git a/crates/cheatcodes/src/json.rs b/crates/cheatcodes/src/json.rs index eea16997172a..48c14e2a3ddc 100644 --- a/crates/cheatcodes/src/json.rs +++ b/crates/cheatcodes/src/json.rs @@ -1,13 +1,13 @@ //! Implementations of [`Json`](spec::Group::Json) cheatcodes. use crate::{string, Cheatcode, Cheatcodes, Result, Vm::*}; -use alloy_dyn_abi::{DynSolType, DynSolValue}; +use alloy_dyn_abi::{eip712_parser::EncodeType, DynSolType, DynSolValue, Resolver}; use alloy_primitives::{hex, Address, B256, I256}; use alloy_sol_types::SolValue; use foundry_common::fs; use foundry_config::fs_permissions::FsAccessKind; -use serde_json::Value; -use std::{borrow::Cow, collections::BTreeMap, fmt::Write}; +use serde_json::{Map, Value}; +use std::{borrow::Cow, collections::BTreeMap}; impl Cheatcode for keyExistsCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { @@ -47,7 +47,7 @@ impl Cheatcode for parseJsonUintCall { impl Cheatcode for parseJsonUintArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Uint(256)) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Uint(256)))) } } @@ -61,7 +61,7 @@ impl Cheatcode for parseJsonIntCall { impl Cheatcode for parseJsonIntArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Int(256)) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Int(256)))) } } @@ -75,7 +75,7 @@ impl Cheatcode for parseJsonBoolCall { impl Cheatcode for parseJsonBoolArrayCall { fn apply(&self, 
_state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Bool) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Bool))) } } @@ -89,7 +89,7 @@ impl Cheatcode for parseJsonAddressCall { impl Cheatcode for parseJsonAddressArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Address) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Address))) } } @@ -103,7 +103,7 @@ impl Cheatcode for parseJsonStringCall { impl Cheatcode for parseJsonStringArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::String) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::String))) } } @@ -117,7 +117,7 @@ impl Cheatcode for parseJsonBytesCall { impl Cheatcode for parseJsonBytesArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::Bytes) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::Bytes))) } } @@ -131,7 +131,29 @@ impl Cheatcode for parseJsonBytes32Call { impl Cheatcode for parseJsonBytes32ArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { json, key } = self; - parse_json_coerce(json, key, &DynSolType::FixedBytes(32)) + parse_json_coerce(json, key, &DynSolType::Array(Box::new(DynSolType::FixedBytes(32)))) + } +} + +impl Cheatcode for parseJsonType_0Call { + fn apply(&self, _state: &mut Cheatcodes) -> Result { + let Self { json, typeDescription } = self; + parse_json_coerce(json, "$", &resolve_type(typeDescription)?).map(|v| v.abi_encode()) + } +} + +impl Cheatcode for parseJsonType_1Call { + fn apply(&self, _state: &mut Cheatcodes) -> Result { + let Self { json, key, typeDescription } = self; + parse_json_coerce(json, key, &resolve_type(typeDescription)?).map(|v| 
v.abi_encode()) + } +} + +impl Cheatcode for parseJsonTypeArrayCall { + fn apply(&self, _state: &mut Cheatcodes) -> Result { + let Self { json, key, typeDescription } = self; + let ty = resolve_type(typeDescription)?; + parse_json_coerce(json, key, &DynSolType::Array(Box::new(ty))).map(|v| v.abi_encode()) } } @@ -145,106 +167,162 @@ impl Cheatcode for parseJsonKeysCall { impl Cheatcode for serializeJsonCall { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, value } = self; - serialize_json(state, objectKey, None, value) + *state.serialized_jsons.entry(objectKey.into()).or_default() = serde_json::from_str(value)?; + Ok(value.abi_encode()) } } impl Cheatcode for serializeBool_0Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, value } = self; - serialize_json(state, objectKey, Some(valueKey), &value.to_string()) + serialize_json(state, objectKey, valueKey, (*value).into()) } } impl Cheatcode for serializeUint_0Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, value } = self; - serialize_json(state, objectKey, Some(valueKey), &value.to_string()) + serialize_json(state, objectKey, valueKey, (*value).into()) } } impl Cheatcode for serializeInt_0Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, value } = self; - serialize_json(state, objectKey, Some(valueKey), &value.to_string()) + serialize_json(state, objectKey, valueKey, (*value).into()) } } impl Cheatcode for serializeAddress_0Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, value } = self; - serialize_json(state, objectKey, Some(valueKey), &value.to_string()) + serialize_json(state, objectKey, valueKey, (*value).into()) } } impl Cheatcode for serializeBytes32_0Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, value } = self; - serialize_json(state, objectKey, Some(valueKey), 
&value.to_string()) + serialize_json(state, objectKey, valueKey, DynSolValue::FixedBytes(*value, 32)) } } impl Cheatcode for serializeString_0Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, value } = self; - serialize_json(state, objectKey, Some(valueKey), value) + serialize_json(state, objectKey, valueKey, value.clone().into()) } } impl Cheatcode for serializeBytes_0Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, value } = self; - serialize_json(state, objectKey, Some(valueKey), &hex::encode_prefixed(value)) + serialize_json(state, objectKey, valueKey, value.to_vec().into()) } } impl Cheatcode for serializeBool_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, values } = self; - serialize_json(state, objectKey, Some(valueKey), &array_str(values, false)) + serialize_json( + state, + objectKey, + valueKey, + DynSolValue::Array(values.iter().copied().map(DynSolValue::Bool).collect()), + ) } } impl Cheatcode for serializeUint_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, values } = self; - serialize_json(state, objectKey, Some(valueKey), &array_str(values, false)) + serialize_json( + state, + objectKey, + valueKey, + DynSolValue::Array(values.iter().map(|v| DynSolValue::Uint(*v, 256)).collect()), + ) } } impl Cheatcode for serializeInt_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, values } = self; - serialize_json(state, objectKey, Some(valueKey), &array_str(values, false)) + serialize_json( + state, + objectKey, + valueKey, + DynSolValue::Array(values.iter().map(|v| DynSolValue::Int(*v, 256)).collect()), + ) } } impl Cheatcode for serializeAddress_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, values } = self; - serialize_json(state, objectKey, Some(valueKey), &array_str(values, true)) + serialize_json( + 
state, + objectKey, + valueKey, + DynSolValue::Array(values.iter().copied().map(DynSolValue::Address).collect()), + ) } } impl Cheatcode for serializeBytes32_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, values } = self; - serialize_json(state, objectKey, Some(valueKey), &array_str(values, true)) + serialize_json( + state, + objectKey, + valueKey, + DynSolValue::Array(values.iter().map(|v| DynSolValue::FixedBytes(*v, 32)).collect()), + ) } } impl Cheatcode for serializeString_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, values } = self; - serialize_json(state, objectKey, Some(valueKey), &array_str(values, true)) + serialize_json( + state, + objectKey, + valueKey, + DynSolValue::Array(values.iter().cloned().map(DynSolValue::String).collect()), + ) } } impl Cheatcode for serializeBytes_1Call { fn apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, values } = self; - let values = values.iter().map(hex::encode_prefixed); - serialize_json(state, objectKey, Some(valueKey), &array_str(values, true)) + serialize_json( + state, + objectKey, + valueKey, + DynSolValue::Array( + values.iter().cloned().map(Into::into).map(DynSolValue::Bytes).collect(), + ), + ) + } +} + +impl Cheatcode for serializeJsonType_0Call { + fn apply(&self, _state: &mut Cheatcodes) -> Result { + let Self { typeDescription, value } = self; + let ty = resolve_type(typeDescription)?; + let value = ty.abi_decode(value)?; + let value = serialize_value_as_json(value)?; + Ok(value.to_string().abi_encode()) + } +} + +impl Cheatcode for serializeJsonType_1Call { + fn apply(&self, state: &mut Cheatcodes) -> Result { + let Self { objectKey, valueKey, typeDescription, value } = self; + let ty = resolve_type(typeDescription)?; + let value = ty.abi_decode(value)?; + serialize_json(state, objectKey, valueKey, value) } } @@ -252,7 +330,7 @@ impl Cheatcode for serializeUintToHexCall { fn 
apply(&self, state: &mut Cheatcodes) -> Result { let Self { objectKey, valueKey, value } = self; let hex = format!("0x{value:x}"); - serialize_json(state, objectKey, Some(valueKey), &hex) + serialize_json(state, objectKey, valueKey, hex.into()) } } @@ -298,29 +376,75 @@ pub(super) fn parse_json(json: &str, path: &str) -> Result { } pub(super) fn parse_json_coerce(json: &str, path: &str, ty: &DynSolType) -> Result { - let value = parse_json_str(json)?; - let values = select(&value, path)?; - ensure!(!values.is_empty(), "no matching value found at {path:?}"); + let json = parse_json_str(json)?; + let [value] = select(&json, path)?[..] else { + bail!("path {path:?} must return exactly one JSON value"); + }; - ensure!( - values.iter().all(|value| !value.is_object()), - "values at {path:?} must not be JSON objects" - ); + parse_json_as(value, ty).map(|v| v.abi_encode()) +} +/// Parses given [serde_json::Value] as a [DynSolValue]. +pub(super) fn parse_json_as(value: &Value, ty: &DynSolType) -> Result { let to_string = |v: &Value| { let mut s = v.to_string(); s.retain(|c: char| c != '"'); s }; - if let Some(array) = values[0].as_array() { - debug!(target: "cheatcodes", %ty, "parsing array"); - string::parse_array(array.iter().map(to_string), ty) - } else { - debug!(target: "cheatcodes", %ty, "parsing string"); - string::parse(&to_string(values[0]), ty) + + match (value, ty) { + (Value::Array(array), ty) => parse_json_array(array, ty), + (Value::Object(object), ty) => parse_json_map(object, ty), + (Value::String(s), DynSolType::String) => Ok(DynSolValue::String(s.clone())), + _ => string::parse_value(&to_string(value), ty), + } +} + +pub(super) fn parse_json_array(array: &[Value], ty: &DynSolType) -> Result { + match ty { + DynSolType::Tuple(types) => { + ensure!(array.len() == types.len(), "array length mismatch"); + let values = array + .iter() + .zip(types) + .map(|(e, ty)| parse_json_as(e, ty)) + .collect::>>()?; + + Ok(DynSolValue::Tuple(values)) + } + 
DynSolType::Array(inner) => { + let values = + array.iter().map(|e| parse_json_as(e, inner)).collect::>>()?; + Ok(DynSolValue::Array(values)) + } + DynSolType::FixedArray(inner, len) => { + ensure!(array.len() == *len, "array length mismatch"); + let values = + array.iter().map(|e| parse_json_as(e, inner)).collect::>>()?; + Ok(DynSolValue::FixedArray(values)) + } + _ => bail!("expected {ty}, found array"), } } +pub(super) fn parse_json_map(map: &Map, ty: &DynSolType) -> Result { + let Some((name, fields, types)) = ty.as_custom_struct() else { + bail!("expected {ty}, found JSON object"); + }; + + let mut values = Vec::with_capacity(fields.len()); + for (field, ty) in fields.iter().zip(types.iter()) { + let Some(value) = map.get(field) else { bail!("field {field:?} not found in JSON object") }; + values.push(parse_json_as(value, ty)?); + } + + Ok(DynSolValue::CustomStruct { + name: name.to_string(), + prop_names: fields.to_vec(), + tuple: values, + }) +} + pub(super) fn parse_json_keys(json: &str, key: &str) -> Result { let json = parse_json_str(json)?; let values = select(&json, key)?; @@ -376,7 +500,8 @@ pub(super) fn canonicalize_json_path(path: &str) -> Cow<'_, str> { } } -/// Converts a JSON [`Value`] to a [`DynSolValue`]. +/// Converts a JSON [`Value`] to a [`DynSolValue`] by trying to guess encoded type. For safer +/// decoding, use [`parse_json_as`]. /// /// The function is designed to run recursively, so that in case of an object /// it will call itself to convert each of it's value and encode the whole as a @@ -461,7 +586,50 @@ pub(super) fn json_value_to_token(value: &Value) -> Result { } } -/// Serializes a key:value pair to a specific object. If the key is Some(valueKey), the value is +/// Serializes given [DynSolValue] into a [serde_json::Value]. 
+fn serialize_value_as_json(value: DynSolValue) -> Result { + match value { + DynSolValue::Bool(b) => Ok(Value::Bool(b)), + DynSolValue::String(s) => { + // Strings are allowed to contain stringified JSON objects, so we try to parse it like + // one first. + if let Ok(map) = serde_json::from_str(&s) { + Ok(Value::Object(map)) + } else { + Ok(Value::String(s)) + } + } + DynSolValue::Bytes(b) => Ok(Value::String(hex::encode_prefixed(b))), + DynSolValue::FixedBytes(b, size) => Ok(Value::String(hex::encode_prefixed(&b[..size]))), + DynSolValue::Int(i, _) => { + // let serde handle number parsing + let n = serde_json::from_str(&i.to_string())?; + Ok(Value::Number(n)) + } + DynSolValue::Uint(i, _) => { + // let serde handle number parsing + let n = serde_json::from_str(&i.to_string())?; + Ok(Value::Number(n)) + } + DynSolValue::Address(a) => Ok(Value::String(a.to_string())), + DynSolValue::Array(e) | DynSolValue::FixedArray(e) => { + Ok(Value::Array(e.into_iter().map(serialize_value_as_json).collect::>()?)) + } + DynSolValue::CustomStruct { name: _, prop_names, tuple } => { + let values = + tuple.into_iter().map(serialize_value_as_json).collect::>>()?; + let map = prop_names.into_iter().zip(values).collect(); + + Ok(Value::Object(map)) + } + DynSolValue::Tuple(values) => Ok(Value::Array( + values.into_iter().map(serialize_value_as_json).collect::>()?, + )), + DynSolValue::Function(_) => bail!("cannot serialize function pointer"), + } +} + +/// Serializes a key:value pair to a specific object. If the key is valueKey, the value is /// expected to be an object, which will be set as the root object for the provided object key, /// overriding the whole root object if the object key already exists. By calling this function /// multiple times, the user can serialize multiple KV pairs to the same object. 
The value can be of @@ -472,44 +640,99 @@ pub(super) fn json_value_to_token(value: &Value) -> Result { fn serialize_json( state: &mut Cheatcodes, object_key: &str, - value_key: Option<&str>, - value: &str, + value_key: &str, + value: DynSolValue, ) -> Result { + let value = serialize_value_as_json(value)?; let map = state.serialized_jsons.entry(object_key.into()).or_default(); - if let Some(value_key) = value_key { - let parsed_value = - serde_json::from_str(value).unwrap_or_else(|_| Value::String(value.into())); - map.insert(value_key.into(), parsed_value); - } else { - *map = serde_json::from_str(value) - .map_err(|err| fmt_err!("failed to parse JSON object: {err}"))?; - } + map.insert(value_key.into(), value); let stringified = serde_json::to_string(map).unwrap(); Ok(stringified.abi_encode()) } -fn array_str(values: I, quoted: bool) -> String -where - I: IntoIterator, - I::IntoIter: ExactSizeIterator, - T: std::fmt::Display, -{ - let iter = values.into_iter(); - let mut s = String::with_capacity(2 + iter.len() * 32); - s.push('['); - for (i, item) in iter.enumerate() { - if i > 0 { - s.push(','); +/// Resolves a [DynSolType] from user input. +fn resolve_type(type_description: &str) -> Result { + if let Ok(ty) = DynSolType::parse(type_description) { + return Ok(ty); + }; + + if let Ok(encoded) = EncodeType::parse(type_description) { + let main_type = encoded.types[0].type_name; + let mut resolver = Resolver::default(); + for t in encoded.types { + resolver.ingest(t.to_owned()); + } + + return Ok(resolver.resolve(main_type)?) + }; + + bail!("type description should be a valid Solidity type or a EIP712 `encodeType` string") +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::FixedBytes; + use proptest::strategy::Strategy; + + fn contains_tuple(value: &DynSolValue) -> bool { + match value { + DynSolValue::Tuple(_) | DynSolValue::CustomStruct { .. 
} => true, + DynSolValue::Array(v) | DynSolValue::FixedArray(v) => { + v.first().map_or(false, contains_tuple) + } + _ => false, + } + } + + /// [DynSolValue::Bytes] of length 32 and 20 are converted to [DynSolValue::FixedBytes] and + /// [DynSolValue::Address] respectively. Thus, we can't distinguish between address and bytes of + /// length 20 during decoding. Because of that, there are issues with handling of arrays of + /// those types. + fn fixup_guessable(value: DynSolValue) -> DynSolValue { + match value { + DynSolValue::Array(mut v) | DynSolValue::FixedArray(mut v) => { + if let Some(DynSolValue::Bytes(_)) = v.first() { + v.retain(|v| { + let len = v.as_bytes().unwrap().len(); + len != 32 && len != 20 + }) + } + DynSolValue::Array(v.into_iter().map(fixup_guessable).collect()) + } + DynSolValue::FixedBytes(v, _) => DynSolValue::FixedBytes(v, 32), + DynSolValue::Bytes(v) if v.len() == 32 => { + DynSolValue::FixedBytes(FixedBytes::from_slice(&v), 32) + } + DynSolValue::Bytes(v) if v.len() == 20 => DynSolValue::Address(Address::from_slice(&v)), + _ => value, } + } + + fn guessable_types() -> impl proptest::strategy::Strategy { + proptest::arbitrary::any::() + .prop_map(fixup_guessable) + .prop_filter("tuples are not supported", |v| !contains_tuple(v)) + .prop_filter("filter out values without type", |v| v.as_type().is_some()) + } - if quoted { - s.push('"'); + // Tests to ensure that conversion [DynSolValue] -> [serde_json::Value] -> [DynSolValue] + proptest::proptest! { + #[test] + fn test_json_roundtrip_guessed(v in guessable_types()) { + let json = serialize_value_as_json(v.clone()).unwrap(); + let value = json_value_to_token(&json).unwrap(); + + // do additional abi_encode -> abi_decode to avoid zero signed integers getting decoded as unsigned and causing assert_eq to fail. 
+ let decoded = v.as_type().unwrap().abi_decode(&value.abi_encode()).unwrap(); + assert_eq!(decoded, v); } - write!(s, "{item}").unwrap(); - if quoted { - s.push('"'); + + #[test] + fn test_json_roundtrip(v in proptest::arbitrary::any::().prop_filter("filter out values without type", |v| v.as_type().is_some())) { + let json = serialize_value_as_json(v.clone()).unwrap(); + let value = parse_json_as(&json, &v.as_type().unwrap()).unwrap(); + assert_eq!(value, v); } } - s.push(']'); - s } diff --git a/crates/cheatcodes/src/string.rs b/crates/cheatcodes/src/string.rs index c808bc04f6ce..7b7d9b505356 100644 --- a/crates/cheatcodes/src/string.rs +++ b/crates/cheatcodes/src/string.rs @@ -166,7 +166,7 @@ where } #[instrument(target = "cheatcodes", level = "debug", skip(ty), fields(%ty), ret)] -fn parse_value(s: &str, ty: &DynSolType) -> Result { +pub(super) fn parse_value(s: &str, ty: &DynSolType) -> Result { match ty.coerce_str(s) { Ok(value) => Ok(value), Err(e) => match parse_value_fallback(s, ty) { diff --git a/crates/cheatcodes/src/toml.rs b/crates/cheatcodes/src/toml.rs index e1827dbef194..e83a18390547 100644 --- a/crates/cheatcodes/src/toml.rs +++ b/crates/cheatcodes/src/toml.rs @@ -45,7 +45,7 @@ impl Cheatcode for parseTomlUintCall { impl Cheatcode for parseTomlUintArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { toml, key } = self; - parse_toml_coerce(toml, key, &DynSolType::Uint(256)) + parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::Uint(256)))) } } @@ -59,7 +59,7 @@ impl Cheatcode for parseTomlIntCall { impl Cheatcode for parseTomlIntArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { toml, key } = self; - parse_toml_coerce(toml, key, &DynSolType::Int(256)) + parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::Int(256)))) } } @@ -73,7 +73,7 @@ impl Cheatcode for parseTomlBoolCall { impl Cheatcode for parseTomlBoolArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result 
{ let Self { toml, key } = self; - parse_toml_coerce(toml, key, &DynSolType::Bool) + parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::Bool))) } } @@ -87,7 +87,7 @@ impl Cheatcode for parseTomlAddressCall { impl Cheatcode for parseTomlAddressArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { toml, key } = self; - parse_toml_coerce(toml, key, &DynSolType::Address) + parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::Address))) } } @@ -101,7 +101,7 @@ impl Cheatcode for parseTomlStringCall { impl Cheatcode for parseTomlStringArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { toml, key } = self; - parse_toml_coerce(toml, key, &DynSolType::String) + parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::String))) } } @@ -115,7 +115,7 @@ impl Cheatcode for parseTomlBytesCall { impl Cheatcode for parseTomlBytesArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { toml, key } = self; - parse_toml_coerce(toml, key, &DynSolType::Bytes) + parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::Bytes))) } } @@ -129,7 +129,7 @@ impl Cheatcode for parseTomlBytes32Call { impl Cheatcode for parseTomlBytes32ArrayCall { fn apply(&self, _state: &mut Cheatcodes) -> Result { let Self { toml, key } = self; - parse_toml_coerce(toml, key, &DynSolType::FixedBytes(32)) + parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::FixedBytes(32)))) } } diff --git a/crates/config/src/bind_json.rs b/crates/config/src/bind_json.rs new file mode 100644 index 000000000000..71d8d41aa911 --- /dev/null +++ b/crates/config/src/bind_json.rs @@ -0,0 +1,27 @@ +use crate::filter::GlobMatcher; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; + +/// Contains the config for `forge bind-json` +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct BindJsonConfig { + /// Path for the generated bindings file. 
+ pub out: PathBuf, + /// Globs to include. + /// + /// If provided, only the files matching the globs will be included. Otherwise, defaults to + /// including all project files. + pub include: Vec, + /// Globs to ignore + pub exclude: Vec, +} + +impl Default for BindJsonConfig { + fn default() -> Self { + Self { + out: PathBuf::from("utils/JsonBindings.sol"), + exclude: Vec::new(), + include: Vec::new(), + } + } +} diff --git a/crates/config/src/filter.rs b/crates/config/src/filter.rs index b7b3a3ab3ace..0d00b613f26f 100644 --- a/crates/config/src/filter.rs +++ b/crates/config/src/filter.rs @@ -2,6 +2,7 @@ use core::fmt; use foundry_compilers::FileFilter; +use serde::{Deserialize, Serialize}; use std::{ convert::Infallible, path::{Path, PathBuf}, @@ -96,6 +97,27 @@ impl From for GlobMatcher { } } +impl Serialize for GlobMatcher { + fn serialize(&self, serializer: S) -> Result { + self.glob().glob().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for GlobMatcher { + fn deserialize>(deserializer: D) -> Result { + let s = String::deserialize(deserializer)?; + s.parse().map_err(serde::de::Error::custom) + } +} + +impl PartialEq for GlobMatcher { + fn eq(&self, other: &Self) -> bool { + self.as_str() == other.as_str() + } +} + +impl Eq for GlobMatcher {} + /// Bundles multiple `SkipBuildFilter` into a single `FileFilter` #[derive(Clone, Debug)] pub struct SkipBuildFilters { diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index 89924d5c8b80..e59c44a57b95 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -16,6 +16,7 @@ use figment::{ value::{Dict, Map, Value}, Error, Figment, Metadata, Profile, Provider, }; +use filter::GlobMatcher; use foundry_compilers::{ artifacts::{ output_selection::{ContractOutputSelection, OutputSelection}, @@ -110,6 +111,9 @@ use soldeer::SoldeerConfig; mod vyper; use vyper::VyperConfig; +mod bind_json; +use bind_json::BindJsonConfig; + /// Foundry configuration /// /// # Defaults @@ -178,8 +182,7 
@@ pub struct Config { /// additional solc include paths for `--include-path` pub include_paths: Vec, /// glob patterns to skip - #[serde(with = "from_vec_glob")] - pub skip: Vec, + pub skip: Vec, /// whether to force a `project.clean()` pub force: bool, /// evm version to use @@ -389,6 +392,8 @@ pub struct Config { pub fmt: FormatterConfig, /// Configuration for `forge doc` pub doc: DocConfig, + /// Configuration for `forge bind-json` + pub bind_json: BindJsonConfig, /// Configures the permissions of cheat codes that touch the file system. /// /// This includes what operations can be executed (read, write) @@ -484,6 +489,7 @@ impl Config { "labels", "dependencies", "vyper", + "bind_json", ]; /// File name of config toml file @@ -1951,30 +1957,6 @@ pub(crate) mod from_opt_glob { } } -/// Ser/de `globset::Glob` explicitly to handle `Option` properly -pub(crate) mod from_vec_glob { - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - - pub fn serialize(value: &[globset::Glob], serializer: S) -> Result - where - S: Serializer, - { - let value = value.iter().map(|g| g.glob()).collect::>(); - value.serialize(serializer) - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - let s: Vec = Vec::deserialize(deserializer)?; - s.into_iter() - .map(|s| globset::Glob::new(&s)) - .collect::, _>>() - .map_err(serde::de::Error::custom) - } -} - /// A helper wrapper around the root path used during Config detection #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] #[serde(transparent)] @@ -2142,6 +2124,7 @@ impl Default for Config { build_info_path: None, fmt: Default::default(), doc: Default::default(), + bind_json: Default::default(), labels: Default::default(), unchecked_cheatcode_artifacts: false, create2_library_salt: Self::DEFAULT_CREATE2_LIBRARY_SALT, diff --git a/crates/evm/evm/Cargo.toml b/crates/evm/evm/Cargo.toml index 39b48d1107ba..5250ea448ac4 100644 --- 
a/crates/evm/evm/Cargo.toml +++ b/crates/evm/evm/Cargo.toml @@ -46,7 +46,7 @@ revm-inspectors.workspace = true eyre.workspace = true parking_lot.workspace = true -proptest = "1" +proptest.workspace = true thiserror.workspace = true tracing.workspace = true indicatif = "0.17" diff --git a/crates/evm/fuzz/Cargo.toml b/crates/evm/fuzz/Cargo.toml index 0106b1d2704c..124514b561fe 100644 --- a/crates/evm/fuzz/Cargo.toml +++ b/crates/evm/fuzz/Cargo.toml @@ -42,7 +42,7 @@ revm = { workspace = true, features = [ eyre .workspace = true itertools.workspace = true parking_lot.workspace = true -proptest = "1" +proptest.workspace = true rand.workspace = true serde.workspace = true thiserror.workspace = true diff --git a/crates/forge/Cargo.toml b/crates/forge/Cargo.toml index 790f8da9ef35..02bc6d0c8a64 100644 --- a/crates/forge/Cargo.toml +++ b/crates/forge/Cargo.toml @@ -40,7 +40,7 @@ revm-inspectors.workspace = true comfy-table = "7" eyre.workspace = true -proptest = "1" +proptest.workspace = true rayon.workspace = true serde.workspace = true tracing.workspace = true diff --git a/crates/forge/bin/cmd/bind_json.rs b/crates/forge/bin/cmd/bind_json.rs new file mode 100644 index 000000000000..bd2d0ea30d67 --- /dev/null +++ b/crates/forge/bin/cmd/bind_json.rs @@ -0,0 +1,539 @@ +use super::eip712::Resolver; +use clap::{Parser, ValueHint}; +use eyre::Result; +use foundry_cli::{opts::CoreBuildArgs, utils::LoadConfig}; +use foundry_common::{compile::with_compilation_reporter, fs}; +use foundry_compilers::{ + artifacts::{ + output_selection::OutputSelection, ContractDefinitionPart, Source, SourceUnit, + SourceUnitPart, Sources, + }, + multi::{MultiCompilerLanguage, MultiCompilerParsedSource}, + project::ProjectCompiler, + solc::SolcLanguage, + CompilerSettings, Graph, Project, +}; +use foundry_config::Config; +use itertools::Itertools; +use rayon::prelude::*; +use solang_parser::pt as solang_ast; +use std::{ + collections::{BTreeMap, BTreeSet}, + fmt, + fmt::Write, + path::PathBuf, + 
sync::Arc, +}; + +foundry_config::impl_figment_convert!(BindJsonArgs, opts); + +/// CLI arguments for `forge bind-json`. +#[derive(Clone, Debug, Parser)] +pub struct BindJsonArgs { + /// The path to write bindings to. + #[arg(value_hint = ValueHint::FilePath, value_name = "PATH")] + pub out: Option, + + #[command(flatten)] + opts: CoreBuildArgs, +} + +impl BindJsonArgs { + pub fn run(self) -> Result<()> { + self.preprocess()?.compile()?.find_structs()?.resolve_imports_and_aliases().write()?; + + Ok(()) + } + + /// In cases when user moves/renames/deletes structs, compiler will start failing because + /// generated bindings will be referencing non-existing structs or importing non-existing + /// files. + /// + /// Because of that, we need a little bit of preprocessing to make sure that bindings will still + /// be valid. + /// + /// The strategy is: + /// 1. Replace bindings file with an empty one to get rid of potentially invalid imports. + /// 2. Remove all function bodies to get rid of `serialize`/`deserialize` invocations. + /// 3. Remove all `immutable` attributes to avoid errors because of erased constructors + /// initializing them. + /// + /// After that we'll still have enough information for bindings but compilation should succeed + /// in most of the cases. + fn preprocess(self) -> Result { + let config = self.try_load_config_emit_warnings()?; + let project = config.create_project(false, true)?; + + let target_path = config.root.0.join(self.out.as_ref().unwrap_or(&config.bind_json.out)); + + let sources = project.paths.read_input_files()?; + let graph = Graph::::resolve_sources(&project.paths, sources)?; + + // We only generate bindings for a single Solidity version to avoid conflicts. + let mut sources = graph + // resolve graph into mapping language -> version -> sources + .into_sources_by_version(project.offline, &project.locked_versions, &project.compiler)? 
+ .0 + .into_iter() + // we are only interested in Solidity sources + .find(|(lang, _)| *lang == MultiCompilerLanguage::Solc(SolcLanguage::Solidity)) + .ok_or_else(|| eyre::eyre!("no Solidity sources"))? + .1 + .into_iter() + // For now, we are always picking the latest version. + .max_by(|(v1, _), (v2, _)| v1.cmp(v2)) + .unwrap() + .1; + + // Insert empty bindings file + sources.insert(target_path.clone(), Source::new("library JsonBindings {}")); + + let sources = Sources( + sources + .0 + .into_par_iter() + .map(|(path, source)| { + let mut locs_to_update = Vec::new(); + let mut content = Arc::unwrap_or_clone(source.content); + let (parsed, _) = solang_parser::parse(&content, 0) + .map_err(|errors| eyre::eyre!("Parser failed: {errors:?}"))?; + + // All function definitions in the file + let mut functions = Vec::new(); + + for part in &parsed.0 { + if let solang_ast::SourceUnitPart::FunctionDefinition(def) = part { + functions.push(def); + } + if let solang_ast::SourceUnitPart::ContractDefinition(contract) = part { + for part in &contract.parts { + match part { + solang_ast::ContractPart::FunctionDefinition(def) => { + functions.push(def); + } + // Remove `immutable` attributes + solang_ast::ContractPart::VariableDefinition(def) => { + for attr in &def.attrs { + if let solang_ast::VariableAttribute::Immutable(loc) = + attr + { + locs_to_update.push(( + loc.start(), + loc.end(), + String::new(), + )); + } + } + } + _ => {} + } + } + }; + } + + for def in functions { + // If there's no body block, keep the function as is + let Some(solang_ast::Statement::Block { loc, .. 
}) = def.body else { + continue; + }; + let new_body = match def.ty { + solang_ast::FunctionTy::Modifier => "{ _; }", + _ => "{ revert(); }", + }; + let start = loc.start(); + let end = loc.end(); + locs_to_update.push((start, end + 1, new_body.to_string())); + } + + locs_to_update.sort_by_key(|(start, _, _)| *start); + + let mut shift = 0_i64; + + for (start, end, new) in locs_to_update { + let start = ((start as i64) - shift) as usize; + let end = ((end as i64) - shift) as usize; + + content.replace_range(start..end, new.as_str()); + shift += (end - start) as i64; + shift -= new.len() as i64; + } + + Ok((path, Source::new(content))) + }) + .collect::>>()?, + ); + + Ok(PreprocessedState { sources, target_path, project, config }) + } +} + +/// A single struct definition for which we need to generate bindings. +#[derive(Debug, Clone)] +struct StructToWrite { + /// Name of the struct definition. + name: String, + /// Name of the contract containing the struct definition. None if the struct is defined at the + /// file level. + contract_name: Option, + /// Import alias for the contract or struct, depending on whether the struct is imported + /// directly, or via a contract. + import_alias: Option, + /// Path to the file containing the struct definition. + path: PathBuf, + /// EIP712 schema for the struct. + schema: String, + /// Name of the struct definition used in function names and schema_* variables. + name_in_fns: String, +} + +impl StructToWrite { + /// Returns the name of the imported item. If struct is definied at the file level, returns the + /// struct name, otherwise returns the parent contract name. + fn struct_or_contract_name(&self) -> &str { + self.contract_name.as_deref().unwrap_or(&self.name) + } + + /// Same as [StructToWrite::struct_or_contract_name] but with alias applied. 
+ fn struct_or_contract_name_with_alias(&self) -> &str { + self.import_alias.as_deref().unwrap_or(self.struct_or_contract_name()) + } + + /// Path which can be used to reference this struct in input/output parameters. Either + /// StructName or ParantName.StructName + fn full_path(&self) -> String { + if self.contract_name.is_some() { + format!("{}.{}", self.struct_or_contract_name_with_alias(), self.name) + } else { + self.struct_or_contract_name_with_alias().to_string() + } + } + + fn import_item(&self) -> String { + if let Some(alias) = &self.import_alias { + format!("{} as {}", self.struct_or_contract_name(), alias) + } else { + self.struct_or_contract_name().to_string() + } + } +} + +#[derive(Debug)] +struct PreprocessedState { + sources: Sources, + target_path: PathBuf, + project: Project, + config: Config, +} + +impl PreprocessedState { + fn compile(self) -> Result { + let Self { sources, target_path, mut project, config } = self; + + project.settings.update_output_selection(|selection| { + *selection = OutputSelection::ast_output_selection(); + }); + + let output = with_compilation_reporter(false, || { + ProjectCompiler::with_sources(&project, sources)?.compile() + })?; + + if output.has_compiler_errors() { + eyre::bail!("{output}"); + } + + // Collect ASTs by getting them from sources and converting into strongly typed + // `SourceUnit`s. Also strips root from paths. 
+ let asts = output + .into_output() + .sources + .into_iter() + .filter_map(|(path, mut sources)| Some((path, sources.swap_remove(0).source_file.ast?))) + .map(|(path, ast)| { + Ok(( + path.strip_prefix(project.root()).unwrap_or(&path).to_path_buf(), + serde_json::from_str::(&serde_json::to_string(&ast)?)?, + )) + }) + .collect::>>()?; + + Ok(CompiledState { asts, target_path, config, project }) + } +} + +#[derive(Debug, Clone)] +struct CompiledState { + asts: BTreeMap, + target_path: PathBuf, + config: Config, + project: Project, +} + +impl CompiledState { + fn find_structs(self) -> Result { + let Self { asts, target_path, config, project } = self; + + // construct mapping (file, id) -> (struct definition, optional parent contract name) + let structs = asts + .iter() + .flat_map(|(path, ast)| { + let mut structs = Vec::new(); + // we walk AST directly instead of using visitors because we need to distinguish + // between file-level and contract-level struct definitions + for node in &ast.nodes { + match node { + SourceUnitPart::StructDefinition(def) => { + structs.push((def, None)); + } + SourceUnitPart::ContractDefinition(contract) => { + for node in &contract.nodes { + if let ContractDefinitionPart::StructDefinition(def) = node { + structs.push((def, Some(contract.name.clone()))); + } + } + } + _ => {} + } + } + structs.into_iter().map(|(def, parent)| ((path.as_path(), def.id), (def, parent))) + }) + .collect::>(); + + // Resolver for EIP712 schemas + let resolver = Resolver::new(&asts); + + let mut structs_to_write = Vec::new(); + + let include = config.bind_json.include; + let exclude = config.bind_json.exclude; + + for ((path, id), (def, contract_name)) in structs { + // For some structs there's no schema (e.g. if they contain a mapping), so we just skip + // those. + let Some(schema) = resolver.resolve_struct_eip712(id, &mut Default::default(), true)? 
+ else { + continue + }; + + if !include.is_empty() { + if !include.iter().any(|matcher| matcher.is_match(path)) { + continue; + } + } else { + // Exclude library files by default + if project.paths.has_library_ancestor(path) { + continue; + } + } + + if exclude.iter().any(|matcher| matcher.is_match(path)) { + continue; + } + + structs_to_write.push(StructToWrite { + name: def.name.clone(), + contract_name, + path: path.to_path_buf(), + schema, + + // will be filled later + import_alias: None, + name_in_fns: String::new(), + }) + } + + Ok(StructsState { structs_to_write, target_path }) + } +} + +#[derive(Debug)] +struct StructsState { + structs_to_write: Vec, + target_path: PathBuf, +} + +impl StructsState { + /// We manage 2 namespsaces for JSON bindings: + /// - Namespace of imported items. This includes imports of contracts containing structs and + /// structs defined at the file level. + /// - Namespace of struct names used in function names and schema_* variables. + /// + /// Both of those might contain conflicts, so we need to resolve them. 
+ fn resolve_imports_and_aliases(self) -> ResolvedState { + let Self { mut structs_to_write, target_path } = self; + + // firstly, we resolve imported names conflicts + // construct mapping name -> paths from which items with such name are imported + let mut names_to_paths = BTreeMap::new(); + + for s in &structs_to_write { + names_to_paths + .entry(s.struct_or_contract_name()) + .or_insert_with(BTreeSet::new) + .insert(s.path.as_path()); + } + + // now resolve aliases for names which need them and construct mapping (name, file) -> alias + let mut aliases = BTreeMap::new(); + + for (name, paths) in names_to_paths { + if paths.len() <= 1 { + // no alias needed + continue + } + + for (i, path) in paths.into_iter().enumerate() { + aliases + .entry(name.to_string()) + .or_insert_with(BTreeMap::new) + .insert(path.to_path_buf(), format!("{name}_{i}")); + } + } + + for s in &mut structs_to_write { + let name = s.struct_or_contract_name(); + if aliases.contains_key(name) { + s.import_alias = Some(aliases[name][&s.path].clone()); + } + } + + // Each struct needs a name by which we are referencing it in function names (e.g. + // deserializeFoo) Those might also have conflicts, so we manage a separate + // namespace for them + let mut name_to_structs_indexes = BTreeMap::new(); + + for (idx, s) in structs_to_write.iter().enumerate() { + name_to_structs_indexes.entry(&s.name).or_insert_with(Vec::new).push(idx); + } + + // Keeps `Some` for structs that will be referenced by name other than their definition + // name. 
+ let mut fn_names = vec![None; structs_to_write.len()]; + + for (name, indexes) in name_to_structs_indexes { + if indexes.len() > 1 { + for (i, idx) in indexes.into_iter().enumerate() { + fn_names[idx] = Some(format!("{name}_{i}")); + } + } + } + + for (s, fn_name) in structs_to_write.iter_mut().zip(fn_names.into_iter()) { + s.name_in_fns = fn_name.unwrap_or(s.name.clone()); + } + + ResolvedState { structs_to_write, target_path } + } +} + +struct ResolvedState { + structs_to_write: Vec, + target_path: PathBuf, +} + +impl ResolvedState { + fn write(self) -> Result { + let mut result = String::new(); + self.write_imports(&mut result)?; + self.write_vm(&mut result); + self.write_library(&mut result)?; + + if let Some(parent) = self.target_path.parent() { + fs::create_dir_all(parent)?; + } + fs::write(&self.target_path, &result)?; + + println!("Bindings written to {}", self.target_path.display()); + + Ok(result) + } + + fn write_imports(&self, result: &mut String) -> fmt::Result { + let mut grouped_imports = BTreeMap::new(); + + for struct_to_write in &self.structs_to_write { + let item = struct_to_write.import_item(); + grouped_imports + .entry(struct_to_write.path.as_path()) + .or_insert_with(BTreeSet::new) + .insert(item); + } + + result.push_str("// Automatically generated by forge bind-json.\n\npragma solidity >=0.6.2 <0.9.0;\npragma experimental ABIEncoderV2;\n\n"); + + for (path, names) in grouped_imports { + writeln!( + result, + "import {{{}}} from \"{}\";", + names.iter().join(", "), + path.display() + )?; + } + + Ok(()) + } + + /// Writes minimal VM interface to not depend on forge-std version + fn write_vm(&self, result: &mut String) { + result.push_str(r#" +interface Vm { + function parseJsonTypeArray(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory); + function parseJsonType(string calldata json, string calldata typeDescription) external pure returns (bytes memory); + function 
parseJsonType(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory); + function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json); + function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json); +} + "#); + } + + fn write_library(&self, result: &mut String) -> fmt::Result { + result.push_str( + r#" +library JsonBindings { + Vm constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code"))))); + +"#, + ); + // write schema constants + for struct_to_write in &self.structs_to_write { + writeln!( + result, + " string constant schema_{} = \"{}\";", + struct_to_write.name_in_fns, struct_to_write.schema + )?; + } + + // write serialization functions + for struct_to_write in &self.structs_to_write { + write!( + result, + r#" + function serialize({path} memory value) internal pure returns (string memory) {{ + return vm.serializeJsonType(schema_{name_in_fns}, abi.encode(value)); + }} + + function serialize({path} memory value, string memory objectKey, string memory valueKey) internal returns (string memory) {{ + return vm.serializeJsonType(objectKey, valueKey, schema_{name_in_fns}, abi.encode(value)); + }} + + function deserialize{name_in_fns}(string memory json) public pure returns ({path} memory) {{ + return abi.decode(vm.parseJsonType(json, schema_{name_in_fns}), ({path})); + }} + + function deserialize{name_in_fns}(string memory json, string memory path) public pure returns ({path} memory) {{ + return abi.decode(vm.parseJsonType(json, path, schema_{name_in_fns}), ({path})); + }} + + function deserialize{name_in_fns}Array(string memory json, string memory path) public pure returns ({path}[] memory) {{ + return abi.decode(vm.parseJsonTypeArray(json, path, schema_{name_in_fns}), ({path}[])); + }} +"#, + name_in_fns = 
struct_to_write.name_in_fns, + path = struct_to_write.full_path() + )?; + } + + result.push_str("}\n"); + + Ok(()) + } +} diff --git a/crates/forge/bin/cmd/eip712.rs b/crates/forge/bin/cmd/eip712.rs new file mode 100644 index 000000000000..636c305806dc --- /dev/null +++ b/crates/forge/bin/cmd/eip712.rs @@ -0,0 +1,241 @@ +use clap::{Parser, ValueHint}; +use eyre::{Ok, OptionExt, Result}; +use foundry_cli::{opts::CoreBuildArgs, utils::LoadConfig}; +use foundry_common::compile::ProjectCompiler; +use foundry_compilers::{ + artifacts::{ + output_selection::OutputSelection, + visitor::{Visitor, Walk}, + ContractDefinition, EnumDefinition, SourceUnit, StructDefinition, TypeDescriptions, + TypeName, + }, + CompilerSettings, +}; +use std::{collections::BTreeMap, path::PathBuf}; + +foundry_config::impl_figment_convert!(Eip712Args, opts); + +/// CLI arguments for `forge eip712`. +#[derive(Clone, Debug, Parser)] +pub struct Eip712Args { + /// The path to the file from which to read struct definitions. + #[arg(value_hint = ValueHint::FilePath, value_name = "PATH")] + pub target_path: PathBuf, + + #[command(flatten)] + opts: CoreBuildArgs, +} + +impl Eip712Args { + pub fn run(self) -> Result<()> { + let config = self.try_load_config_emit_warnings()?; + let mut project = config.create_project(false, true)?; + let target_path = dunce::canonicalize(self.target_path)?; + project.settings.update_output_selection(|selection| { + *selection = OutputSelection::ast_output_selection(); + }); + + let output = ProjectCompiler::new().files([target_path.clone()]).compile(&project)?; + + // Collect ASTs by getting them from sources and converting into strongly typed + // `SourceUnit`s. 
+ let asts = output + .into_output() + .sources + .into_iter() + .filter_map(|(path, mut sources)| Some((path, sources.swap_remove(0).source_file.ast?))) + .map(|(path, ast)| { + Ok((path, serde_json::from_str::(&serde_json::to_string(&ast)?)?)) + }) + .collect::>>()?; + + let resolver = Resolver::new(&asts); + + let target_ast = asts + .get(&target_path) + .ok_or_else(|| eyre::eyre!("Could not find AST for target file {target_path:?}"))?; + + let structs_in_target = { + let mut collector = StructCollector::default(); + target_ast.walk(&mut collector); + collector.0 + }; + + for (id, _) in structs_in_target { + if let Some(resolved) = + resolver.resolve_struct_eip712(id, &mut Default::default(), true)? + { + println!("{resolved}"); + println!(); + } + } + + Ok(()) + } +} + +/// AST [Visitor] used for collecting struct definitions. +#[derive(Debug, Clone, Default)] +pub struct StructCollector(pub BTreeMap); + +impl Visitor for StructCollector { + fn visit_struct_definition(&mut self, def: &StructDefinition) { + self.0.insert(def.id, def.clone()); + } +} + +/// Collects mapping from AST id of type definition to representation of this type for EIP-712 +/// encoding. +/// +/// For now, maps contract definitions to `address` and enums to `uint8`. 
+#[derive(Debug, Clone, Default)] +struct SimpleCustomTypesCollector(BTreeMap); + +impl Visitor for SimpleCustomTypesCollector { + fn visit_contract_definition(&mut self, def: &ContractDefinition) { + self.0.insert(def.id, "address".to_string()); + } + + fn visit_enum_definition(&mut self, def: &EnumDefinition) { + self.0.insert(def.id, "uint8".to_string()); + } +} + +pub struct Resolver { + simple_types: BTreeMap, + structs: BTreeMap, +} + +impl Resolver { + pub fn new(asts: &BTreeMap) -> Self { + let simple_types = { + let mut collector = SimpleCustomTypesCollector::default(); + asts.values().for_each(|ast| ast.walk(&mut collector)); + + collector.0 + }; + + let structs = { + let mut collector = StructCollector::default(); + asts.values().for_each(|ast| ast.walk(&mut collector)); + collector.0 + }; + + Self { simple_types, structs } + } + + /// Converts a given struct definition into EIP-712 `encodeType` representation. + /// + /// Returns `None` if struct contains any fields that are not supported by EIP-712 (e.g. + /// mappings or function pointers). + pub fn resolve_struct_eip712( + &self, + id: usize, + subtypes: &mut BTreeMap, + append_subtypes: bool, + ) -> Result> { + let def = &self.structs[&id]; + let mut result = format!("{}(", def.name); + + for (idx, member) in def.members.iter().enumerate() { + let Some(ty) = self.resolve_type( + member.type_name.as_ref().ok_or_eyre("missing type name")?, + subtypes, + )? + else { + return Ok(None) + }; + + result.push_str(&ty); + result.push(' '); + result.push_str(&member.name); + + if idx < def.members.len() - 1 { + result.push(','); + } + } + + result.push(')'); + + if !append_subtypes { + return Ok(Some(result)) + } + + for subtype_id in subtypes.values().copied().collect::>() { + if subtype_id == id { + continue + } + let Some(encoded_subtype) = self.resolve_struct_eip712(subtype_id, subtypes, false)? 
+ else { + return Ok(None) + }; + result.push_str(&encoded_subtype); + } + + Ok(Some(result)) + } + + /// Converts given [TypeName] into a type which can be converted to [DynSolType]. + /// + /// Returns `None` if the type is not supported for EIP712 encoding. + pub fn resolve_type( + &self, + type_name: &TypeName, + subtypes: &mut BTreeMap, + ) -> Result> { + match type_name { + TypeName::FunctionTypeName(_) | TypeName::Mapping(_) => Ok(None), + TypeName::ElementaryTypeName(ty) => Ok(Some(ty.name.clone())), + TypeName::ArrayTypeName(ty) => { + let Some(inner) = self.resolve_type(&ty.base_type, subtypes)? else { + return Ok(None) + }; + let len = parse_array_length(&ty.type_descriptions)?; + + Ok(Some(format!("{inner}[{}]", len.unwrap_or("")))) + } + TypeName::UserDefinedTypeName(ty) => { + if let Some(name) = self.simple_types.get(&(ty.referenced_declaration as usize)) { + Ok(Some(name.clone())) + } else if let Some(def) = self.structs.get(&(ty.referenced_declaration as usize)) { + let name = + // If we've already seen struct with this ID, just use assigned name. + if let Some((name, _)) = subtypes.iter().find(|(_, id)| **id == def.id) { + name.clone() + // Otherwise, try assigning a new name. 
+ } else { + let mut i = 0; + let mut name = def.name.clone(); + while subtypes.contains_key(&name) { + i += 1; + name = format!("{}_{i}", def.name); + } + + subtypes.insert(name.clone(), def.id); + name + }; + + return Ok(Some(name)) + } else { + return Ok(None) + } + } + } + } +} + +fn parse_array_length(type_description: &TypeDescriptions) -> Result> { + let type_string = + type_description.type_string.as_ref().ok_or_eyre("missing typeString for array type")?; + let Some(inside_brackets) = + type_string.rsplit_once("[").and_then(|(_, right)| right.split("]").next()) + else { + eyre::bail!("failed to parse array type string: {type_string}") + }; + + if inside_brackets.is_empty() { + Ok(None) + } else { + Ok(Some(inside_brackets)) + } +} diff --git a/crates/forge/bin/cmd/mod.rs b/crates/forge/bin/cmd/mod.rs index d3e2f8b6d314..ff63fa7cbc0e 100644 --- a/crates/forge/bin/cmd/mod.rs +++ b/crates/forge/bin/cmd/mod.rs @@ -40,6 +40,7 @@ //! ``` pub mod bind; +pub mod bind_json; pub mod build; pub mod cache; pub mod clone; @@ -48,6 +49,7 @@ pub mod coverage; pub mod create; pub mod debug; pub mod doc; +pub mod eip712; pub mod flatten; pub mod fmt; pub mod geiger; diff --git a/crates/forge/bin/main.rs b/crates/forge/bin/main.rs index aff2ad530d96..4484be629e25 100644 --- a/crates/forge/bin/main.rs +++ b/crates/forge/bin/main.rs @@ -112,6 +112,8 @@ fn main() -> Result<()> { }, ForgeSubcommand::VerifyBytecode(cmd) => utils::block_on(cmd.run()), ForgeSubcommand::Soldeer(cmd) => cmd.run(), + ForgeSubcommand::Eip712(cmd) => cmd.run(), + ForgeSubcommand::BindJson(cmd) => cmd.run(), } } diff --git a/crates/forge/bin/opts.rs b/crates/forge/bin/opts.rs index a449bd75f46f..b86d19c17728 100644 --- a/crates/forge/bin/opts.rs +++ b/crates/forge/bin/opts.rs @@ -1,8 +1,8 @@ use crate::cmd::{ - bind::BindArgs, build::BuildArgs, cache::CacheArgs, clone::CloneArgs, config, coverage, - create::CreateArgs, debug::DebugArgs, doc::DocArgs, flatten, fmt::FmtArgs, geiger, generate, - 
init::InitArgs, inspect, install::InstallArgs, remappings::RemappingArgs, remove::RemoveArgs, - selectors::SelectorsSubcommands, snapshot, soldeer, test, tree, update, + bind::BindArgs, bind_json, build::BuildArgs, cache::CacheArgs, clone::CloneArgs, config, + coverage, create::CreateArgs, debug::DebugArgs, doc::DocArgs, eip712, flatten, fmt::FmtArgs, + geiger, generate, init::InitArgs, inspect, install::InstallArgs, remappings::RemappingArgs, + remove::RemoveArgs, selectors::SelectorsSubcommands, snapshot, soldeer, test, tree, update, }; use clap::{Parser, Subcommand, ValueHint}; use forge_script::ScriptArgs; @@ -164,6 +164,12 @@ pub enum ForgeSubcommand { /// Soldeer dependency manager. Soldeer(soldeer::SoldeerArgs), + + /// Generate EIP-712 struct encodings for structs from a given file. + Eip712(eip712::Eip712Args), + + /// Generate bindings for serialization/deserialization of project structs via JSON cheatcodes. + BindJson(bind_json::BindJsonArgs), } #[cfg(test)] diff --git a/crates/forge/tests/cli/bind_json.rs b/crates/forge/tests/cli/bind_json.rs new file mode 100644 index 000000000000..bdc8f0fa19ec --- /dev/null +++ b/crates/forge/tests/cli/bind_json.rs @@ -0,0 +1,54 @@ +// tests complete bind-json workflow +// ensures that we can run forge-bind even if files are depending on yet non-existent bindings and +// that generated bindings are correct +forgetest_init!(test_bind_json, |prj, cmd| { + prj.add_test( + "JsonBindings", + r#" +import {JsonBindings} from "utils/JsonBindings.sol"; +import {Test} from "forge-std/Test.sol"; + +struct TopLevelStruct { + uint256 param1; + int8 param2; +} + +contract BindJsonTest is Test { + using JsonBindings for *; + + struct ContractLevelStruct { + address[][] param1; + address addrParam; + } + + function testTopLevel() public { + string memory json = '{"param1": 1, "param2": -1}'; + TopLevelStruct memory topLevel = json.deserializeTopLevelStruct(); + assertEq(topLevel.param1, 1); + assertEq(topLevel.param2, -1); + + json = 
topLevel.serialize(); + TopLevelStruct memory deserialized = json.deserializeTopLevelStruct(); + assertEq(keccak256(abi.encode(deserialized)), keccak256(abi.encode(topLevel))); + } + + function testContractLevel() public { + ContractLevelStruct memory contractLevel = ContractLevelStruct({ + param1: new address[][](2), + addrParam: address(0xBEEF) + }); + + string memory json = contractLevel.serialize(); + assertEq(json, '{"param1":[[],[]],"addrParam":"0x000000000000000000000000000000000000bEEF"}'); + + ContractLevelStruct memory deserialized = json.deserializeContractLevelStruct(); + assertEq(keccak256(abi.encode(deserialized)), keccak256(abi.encode(contractLevel))); + } +} +"#, + ) + .unwrap(); + + cmd.arg("bind-json").assert_success(); + cmd.forge_fuse().args(["test"]).assert_success(); +}); diff --git a/crates/forge/tests/cli/build.rs b/crates/forge/tests/cli/build.rs index acbb54560361..6cb17d0d4872 100644 --- a/crates/forge/tests/cli/build.rs +++ b/crates/forge/tests/cli/build.rs @@ -57,8 +57,10 @@ contract ValidContract {} ) .unwrap(); - let config = - Config { skip: vec![Glob::new("src/InvalidContract.sol").unwrap()], ..Default::default() }; + let config = Config { + skip: vec![Glob::new("src/InvalidContract.sol").unwrap().into()], + ..Default::default() + }; prj.write_config(config); cmd.args(["build"]).assert_success(); diff --git a/crates/forge/tests/cli/config.rs b/crates/forge/tests/cli/config.rs index e28415e30596..f966d6024118 100644 --- a/crates/forge/tests/cli/config.rs +++ b/crates/forge/tests/cli/config.rs @@ -134,6 +134,7 @@ forgetest!(can_extract_config_values, |prj, cmd| { build_info_path: None, fmt: Default::default(), doc: Default::default(), + bind_json: Default::default(), fs_permissions: Default::default(), labels: Default::default(), prague: true, diff --git a/crates/forge/tests/cli/main.rs b/crates/forge/tests/cli/main.rs index 0ac67d81b176..b8bc3db5add4 100644 --- a/crates/forge/tests/cli/main.rs +++ b/crates/forge/tests/cli/main.rs @@ 
-4,6 +4,7 @@ extern crate foundry_test_utils; pub mod constants; pub mod utils; +mod bind_json; mod build; mod cache; mod cmd; diff --git a/crates/forge/tests/it/invariant.rs b/crates/forge/tests/it/invariant.rs index 4d6c091ee842..c0e657d0687e 100644 --- a/crates/forge/tests/it/invariant.rs +++ b/crates/forge/tests/it/invariant.rs @@ -387,7 +387,7 @@ async fn test_shrink_fail_on_revert() { runner.test_options.fuzz.seed = Some(U256::from(119u32)); runner.test_options.invariant.fail_on_revert = true; runner.test_options.invariant.runs = 1; - runner.test_options.invariant.depth = 100; + runner.test_options.invariant.depth = 200; match get_counterexample!(runner, &filter) { CounterExample::Single(_) => panic!("CounterExample should be a sequence."), diff --git a/testdata/cheats/Vm.sol b/testdata/cheats/Vm.sol index 2995403968d7..cf72e88475d8 100644 --- a/testdata/cheats/Vm.sol +++ b/testdata/cheats/Vm.sol @@ -285,6 +285,9 @@ interface Vm { function parseJsonKeys(string calldata json, string calldata key) external pure returns (string[] memory keys); function parseJsonString(string calldata json, string calldata key) external pure returns (string memory); function parseJsonStringArray(string calldata json, string calldata key) external pure returns (string[] memory); + function parseJsonTypeArray(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory); + function parseJsonType(string calldata json, string calldata typeDescription) external pure returns (bytes memory); + function parseJsonType(string calldata json, string calldata key, string calldata typeDescription) external pure returns (bytes memory); function parseJsonUint(string calldata json, string calldata key) external pure returns (uint256); function parseJsonUintArray(string calldata json, string calldata key) external pure returns (uint256[] memory); function parseJson(string calldata json) external pure returns (bytes memory abiEncodedData); @@ 
-363,6 +366,8 @@ interface Vm { function serializeInt(string calldata objectKey, string calldata valueKey, int256 value) external returns (string memory json); function serializeInt(string calldata objectKey, string calldata valueKey, int256[] calldata values) external returns (string memory json); function serializeJson(string calldata objectKey, string calldata value) external returns (string memory json); + function serializeJsonType(string calldata typeDescription, bytes memory value) external pure returns (string memory json); + function serializeJsonType(string calldata objectKey, string calldata valueKey, string calldata typeDescription, bytes memory value) external returns (string memory json); function serializeString(string calldata objectKey, string calldata valueKey, string calldata value) external returns (string memory json); function serializeString(string calldata objectKey, string calldata valueKey, string[] calldata values) external returns (string memory json); function serializeUintToHex(string calldata objectKey, string calldata valueKey, uint256 value) external returns (string memory json); diff --git a/testdata/default/cheats/Json.t.sol b/testdata/default/cheats/Json.t.sol index ca53b1801297..0604ef9078f1 100644 --- a/testdata/default/cheats/Json.t.sol +++ b/testdata/default/cheats/Json.t.sol @@ -5,7 +5,89 @@ import "ds-test/test.sol"; import "cheats/Vm.sol"; import "../logs/console.sol"; +library JsonStructs { + address constant HEVM_ADDRESS = address(bytes20(uint160(uint256(keccak256("hevm cheat code"))))); + Vm constant vm = Vm(HEVM_ADDRESS); + + // forge eip712 testdata/default/cheats/Json.t.sol -R 'cheats=testdata/cheats' -R 'ds-test=testdata/lib/ds-test/src' | grep ^FlatJson + string constant schema_FlatJson = + "FlatJson(uint256 a,int24[][] arr,string str,bytes b,address addr,bytes32 fixedBytes)"; + + // forge eip712 testdata/default/cheats/Json.t.sol -R 'cheats=testdata/cheats' -R 'ds-test=testdata/lib/ds-test/src' | grep ^NestedJson 
+ string constant schema_NestedJson = + "NestedJson(FlatJson[] members,AnotherFlatJson inner,string name)AnotherFlatJson(bytes4 fixedBytes)FlatJson(uint256 a,int24[][] arr,string str,bytes b,address addr,bytes32 fixedBytes)"; + + function deserializeFlatJson(string memory json) internal pure returns (ParseJsonTest.FlatJson memory) { + return abi.decode(vm.parseJsonType(json, schema_FlatJson), (ParseJsonTest.FlatJson)); + } + + function deserializeFlatJson(string memory json, string memory path) + internal + pure + returns (ParseJsonTest.FlatJson memory) + { + return abi.decode(vm.parseJsonType(json, path, schema_FlatJson), (ParseJsonTest.FlatJson)); + } + + function deserializeFlatJsonArray(string memory json, string memory path) + internal + pure + returns (ParseJsonTest.FlatJson[] memory) + { + return abi.decode(vm.parseJsonTypeArray(json, path, schema_FlatJson), (ParseJsonTest.FlatJson[])); + } + + function deserializeNestedJson(string memory json) internal pure returns (ParseJsonTest.NestedJson memory) { + return abi.decode(vm.parseJsonType(json, schema_NestedJson), (ParseJsonTest.NestedJson)); + } + + function deserializeNestedJson(string memory json, string memory path) + internal + pure + returns (ParseJsonTest.NestedJson memory) + { + return abi.decode(vm.parseJsonType(json, path, schema_NestedJson), (ParseJsonTest.NestedJson)); + } + + function deserializeNestedJsonArray(string memory json, string memory path) + internal + pure + returns (ParseJsonTest.NestedJson[] memory) + { + return abi.decode(vm.parseJsonType(json, path, schema_NestedJson), (ParseJsonTest.NestedJson[])); + } + + function serialize(ParseJsonTest.FlatJson memory instance) internal pure returns (string memory) { + return vm.serializeJsonType(schema_FlatJson, abi.encode(instance)); + } + + function serialize(ParseJsonTest.NestedJson memory instance) internal pure returns (string memory) { + return vm.serializeJsonType(schema_NestedJson, abi.encode(instance)); + } +} + contract 
ParseJsonTest is DSTest { + using JsonStructs for *; + + struct FlatJson { + uint256 a; + int24[][] arr; + string str; + bytes b; + address addr; + bytes32 fixedBytes; + } + + struct AnotherFlatJson { + bytes4 fixedBytes; + } + + struct NestedJson { + FlatJson[] members; + AnotherFlatJson inner; + string name; + } + Vm constant vm = Vm(HEVM_ADDRESS); string json; @@ -97,7 +179,7 @@ contract ParseJsonTest is DSTest { } function test_coercionRevert() public { - vm._expectCheatcodeRevert("values at \".nestedObject\" must not be JSON objects"); + vm._expectCheatcodeRevert("expected uint256, found JSON object"); vm.parseJsonUint(json, ".nestedObject"); } @@ -206,6 +288,44 @@ contract ParseJsonTest is DSTest { vm._expectCheatcodeRevert("key \".*\" must return exactly one JSON object"); vm.parseJsonKeys(jsonString, ".*"); } + + // forge eip712 testdata/default/cheats/Json.t.sol -R 'cheats=testdata/cheats' -R 'ds-test=testdata/lib/ds-test/src' | grep ^FlatJson + string constant schema_FlatJson = + "FlatJson(uint256 a,int24[][] arr,string str,bytes b,address addr,bytes32 fixedBytes)"; + + // forge eip712 testdata/default/cheats/Json.t.sol -R 'cheats=testdata/cheats' -R 'ds-test=testdata/lib/ds-test/src' | grep ^NestedJson + string constant schema_NestedJson = + "NestedJson(FlatJson[] members,AnotherFlatJson inner,string name)AnotherFlatJson(bytes4 fixedBytes)FlatJson(uint256 a,int24[][] arr,string str,bytes b,address addr,bytes32 fixedBytes)"; + + function test_parseJsonType() public { + string memory readJson = vm.readFile("fixtures/Json/nested_json_struct.json"); + NestedJson memory data = readJson.deserializeNestedJson(); + assertEq(data.members.length, 2); + + FlatJson memory expected = FlatJson({ + a: 200, + arr: new int24[][](0), + str: "some other string", + b: hex"0000000000000000000000000000000000000000", + addr: 0x167D91deaEEE3021161502873d3bcc6291081648, + fixedBytes: 0xed1c7beb1f00feaaaec5636950d6edb25a8d4fedc8deb2711287b64c4d27719d + }); + + 
assertEq(keccak256(abi.encode(data.members[1])), keccak256(abi.encode(expected))); + assertEq(bytes32(data.inner.fixedBytes), bytes32(bytes4(0x12345678))); + + FlatJson[] memory members = JsonStructs.deserializeFlatJsonArray(readJson, ".members"); + + assertEq(keccak256(abi.encode(members)), keccak256(abi.encode(data.members))); + } + + function test_parseJsonType_roundtrip() public { + string memory readJson = vm.readFile("fixtures/Json/nested_json_struct.json"); + NestedJson memory data = readJson.deserializeNestedJson(); + string memory serialized = data.serialize(); + NestedJson memory deserialized = serialized.deserializeNestedJson(); + assertEq(keccak256(abi.encode(data)), keccak256(abi.encode(deserialized))); + } } contract WriteJsonTest is DSTest { @@ -277,13 +397,13 @@ contract WriteJsonTest is DSTest { // Github issue: https://github.com/foundry-rs/foundry/issues/5745 function test_serializeRootObject() public { string memory serialized = vm.serializeJson(json1, '{"foo": "bar"}'); - assertEq(serialized, '{"foo":"bar"}'); + assertEq(serialized, '{"foo": "bar"}'); serialized = vm.serializeBool(json1, "boolean", true); assertEq(vm.parseJsonString(serialized, ".foo"), "bar"); assertEq(vm.parseJsonBool(serialized, ".boolean"), true); string memory overwritten = vm.serializeJson(json1, '{"value": 123}'); - assertEq(overwritten, '{"value":123}'); + assertEq(overwritten, '{"value": 123}'); } struct simpleJson { diff --git a/testdata/default/cheats/Toml.t.sol b/testdata/default/cheats/Toml.t.sol index 40667743f8d6..a01b29af62cb 100644 --- a/testdata/default/cheats/Toml.t.sol +++ b/testdata/default/cheats/Toml.t.sol @@ -116,7 +116,7 @@ contract ParseTomlTest is DSTest { } function test_coercionRevert() public { - vm._expectCheatcodeRevert("values at \".nestedObject\" must not be JSON objects"); + vm._expectCheatcodeRevert("expected uint256, found JSON object"); vm.parseTomlUint(toml, ".nestedObject"); } diff --git a/testdata/fixtures/Json/nested_json_struct.json 
b/testdata/fixtures/Json/nested_json_struct.json new file mode 100644 index 000000000000..ac6fe7692bb9 --- /dev/null +++ b/testdata/fixtures/Json/nested_json_struct.json @@ -0,0 +1,35 @@ +{ + "members": [ + { + "a": 100, + "arr": [ + [ + 1, + -2, + -5 + ], + [ + 1000, + 2000, + 0 + ] + ], + "str": "some string", + "b": "0x", + "addr": "0x0000000000000000000000000000000000000000", + "fixedBytes": "0x8ae3fc6bd1b150a73ec4afe3ef136fa2f88e9c96131c883c5e4a4714811c1598" + }, + { + "a": 200, + "arr": [], + "str": "some other string", + "b": "0x0000000000000000000000000000000000000000", + "addr": "0x167D91deaEEE3021161502873d3bcc6291081648", + "fixedBytes": "0xed1c7beb1f00feaaaec5636950d6edb25a8d4fedc8deb2711287b64c4d27719d" + } + ], + "inner": { + "fixedBytes": "0x12345678" + }, + "name": "test" +} \ No newline at end of file