diff --git a/.golangci.yml b/.golangci.yml
index e52e16f8..ef08cb01 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -159,3 +159,4 @@ linters:
     - varnamelen # False positives
     - wsl # Way to strict and opinionated
     - lll # Disable rigid line length limit
+    - tparallel # Disable test parallelization
diff --git a/client/x/evmengine/keeper/keeper_internal_test.go b/client/x/evmengine/keeper/keeper_internal_test.go
index e06875a6..311c7557 100644
--- a/client/x/evmengine/keeper/keeper_internal_test.go
+++ b/client/x/evmengine/keeper/keeper_internal_test.go
@@ -2,34 +2,266 @@ package keeper
 
 import (
 	"context"
+	"encoding/json"
 	"testing"
 	"time"
 
 	k1 "github.com/cometbft/cometbft/crypto/secp256k1"
 	cmtproto "github.com/cometbft/cometbft/proto/tendermint/types"
 	cmttypes "github.com/cometbft/cometbft/types"
+	sdk "github.com/cosmos/cosmos-sdk/types"
 	authtx "github.com/cosmos/cosmos-sdk/x/auth/tx"
+	"github.com/ethereum/go-ethereum/beacon/engine"
+	"github.com/ethereum/go-ethereum/common"
 	fuzz "github.com/google/gofuzz"
 	"github.com/stretchr/testify/require"
 
 	"github.com/piplabs/story/client/comet"
 	moduletestutil "github.com/piplabs/story/client/x/evmengine/testutil"
+	"github.com/piplabs/story/client/x/evmengine/types"
 	"github.com/piplabs/story/lib/errors"
+	"github.com/piplabs/story/lib/ethclient"
 	"github.com/piplabs/story/lib/ethclient/mock"
 	"github.com/piplabs/story/lib/k1util"
 
 	"go.uber.org/mock/gomock"
 )
 
-func TestKeeper_isNextProposer(t *testing.T) {
+type args struct {
+	height         int64
+	validatorsFunc func(context.Context, int64) (*cmttypes.ValidatorSet, bool, error)
+	current        int
+	next           int
+	header         func(height int64, address []byte) cmtproto.Header
+}
+
+func createKeeper(t *testing.T, args args) (sdk.Context, *mockCometAPI, *Keeper) {
+	t.Helper()
+
+	cdc := getCodec(t)
+	txConfig := authtx.NewTxConfig(cdc, nil)
+	mockEngine, err := newMockEngineAPI(0)
+	require.NoError(t, err)
+
+	cmtAPI := newMockCometAPI(t, args.validatorsFunc)
+	header := args.header(args.height, cmtAPI.validatorSet.Validators[args.current].Address)
+
+	nxtAddr, err := k1util.PubKeyToAddress(cmtAPI.validatorSet.Validators[args.next].PubKey)
+	require.NoError(t, err)
+
+	ctrl := gomock.NewController(t)
+	mockClient := mock.NewMockClient(ctrl)
+	ak := moduletestutil.NewMockAccountKeeper(ctrl)
+	esk := moduletestutil.NewMockEvmStakingKeeper(ctrl)
+	uk := moduletestutil.NewMockUpgradeKeeper(ctrl)
+
+	ctx, storeService := setupCtxStore(t, &header)
+	keeper, err := NewKeeper(cdc, storeService, &mockEngine, mockClient, txConfig, ak, esk, uk)
+	require.NoError(t, err)
+	keeper.SetCometAPI(cmtAPI)
+	keeper.SetValidatorAddress(nxtAddr)
+	populateGenesisHead(ctx, t, keeper)
+
+	return ctx, cmtAPI, keeper
+}
+
+func TestKeeper_SetBuildDelay(t *testing.T) {
+	t.Parallel()
+	keeper := new(Keeper)
+	// check existing value
+	require.Equal(t, 0*time.Second, keeper.buildDelay)
+	// set new value
+	keeper.SetBuildDelay(10 * time.Second)
+	require.Equal(t, 10*time.Second, keeper.buildDelay)
+}
+
+func TestKeeper_SetBuildOptimistic(t *testing.T) {
+	t.Parallel()
+	keeper := new(Keeper)
+	// check existing value
+	require.False(t, keeper.buildOptimistic)
+	// set new value
+	keeper.SetBuildOptimistic(true)
+	require.True(t, keeper.buildOptimistic)
+}
+
+func TestKeeper_parseAndVerifyProposedPayload(t *testing.T) {
 	t.Parallel()
-	type args struct {
-		height         int64
-		validatorsFunc func(context.Context, int64) (*cmttypes.ValidatorSet, bool, error)
-		current        int
-		next           int
-		header         func(height int64, address []byte) cmtproto.Header
+	now := time.Now()
+	fuzzer := ethclient.NewFuzzer(now.Unix())
+	ctx, _, keeper := createKeeper(t, args{
+		height:  0,
+		current: 0,
+		next:    1,
+		header: func(height int64, address []byte) cmtproto.Header {
+			return cmtproto.Header{Height: height, ProposerAddress: address}
+		},
+	})
+
+	tcs := []struct {
+		name        string
+		setup       func(context.Context) sdk.Context
+		msg         func(context.Context) *types.MsgExecutionPayload
+		expectedErr string
+	}{
+		{
+			name: "fail: unmarshal payload because of invalid json",
+			msg: func(_ context.Context) *types.MsgExecutionPayload {
+				return &types.MsgExecutionPayload{ExecutionPayload: []byte("invalid")}
+			},
+			expectedErr: "unmarshal payload",
+		},
+		{
+			name: "fail: payload number is not equal to head block height + 1",
+			msg: func(_ context.Context) *types.MsgExecutionPayload {
+				payload, err := ethclient.MakePayload(fuzzer, 100, uint64(now.Unix()), common.Hash{}, common.Address{}, common.Hash{}, &common.Hash{})
+				require.NoError(t, err)
+
+				marshaled, err := json.Marshal(payload)
+				require.NoError(t, err)
+
+				return &types.MsgExecutionPayload{ExecutionPayload: marshaled}
+			},
+			expectedErr: "invalid proposed payload number",
+		},
+		{
+			name: "fail: payload parent hash is not equal to head hash",
+			msg: func(c context.Context) *types.MsgExecutionPayload {
+				execHead, err := keeper.getExecutionHead(c)
+				require.NoError(t, err)
+
+				payload, err := ethclient.MakePayload(fuzzer, execHead.GetBlockHeight()+1, uint64(now.Unix()), common.Hash{}, common.Address{}, common.Hash{}, &common.Hash{})
+				require.NoError(t, err)
+
+				marshaled, err := json.Marshal(payload)
+				require.NoError(t, err)
+
+				return &types.MsgExecutionPayload{ExecutionPayload: marshaled}
+			},
+			expectedErr: "invalid proposed payload parent hash",
+		},
+		{
+			name: "fail: invalid payload timestamp",
+			msg: func(c context.Context) *types.MsgExecutionPayload {
+				execHead, err := keeper.getExecutionHead(c)
+				require.NoError(t, err)
+				weekAgo := execHead.GetBlockTime() - 604800
+
+				payload, err := ethclient.MakePayload(fuzzer, 1, weekAgo, execHead.Hash(), common.Address{}, common.Hash{}, &common.Hash{})
+				require.NoError(t, err)
+
+				marshaled, err := json.Marshal(payload)
+				require.NoError(t, err)
+
+				return &types.MsgExecutionPayload{ExecutionPayload: marshaled}
+			},
+			expectedErr: "invalid payload timestamp",
+		},
+		{
+			name: "fail: invalid payload random",
+			msg: func(c context.Context) *types.MsgExecutionPayload {
+				execHead, err := keeper.getExecutionHead(c)
+				require.NoError(t, err)
+
+				payload, err := ethclient.MakePayload(fuzzer, execHead.GetBlockHeight()+1, uint64(now.Unix()), execHead.Hash(), common.Address{}, common.Hash{}, &common.Hash{})
+				require.NoError(t, err)
+
+				marshaled, err := json.Marshal(payload)
+				require.NoError(t, err)
+
+				return &types.MsgExecutionPayload{ExecutionPayload: marshaled}
+			},
+			expectedErr: "invalid payload random",
+		},
+		{
+			name: "pass: valid payload",
+			msg: func(c context.Context) *types.MsgExecutionPayload {
+				execHead, err := keeper.getExecutionHead(c)
+				require.NoError(t, err)
+
+				payload, err := ethclient.MakePayload(fuzzer, execHead.GetBlockHeight()+1, uint64(now.Unix()), execHead.Hash(), common.Address{}, execHead.Hash(), &common.Hash{})
+				require.NoError(t, err)
+
+				marshaled, err := json.Marshal(payload)
+				require.NoError(t, err)
+
+				return &types.MsgExecutionPayload{ExecutionPayload: marshaled}
+			},
+		},
+		{
+			name: "pass: valid payload when consensus block time is less than execution block time",
+			setup: func(c context.Context) sdk.Context {
+				execHead, err := keeper.getExecutionHead(c)
+				require.NoError(t, err)
+				// update execution head with current block time
+				err = keeper.updateExecutionHead(c, engine.ExecutableData{
+					Number:    execHead.GetBlockHeight(),
+					BlockHash: common.BytesToHash(execHead.GetBlockHash()),
+					Timestamp: uint64(now.Unix()),
+				})
+				require.NoError(t, err)
+
+				// set block time to be less than execution block time
+				sdkCtx := sdk.UnwrapSDKContext(c)
+				sdkCtx = sdkCtx.WithBlockTime(now.Add(-24 * time.Hour))
+
+				return sdkCtx
+			},
+			msg: func(c context.Context) *types.MsgExecutionPayload {
+				execHead, err := keeper.getExecutionHead(c)
+				require.NoError(t, err)
+
+				payload, err := ethclient.MakePayload(fuzzer, execHead.GetBlockHeight()+1, execHead.GetBlockTime()+1, execHead.Hash(), common.Address{}, execHead.Hash(), &common.Hash{})
+				require.NoError(t, err)
+
+				marshaled, err := json.Marshal(payload)
+				require.NoError(t, err)
+
+				return &types.MsgExecutionPayload{ExecutionPayload: marshaled}
+			},
+		},
+	}
+
+	for _, tc := range tcs {
+		//nolint:tparallel // cannot run parallel because of data race on execution head table
+		t.Run(tc.name, func(t *testing.T) {
+			cachedCtx, _ := ctx.CacheContext()
+			if tc.setup != nil {
+				cachedCtx = tc.setup(cachedCtx)
+			}
+			_, err := keeper.parseAndVerifyProposedPayload(cachedCtx, tc.msg(cachedCtx))
+			if tc.expectedErr != "" {
+				require.ErrorContains(t, err, tc.expectedErr)
+			} else {
+				require.NoError(t, err)
+			}
+		})
 	}
+}
+
+func TestKeeper_setOptimisticPayload(t *testing.T) {
+	t.Parallel()
+	_, _, keeper := createKeeper(t, args{
+		height:  0,
+		current: 0,
+		next:    1,
+		header: func(height int64, address []byte) cmtproto.Header {
+			return cmtproto.Header{Height: height, ProposerAddress: address}
+		},
+	})
+
+	// check existing values
+	require.Nil(t, keeper.mutablePayload.ID)
+	require.Zero(t, keeper.mutablePayload.Height)
+
+	// set new values
+	keeper.setOptimisticPayload(&engine.PayloadID{1}, 1)
+	require.Equal(t, uint64(1), keeper.mutablePayload.Height)
+	require.Equal(t, engine.PayloadID{1}, *keeper.mutablePayload.ID)
+}
+
+func TestKeeper_isNextProposer(t *testing.T) {
+	t.Parallel()
 	height := int64(1)
 	tests := []struct {
 		name string
@@ -117,30 +349,7 @@ func TestKeeper_isNextProposer(t *testing.T) {
 		t.Run(tt.name, func(t *testing.T) {
 			t.Parallel()
 
-			cdc := getCodec(t)
-			txConfig := authtx.NewTxConfig(cdc, nil)
-			mockEngine, err := newMockEngineAPI(0)
-			require.NoError(t, err)
-
-			cmtAPI := newMockCometAPI(t, tt.args.validatorsFunc)
-			header := tt.args.header(height, cmtAPI.validatorSet.Validators[tt.args.current].Address)
-
-			nxtAddr, err := k1util.PubKeyToAddress(cmtAPI.validatorSet.Validators[tt.args.next].PubKey)
-			require.NoError(t, err)
-
-			ctrl := gomock.NewController(t)
-			mockClient := mock.NewMockClient(ctrl)
-			ak := moduletestutil.NewMockAccountKeeper(ctrl)
-			esk := moduletestutil.NewMockEvmStakingKeeper(ctrl)
-			uk := moduletestutil.NewMockUpgradeKeeper(ctrl)
-
-			ctx, storeService := setupCtxStore(t, &header)
-
-			keeper, err := NewKeeper(cdc, storeService, &mockEngine, mockClient, txConfig, ak, esk, uk)
-			require.NoError(t, err)
-			keeper.SetCometAPI(cmtAPI)
-			keeper.SetValidatorAddress(nxtAddr)
-			populateGenesisHead(ctx, t, keeper)
+			ctx, cmtAPI, keeper := createKeeper(t, tt.args)
 
 			got, err := keeper.isNextProposer(ctx, ctx.BlockHeader().ProposerAddress, ctx.BlockHeader().Height)
 			if (err != nil) != tt.wantErr {
diff --git a/lib/ethclient/enginemock.go b/lib/ethclient/enginemock.go
index e3f26336..fe4251b2 100644
--- a/lib/ethclient/enginemock.go
+++ b/lib/ethclient/enginemock.go
@@ -108,7 +108,7 @@ func MockGenesisBlock() (*types.Block, error) {
 		fuzzer = NewFuzzer(timestamp)
 	)
 
-	genesisPayload, err := makePayload(fuzzer, height, uint64(timestamp), parentHash, common.Address{}, parentHash, &parentBeaconRoot)
+	genesisPayload, err := MakePayload(fuzzer, height, uint64(timestamp), parentHash, common.Address{}, parentHash, &parentBeaconRoot)
 	if err != nil {
 		return nil, errors.Wrap(err, "make next payload")
 	}
@@ -337,7 +337,7 @@ func (m *engineMock) ForkchoiceUpdatedV3(ctx context.Context, update engine.Fork
 	// If we have payload attributes, make a new payload
 	if attrs != nil {
-		payload, err := makePayload(m.fuzzer, m.head.NumberU64()+1,
+		payload, err := MakePayload(m.fuzzer, m.head.NumberU64()+1,
 			attrs.Timestamp, update.HeadBlockHash, attrs.SuggestedFeeRecipient, attrs.Random, attrs.BeaconRoot)
 		if err != nil {
 			return engine.ForkChoiceResponse{}, err
 		}
@@ -396,8 +396,8 @@ func (*engineMock) GetPayloadV2(context.Context, engine.PayloadID) (*engine.Exec
 	panic("implement me")
 }
 
-// makePayload returns a new fuzzed payload using head as parent if provided.
-func makePayload(fuzzer *fuzz.Fuzzer, height uint64, timestamp uint64, parentHash common.Hash,
+// MakePayload returns a new fuzzed payload using head as parent if provided.
+func MakePayload(fuzzer *fuzz.Fuzzer, height uint64, timestamp uint64, parentHash common.Hash,
 	feeRecipient common.Address, randao common.Hash, beaconRoot *common.Hash) (engine.ExecutableData, error) {
 	// Build a new header
 	var header types.Header