From 3ed59538bdc6db342e5f044d5c9c243f3fbf739b Mon Sep 17 00:00:00 2001 From: Robert Raynor <35671663+mooselumph@users.noreply.github.com> Date: Fri, 23 Feb 2024 10:41:06 -0800 Subject: [PATCH] Refactor encoder (2 of N) (#269) --- clients/disperser_client.go | 3 +- clients/node_client.go | 7 +- clients/retrieval_client.go | 26 +-- clients/tests/retrieval_client_test.go | 32 ++- core/assignment_test.go | 3 +- core/auth/auth_test.go | 5 +- core/data.go | 50 +--- core/encoding.go | 96 -------- core/encoding/encoder.go | 215 ------------------ core/encoding/encoder_test.go | 109 --------- core/encoding/mock_encoder.go | 52 ----- core/encoding/verifier_test.go | 92 -------- core/serialization.go | 51 ----- core/serialization_test.go | 13 +- core/test/core_test.go | 41 ++-- core/validator.go | 41 ++-- disperser/api/grpc/encoder/encoder.pb.go | 2 +- disperser/apiserver/ratelimit_test.go | 3 +- disperser/apiserver/server.go | 7 +- disperser/apiserver/server_test.go | 5 +- disperser/batcher/batcher_test.go | 14 +- disperser/batcher/encoded_blob_store.go | 5 +- disperser/batcher/encoding_streamer.go | 13 +- disperser/batcher/encoding_streamer_test.go | 4 +- disperser/batcher/finalizer_test.go | 7 +- disperser/cmd/batcher/config.go | 6 +- disperser/cmd/encoder/config.go | 6 +- disperser/cmd/encoder/encoder.go | 6 +- disperser/cmd/encoder/flags/flags.go | 4 +- .../blobstore/blob_metadata_store_test.go | 5 +- .../common/blobstore/shared_storage_test.go | 5 +- disperser/common/inmem/store_test.go | 3 +- disperser/dataapi/docs/docs.go | 4 +- disperser/dataapi/docs/swagger.json | 4 +- disperser/dataapi/docs/swagger.yaml | 4 +- disperser/dataapi/server.go | 31 +-- disperser/dataapi/server_test.go | 7 +- disperser/disperser.go | 3 +- disperser/encoder/client.go | 16 +- disperser/encoder/server.go | 36 +-- disperser/encoder/server_test.go | 61 ++--- disperser/encoder_client.go | 4 +- disperser/local_encoder_client.go | 12 +- disperser/mock/encoder.go | 12 +- docs/design/encoding.md | 
2 +- .../spec/protocol-modules/storage/encoding.md | 4 +- encoding/data.go | 60 +++++ encoding/encoding.go | 52 ++--- {core/encoding => encoding/kzgrs}/cli.go | 12 +- .../kzgrs/prover/data/SRSTable/dimE16.coset8 | Bin 8200 -> 0 bytes encoding/kzgrs/prover/decode_test.go | 11 +- encoding/kzgrs/prover/parametrized_prover.go | 12 +- .../kzgrs/prover/parametrized_prover_test.go | 7 +- encoding/kzgrs/prover/precompute_test.go | 8 +- encoding/kzgrs/prover/prover.go | 100 ++++++-- encoding/kzgrs/prover/prover_fuzz_test.go | 8 +- encoding/kzgrs/prover/prover_test.go | 93 +++++++- .../verifier/batch_commit_equivalence.go | 13 ++ .../verifier/batch_commit_equivalence_test.go | 5 +- encoding/kzgrs/verifier/degree_test.go | 5 +- encoding/kzgrs/verifier/frame_test.go | 6 +- encoding/kzgrs/verifier/multiframe.go | 41 +++- encoding/kzgrs/verifier/multiframe_test.go | 9 +- encoding/kzgrs/verifier/verifier.go | 93 ++++++-- encoding/kzgrs/verifier/verifier_test.go | 92 ++++++++ encoding/mock/encoder.go | 54 +++++ encoding/params.go | 76 +++++++ encoding/rs/decode.go | 5 +- encoding/rs/encode.go | 2 +- encoding/rs/encode_test.go | 7 +- encoding/rs/encoder.go | 5 +- encoding/rs/encoder_fuzz_test.go | 3 +- encoding/rs/frame_test.go | 3 +- encoding/rs/interpolation.go | 4 +- encoding/rs/utils_test.go | 7 +- encoding/serialization.go | 99 ++++++++ encoding/test/main.go | 6 +- encoding/utils.go | 44 ++++ go.mod | 2 +- inabox/tests/integration_suite_test.go | 26 +-- node/config.go | 6 +- node/flags/flags.go | 4 +- node/grpc/server.go | 3 +- node/grpc/server_load_test.go | 14 +- node/grpc/server_test.go | 39 ++-- node/grpc/utils.go | 11 +- node/node.go | 6 +- node/store_test.go | 25 +- retriever/cmd/main.go | 8 +- retriever/config.go | 6 +- retriever/flags/flags.go | 4 +- retriever/server.go | 3 +- retriever/server_test.go | 23 +- test/integration_test.go | 56 +++-- test/synthetic-test/synthetic_client_test.go | 60 +++-- 95 files changed, 1201 insertions(+), 1168 deletions(-) delete mode 
100644 core/encoding.go delete mode 100644 core/encoding/encoder.go delete mode 100644 core/encoding/encoder_test.go delete mode 100644 core/encoding/mock_encoder.go delete mode 100644 core/encoding/verifier_test.go create mode 100644 encoding/data.go rename {core/encoding => encoding/kzgrs}/cli.go (92%) delete mode 100644 encoding/kzgrs/prover/data/SRSTable/dimE16.coset8 create mode 100644 encoding/mock/encoder.go create mode 100644 encoding/params.go create mode 100644 encoding/serialization.go create mode 100644 encoding/utils.go diff --git a/clients/disperser_client.go b/clients/disperser_client.go index da2d1b340f..b9337d9ec9 100644 --- a/clients/disperser_client.go +++ b/clients/disperser_client.go @@ -9,6 +9,7 @@ import ( disperser_rpc "github.com/Layr-Labs/eigenda/api/grpc/disperser" "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/disperser" + "github.com/Layr-Labs/eigenda/encoding" "google.golang.org/grpc" "google.golang.org/grpc/credentials" "google.golang.org/grpc/credentials/insecure" @@ -155,7 +156,7 @@ func (c *disperserClient) DisperseBlobAuthenticated(ctx context.Context, data [] } authHeader := core.BlobAuthHeader{ - BlobCommitments: core.BlobCommitments{}, + BlobCommitments: encoding.BlobCommitments{}, AccountID: "", Nonce: authHeaderReply.BlobAuthHeader.ChallengeParameter, } diff --git a/clients/node_client.go b/clients/node_client.go index f90b0364ac..e1422dbc6f 100644 --- a/clients/node_client.go +++ b/clients/node_client.go @@ -6,6 +6,7 @@ import ( "github.com/Layr-Labs/eigenda/api/grpc/node" "github.com/Layr-Labs/eigenda/core" + "github.com/Layr-Labs/eigenda/encoding" node_utils "github.com/Layr-Labs/eigenda/node/grpc" "github.com/wealdtech/go-merkletree" "google.golang.org/grpc" @@ -14,7 +15,7 @@ import ( type RetrievedChunks struct { OperatorID core.OperatorID - Chunks []*core.Chunk + Chunks []*encoding.Frame Err error } @@ -117,9 +118,9 @@ func (c client) GetChunks( return } - chunks := make([]*core.Chunk, 
len(reply.GetChunks())) + chunks := make([]*encoding.Frame, len(reply.GetChunks())) for i, data := range reply.GetChunks() { - chunk, err := new(core.Chunk).Deserialize(data) + chunk, err := new(encoding.Frame).Deserialize(data) if err != nil { chunksChan <- RetrievedChunks{ OperatorID: opID, diff --git a/clients/retrieval_client.go b/clients/retrieval_client.go index d3639df65d..baad69bdf2 100644 --- a/clients/retrieval_client.go +++ b/clients/retrieval_client.go @@ -6,6 +6,7 @@ import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/core" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" "github.com/gammazero/workerpool" "github.com/wealdtech/go-merkletree" @@ -27,7 +28,7 @@ type retrievalClient struct { indexedChainState core.IndexedChainState assignmentCoordinator core.AssignmentCoordinator nodeClient NodeClient - encoder core.Encoder + verifier encoding.Verifier numConnections int } @@ -38,7 +39,7 @@ func NewRetrievalClient( chainState core.IndexedChainState, assignmentCoordinator core.AssignmentCoordinator, nodeClient NodeClient, - encoder core.Encoder, + verifier encoding.Verifier, numConnections int, ) (*retrievalClient, error) { @@ -47,7 +48,7 @@ func NewRetrievalClient( indexedChainState: chainState, assignmentCoordinator: assignmentCoordinator, nodeClient: nodeClient, - encoder: encoder, + verifier: verifier, numConnections: numConnections, }, nil } @@ -115,14 +116,14 @@ func (r *retrievalClient) RetrieveBlob( } // Validate the blob length - err = r.encoder.VerifyBlobLength(blobHeader.BlobCommitments) + err = r.verifier.VerifyBlobLength(blobHeader.BlobCommitments) if err != nil { return nil, err } // Validate the commitments are equivalent - commitmentBatch := []core.BlobCommitments{blobHeader.BlobCommitments} - err = r.encoder.VerifyCommitEquivalenceBatch(commitmentBatch) + commitmentBatch := []encoding.BlobCommitments{blobHeader.BlobCommitments} + err = 
r.verifier.VerifyCommitEquivalenceBatch(commitmentBatch) if err != nil { return nil, err } @@ -143,13 +144,10 @@ func (r *retrievalClient) RetrieveBlob( }) } - encodingParams, err := core.GetEncodingParams(quorumHeader.ChunkLength, info.TotalChunks) - if err != nil { - return nil, err - } + encodingParams := encoding.ParamsFromMins(quorumHeader.ChunkLength, info.TotalChunks) - var chunks []*core.Chunk - var indices []core.ChunkNumber + var chunks []*encoding.Frame + var indices []encoding.ChunkNumber // TODO(ian-shim): if we gathered enough chunks, cancel remaining RPC calls for i := 0; i < len(operators); i++ { reply := <-chunksChan @@ -162,7 +160,7 @@ func (r *retrievalClient) RetrieveBlob( return nil, fmt.Errorf("no assignment to operator %v", reply.OperatorID) } - err = r.encoder.VerifyChunks(reply.Chunks, assignment.GetIndices(), blobHeader.BlobCommitments, encodingParams) + err = r.verifier.VerifyFrames(reply.Chunks, assignment.GetIndices(), blobHeader.BlobCommitments, encodingParams) if err != nil { r.logger.Error("failed to verify chunks from operator", "operator", reply.OperatorID, "err", err) continue @@ -174,5 +172,5 @@ func (r *retrievalClient) RetrieveBlob( indices = append(indices, assignment.GetIndices()...) 
} - return r.encoder.Decode(chunks, indices, encodingParams, uint64(blobHeader.Length)*bn254.BYTES_PER_COEFFICIENT) + return r.verifier.Decode(chunks, indices, encodingParams, uint64(blobHeader.Length)*bn254.BYTES_PER_COEFFICIENT) } diff --git a/clients/tests/retrieval_client_test.go b/clients/tests/retrieval_client_test.go index 7b60e563bb..977bd3b5ee 100644 --- a/clients/tests/retrieval_client_test.go +++ b/clients/tests/retrieval_client_test.go @@ -10,9 +10,9 @@ import ( clientsmock "github.com/Layr-Labs/eigenda/clients/mock" "github.com/Layr-Labs/eigenda/common/logging" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" coreindexer "github.com/Layr-Labs/eigenda/core/indexer" coremock "github.com/Layr-Labs/eigenda/core/mock" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" @@ -26,7 +26,7 @@ import ( const numOperators = 10 -func makeTestEncoder() (core.Encoder, error) { +func makeTestComponents() (encoding.Prover, encoding.Verifier, error) { config := &kzgrs.KzgConfig{ G1Path: "../../inabox/resources/kzg/g1.point", G2Path: "../../inabox/resources/kzg/g2.point", @@ -36,20 +36,17 @@ func makeTestEncoder() (core.Encoder, error) { NumWorker: uint64(runtime.GOMAXPROCS(0)), } - kzgEncoderGroup, err := prover.NewProver(config, true) + p, err := prover.NewProver(config, true) if err != nil { - return nil, err + return nil, nil, err } - kzgVerifierGroup, err := verifier.NewVerifier(config, true) + v, err := verifier.NewVerifier(config, true) if err != nil { - return nil, err + return nil, nil, err } - return &encoding.Encoder{ - EncoderGroup: kzgEncoderGroup, - VerifierGroup: kzgVerifierGroup, - }, nil + return p, v, nil } var ( @@ -82,7 +79,7 @@ func setup(t *testing.T) { nodeClient = clientsmock.NewNodeClient() coordinator = &core.StdAssignmentCoordinator{} - encoder, err := makeTestEncoder() 
+ p, v, err := makeTestComponents() if err != nil { t.Fatal(err) } @@ -99,7 +96,7 @@ func setup(t *testing.T) { panic("failed to create a new indexed chain state") } - retrievalClient, err = clients.NewRetrievalClient(logger, ics, coordinator, nodeClient, encoder, 2) + retrievalClient, err = clients.NewRetrievalClient(logger, ics, coordinator, nodeClient, v, 2) if err != nil { panic("failed to create a new retrieval client") } @@ -132,7 +129,7 @@ func setup(t *testing.T) { } blobSize := uint(len(blob.Data)) - blobLength := core.GetBlobLength(uint(blobSize)) + blobLength := encoding.GetBlobLength(uint(blobSize)) chunkLength, err := coordinator.CalculateChunkLength(operatorState, blobLength, 0, securityParams[0]) if err != nil { @@ -153,18 +150,15 @@ func setup(t *testing.T) { t.Fatal(err) } - params, err := core.GetEncodingParams(chunkLength, info.TotalChunks) - if err != nil { - t.Fatal(err) - } + params := encoding.ParamsFromMins(chunkLength, info.TotalChunks) - commitments, chunks, err := encoder.Encode(blob.Data, params) + commitments, chunks, err := p.EncodeAndProve(blob.Data, params) if err != nil { t.Fatal(err) } blobHeader = &core.BlobHeader{ - BlobCommitments: core.BlobCommitments{ + BlobCommitments: encoding.BlobCommitments{ Commitment: commitments.Commitment, LengthCommitment: commitments.LengthCommitment, LengthProof: commitments.LengthProof, diff --git a/core/assignment_test.go b/core/assignment_test.go index 855ffddd31..74873bada2 100644 --- a/core/assignment_test.go +++ b/core/assignment_test.go @@ -8,6 +8,7 @@ import ( "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/core/mock" + "github.com/Layr-Labs/eigenda/encoding" "github.com/stretchr/testify/assert" ) @@ -88,7 +89,7 @@ func TestOperatorAssignments(t *testing.T) { assert.Equal(t, assignment, expectedAssignments[operatorID]) header := &core.BlobHeader{ - BlobCommitments: core.BlobCommitments{ + BlobCommitments: encoding.BlobCommitments{ Length: blobLength, }, QuorumInfos: 
[]*core.BlobQuorumInfo{quorumInfo}, diff --git a/core/auth/auth_test.go b/core/auth/auth_test.go index 5a0953ebe1..947979f50c 100644 --- a/core/auth/auth_test.go +++ b/core/auth/auth_test.go @@ -6,6 +6,7 @@ import ( "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/core/auth" + "github.com/Layr-Labs/eigenda/encoding" "github.com/stretchr/testify/assert" ) @@ -19,7 +20,7 @@ func TestAuthentication(t *testing.T) { signer := auth.NewSigner(privateKeyHex) testHeader := core.BlobAuthHeader{ - BlobCommitments: core.BlobCommitments{}, + BlobCommitments: encoding.BlobCommitments{}, AccountID: signer.GetAccountID(), Nonce: rand.Uint32(), AuthenticationData: []byte{}, @@ -46,7 +47,7 @@ func TestAuthenticationFail(t *testing.T) { signer := auth.NewSigner(privateKeyHex) testHeader := core.BlobAuthHeader{ - BlobCommitments: core.BlobCommitments{}, + BlobCommitments: encoding.BlobCommitments{}, AccountID: signer.GetAccountID(), Nonce: rand.Uint32(), AuthenticationData: []byte{}, diff --git a/core/data.go b/core/data.go index ca0e124950..b0469d0ab8 100644 --- a/core/data.go +++ b/core/data.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/Layr-Labs/eigenda/common" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" ) @@ -58,7 +59,7 @@ type Blob struct { // multiple times (Replay attack). type BlobAuthHeader struct { // Commitments - BlobCommitments `json:"commitments"` + encoding.BlobCommitments `json:"commitments"` // AccountID is the account that is paying for the blob to be stored. 
AccountID is hexadecimal representation of the ECDSA public key AccountID AccountID `json:"account_id"` // Nonce @@ -99,7 +100,7 @@ type BlobQuorumInfo struct { // BlobHeader contains all metadata related to a blob including commitments and parameters for encoding type BlobHeader struct { - BlobCommitments + encoding.BlobCommitments // QuorumInfos contains the quorum specific parameters for the blob QuorumInfos []*BlobQuorumInfo @@ -126,14 +127,6 @@ func (b *BlobHeader) EncodedSizeAllQuorums() int64 { return size } -// BlomCommitments contains the blob's commitment, degree proof, and the actual degree. -type BlobCommitments struct { - Commitment *G1Commitment `json:"commitment"` - LengthCommitment *G2Commitment `json:"length_commitment"` - LengthProof *LengthProof `json:"length_proof"` - Length uint `json:"length"` -} - // Batch // A batch is a collection of blobs. DA nodes receive and attest to the blobs in a batch together to amortize signature verification costs @@ -148,28 +141,8 @@ type BatchHeader struct { // EncodedBlob contains the messages to be sent to a group of DA nodes corresponding to a single blob type EncodedBlob = map[OperatorID]*BlobMessage -// Chunks - -// Chunk is the smallest unit that is distributed to DA nodes, including both data and the associated polynomial opening proofs. -// A chunk corresponds to a set of evaluations of the global polynomial whose coefficients are used to construct the blob Commitment. -type Chunk struct { - // The Coeffs field contains the coefficients of the polynomial which interolates these evaluations. This is the same as the - // interpolating polynomial, I(X), used in the KZG multi-reveal (https://dankradfeist.de/ethereum/2020/06/16/kate-polynomial-commitments.html#multiproofs) - Coeffs []Symbol - Proof Proof -} - -func (c *Chunk) Length() int { - return len(c.Coeffs) -} - -// Returns the size of chunk in bytes. 
-func (c *Chunk) Size() int { - return c.Length() * bn254.BYTES_PER_COEFFICIENT -} - // A Bundle is the collection of chunks associated with a single blob, for a single operator and a single quorum. -type Bundle = []*Chunk +type Bundle = []*encoding.Frame // Bundles is the collection of bundles associated with a single blob and a single operator. type Bundles map[QuorumID]Bundle @@ -205,18 +178,3 @@ func (cb Bundles) Size() int64 { } return size } - -// Sample is a chunk with associated metadata used by the Universal Batch Verifier -type Sample struct { - Commitment *G1Commitment - Chunk *Chunk - AssignmentIndex ChunkNumber - BlobIndex int -} - -// SubBatch is a part of the whole Batch with identical Encoding Parameters, i.e. (ChunkLen, NumChunk) -// Blobs with the same encoding parameters are collected in a single subBatch -type SubBatch struct { - Samples []Sample - NumBlobs int -} diff --git a/core/encoding.go b/core/encoding.go deleted file mode 100644 index 661d778ed5..0000000000 --- a/core/encoding.go +++ /dev/null @@ -1,96 +0,0 @@ -package core - -import ( - "fmt" - - encoder "github.com/Layr-Labs/eigenda/encoding/rs" - "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" -) - -// Commitments - -// Commitment is a polynomial commitment (e.g. a kzg commitment) -type G1Commitment bn254.G1Point - -// Commitment is a polynomial commitment (e.g. a kzg commitment) -type G2Commitment bn254.G2Point - -// LengthProof is a polynomial commitment on G2 (e.g. a kzg commitment) used for low degree proof -type LengthProof = G2Commitment - -// The proof used to open a commitment. In the case of Kzg, this is also a kzg commitment, and is different from a Commitment only semantically. -type Proof = bn254.G1Point - -// Symbol is a symbol in the field used for polynomial commitments -type Symbol = bn254.Fr - -// Encoding - -// EncodingParams contains the encoding parameters that the encoder must satisfy. 
-type EncodingParams struct { - ChunkLength uint // ChunkSize is the length of the chunk in symbols - NumChunks uint -} - -// Encoder is responsible for encoding, decoding, and chunk verification -type Encoder interface { - // Encode takes in a blob and returns the commitments and encoded chunks. The encoding will satisfy the property that - // for any number M such that M*params.ChunkLength > BlobCommitments.Length, then any set of M chunks will be sufficient to - // reconstruct the blob. - Encode(data []byte, params EncodingParams) (BlobCommitments, []*Chunk, error) - - // VerifyChunks takes in the chunks, indices, commitments, and encoding parameters and returns an error if the chunks are invalid. - VerifyChunks(chunks []*Chunk, indices []ChunkNumber, commitments BlobCommitments, params EncodingParams) error - - // VerifyBatch takes in the encoding parameters, samples and the number of blobs and returns an error if a chunk in any sample is invalid. - UniversalVerifySubBatch(params EncodingParams, samples []Sample, numBlobs int) error - - // VerifyBlobLength takes in the commitments and returns an error if the blob length is invalid. - VerifyBlobLength(commitments BlobCommitments) error - - // VerifyCommitEquivalence takes in a list of commitments and returns an error if the commitment of G1 and G2 are inconsistent - VerifyCommitEquivalenceBatch(commitments []BlobCommitments) error - - // Decode takes in the chunks, indices, and encoding parameters and returns the decoded blob - Decode(chunks []*Chunk, indices []ChunkNumber, params EncodingParams, inputSize uint64) ([]byte, error) -} - -// GetBlobLength converts from blob size in bytes to blob size in symbols -func GetBlobLength(blobSize uint) uint { - symSize := uint(bn254.BYTES_PER_COEFFICIENT) - return (blobSize + symSize - 1) / symSize -} - -// GetBlobSize converts from blob length in symbols to blob size in bytes. This is not an exact conversion. 
-func GetBlobSize(blobLength uint) uint { - return blobLength * bn254.BYTES_PER_COEFFICIENT -} - -// GetBlobLength converts from blob size in bytes to blob size in symbols -func GetEncodedBlobLength(blobLength uint, quorumThreshold, advThreshold uint8) uint { - return roundUpDivide(blobLength*100, uint(quorumThreshold)-uint(advThreshold)) -} - -// GetEncodingParams takes in the minimum chunk length and the minimum number of chunks and returns the encoding parameters. -// Both the ChunkLength and NumChunks must be powers of 2, and the ChunkLength returned here should be used in constructing the BlobHeader. -func GetEncodingParams(minChunkLength, minNumChunks uint) (EncodingParams, error) { - return EncodingParams{ - ChunkLength: uint(encoder.NextPowerOf2(uint64(minChunkLength))), - NumChunks: uint(encoder.NextPowerOf2(uint64(minNumChunks))), - }, nil -} - -// ValidateEncodingParams takes in the encoding parameters and returns an error if they are invalid. -func ValidateEncodingParams(params EncodingParams, blobLength, SRSOrder int) error { - - if int(params.ChunkLength*params.NumChunks) >= SRSOrder { - return fmt.Errorf("the supplied encoding parameters are not valid with respect to the SRS. 
ChunkLength: %d, NumChunks: %d, SRSOrder: %d", params.ChunkLength, params.NumChunks, SRSOrder) - } - - if int(params.ChunkLength*params.NumChunks) < blobLength { - return fmt.Errorf("the supplied encoding parameters are not sufficient for the size of the data input") - } - - return nil - -} diff --git a/core/encoding/encoder.go b/core/encoding/encoder.go deleted file mode 100644 index 9646ba28fb..0000000000 --- a/core/encoding/encoder.go +++ /dev/null @@ -1,215 +0,0 @@ -package encoding - -import ( - "crypto/sha256" - - "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/encoding" - "github.com/Layr-Labs/eigenda/encoding/kzgrs" - "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" - "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" - encoder "github.com/Layr-Labs/eigenda/encoding/rs" - "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" - lru "github.com/hashicorp/golang-lru/v2" -) - -func toEncParams(params core.EncodingParams) encoder.EncodingParams { - return encoder.ParamsFromMins(uint64(params.NumChunks), uint64(params.ChunkLength)) -} - -type EncoderConfig struct { - KzgConfig kzgrs.KzgConfig - CacheEncodedBlobs bool -} - -type Encoder struct { - Config EncoderConfig - EncoderGroup *prover.Prover - VerifierGroup *verifier.Verifier - Cache *lru.Cache[string, encodedValue] -} - -var _ core.Encoder = &Encoder{} - -func NewEncoder(config EncoderConfig, loadG2Points bool) (*Encoder, error) { - kzgEncoderGroup, err := prover.NewProver(&config.KzgConfig, loadG2Points) - if err != nil { - return nil, err - } - - kzgVerifierGroup, err := verifier.NewVerifier(&config.KzgConfig, loadG2Points) - if err != nil { - return nil, err - } - - cache, err := lru.New[string, encodedValue](128) - if err != nil { - return nil, err - } - - return &Encoder{ - EncoderGroup: kzgEncoderGroup, - VerifierGroup: kzgVerifierGroup, - Cache: cache, - Config: config, - }, nil -} - -type encodedValue struct { - commitments core.BlobCommitments - chunks []*core.Chunk - err error 
-} - -func (e *Encoder) Encode(data []byte, params core.EncodingParams) (core.BlobCommitments, []*core.Chunk, error) { - - var cacheKey string = "" - if e.Config.CacheEncodedBlobs { - cacheKey = hashBlob(data, params) - if v, ok := e.Cache.Get(cacheKey); ok { - return v.commitments, v.chunks, v.err - } - } - encParams := toEncParams(params) - - enc, err := e.EncoderGroup.GetKzgEncoder(encParams) - if err != nil { - return core.BlobCommitments{}, nil, err - } - - commit, lowDegreeCommit, lowDegreeProof, kzgFrames, _, err := enc.EncodeBytes(data) - if err != nil { - return core.BlobCommitments{}, nil, err - } - - chunks := make([]*core.Chunk, len(kzgFrames)) - for ind, frame := range kzgFrames { - - chunks[ind] = &core.Chunk{ - Coeffs: frame.Coeffs, - Proof: frame.Proof, - } - } - - length := uint(len(encoder.ToFrArray(data))) - commitments := core.BlobCommitments{ - Commitment: (*core.G1Commitment)(commit), - LengthCommitment: (*core.G2Commitment)(lowDegreeCommit), - LengthProof: (*core.G2Commitment)(lowDegreeProof), - Length: length, - } - - if e.Config.CacheEncodedBlobs { - e.Cache.Add(cacheKey, encodedValue{ - commitments: commitments, - chunks: chunks, - err: nil, - }) - } - return commitments, chunks, nil -} - -func (e *Encoder) VerifyBlobLength(commitments core.BlobCommitments) error { - return e.VerifierGroup.VerifyCommit((*bn254.G2Point)(commitments.LengthCommitment), (*bn254.G2Point)(commitments.LengthProof), uint64(commitments.Length)) - -} - -func (e *Encoder) VerifyChunks(chunks []*core.Chunk, indices []core.ChunkNumber, commitments core.BlobCommitments, params core.EncodingParams) error { - - encParams := toEncParams(params) - - verifier, err := e.VerifierGroup.GetKzgVerifier(encParams) - if err != nil { - return err - } - - for ind := range chunks { - err = verifier.VerifyFrame( - (*bn254.G1Point)(commitments.Commitment), - &encoding.Frame{ - Proof: chunks[ind].Proof, - Coeffs: chunks[ind].Coeffs, - }, - uint64(indices[ind]), - ) - - if err != nil { - 
return err - } - } - - return nil - -} - -func (e *Encoder) VerifyCommitEquivalenceBatch(commitments []core.BlobCommitments) error { - commitmentsPair := make([]verifier.CommitmentPair, len(commitments)) - - for i, c := range commitments { - commitmentsPair[i] = verifier.CommitmentPair{ - Commitment: (bn254.G1Point)(*c.Commitment), - LengthCommitment: (bn254.G2Point)(*c.LengthCommitment), - } - } - return e.VerifierGroup.BatchVerifyCommitEquivalence(commitmentsPair) -} - -// convert struct understandable by the crypto library -func (e *Encoder) UniversalVerifySubBatch(params core.EncodingParams, samplesCore []core.Sample, numBlobs int) error { - encParams := toEncParams(params) - samples := make([]verifier.Sample, len(samplesCore)) - - for i, sc := range samplesCore { - x, err := encoder.GetLeadingCosetIndex( - uint64(sc.AssignmentIndex), - encParams.NumChunks, - ) - if err != nil { - return err - } - - sample := verifier.Sample{ - Commitment: (bn254.G1Point)(*sc.Commitment), - Proof: sc.Chunk.Proof, - RowIndex: sc.BlobIndex, - Coeffs: sc.Chunk.Coeffs, - X: uint(x), - } - samples[i] = sample - } - - return e.VerifierGroup.UniversalVerify(encParams, samples, numBlobs) -} - -// Decode takes in the chunks, indices, and encoding parameters and returns the decoded blob -// The result is trimmed to the given maxInputSize. 
-func (e *Encoder) Decode(chunks []*core.Chunk, indices []core.ChunkNumber, params core.EncodingParams, maxInputSize uint64) ([]byte, error) { - frames := make([]encoding.Frame, len(chunks)) - for i := range chunks { - frames[i] = encoding.Frame{ - Proof: chunks[i].Proof, - Coeffs: chunks[i].Coeffs, - } - } - encoder, err := e.EncoderGroup.GetKzgEncoder(toEncParams(params)) - if err != nil { - return nil, err - } - - return encoder.Decode(frames, toUint64Array(indices), maxInputSize) -} - -func toUint64Array(chunkIndices []core.ChunkNumber) []uint64 { - res := make([]uint64, len(chunkIndices)) - for i, d := range chunkIndices { - res[i] = uint64(d) - } - return res -} - -func hashBlob(data []byte, params core.EncodingParams) string { - h := sha256.New() - h.Write(data) - h.Write([]byte{byte(params.ChunkLength), byte(params.NumChunks)}) - return string(h.Sum(nil)) -} diff --git a/core/encoding/encoder_test.go b/core/encoding/encoder_test.go deleted file mode 100644 index 0f63b8d53d..0000000000 --- a/core/encoding/encoder_test.go +++ /dev/null @@ -1,109 +0,0 @@ -package encoding_test - -import ( - "crypto/rand" - "log" - "runtime" - "testing" - - "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" - "github.com/Layr-Labs/eigenda/encoding/kzgrs" - "github.com/stretchr/testify/assert" -) - -var ( - enc core.Encoder - - gettysburgAddressBytes = []byte("Fourscore and seven years ago our fathers brought forth, on this continent, a new nation, conceived in liberty, and dedicated to the proposition that all men are created equal. Now we are engaged in a great civil war, testing whether that nation, or any nation so conceived, and so dedicated, can long endure. We are met on a great battle-field of that war. We have come to dedicate a portion of that field, as a final resting-place for those who here gave their lives, that that nation might live. It is altogether fitting and proper that we should do this. 
But, in a larger sense, we cannot dedicate, we cannot consecrate—we cannot hallow—this ground. The brave men, living and dead, who struggled here, have consecrated it far above our poor power to add or detract. The world will little note, nor long remember what we say here, but it can never forget what they did here. It is for us the living, rather, to be dedicated here to the unfinished work which they who fought here have thus far so nobly advanced. It is rather for us to be here dedicated to the great task remaining before us—that from these honored dead we take increased devotion to that cause for which they here gave the last full measure of devotion—that we here highly resolve that these dead shall not have died in vain—that this nation, under God, shall have a new birth of freedom, and that government of the people, by the people, for the people, shall not perish from the earth.") -) - -func init() { - var err error - enc, err = makeTestEncoder() - if err != nil { - log.Fatal(err) - } -} - -// makeTestEncoder makes an encoder currently using the only supported backend. 
-func makeTestEncoder() (core.Encoder, error) { - config := kzgrs.KzgConfig{ - G1Path: "../../inabox/resources/kzg/g1.point.300000", - G2Path: "../../inabox/resources/kzg/g2.point.300000", - CacheDir: "../../inabox/resources/kzg/SRSTables", - SRSOrder: 300000, - SRSNumberToLoad: 300000, - NumWorker: uint64(runtime.GOMAXPROCS(0)), - } - - return encoding.NewEncoder(encoding.EncoderConfig{KzgConfig: config}, true) -} - -func TestEncoder(t *testing.T) { - params := core.EncodingParams{ - ChunkLength: 5, - NumChunks: 5, - } - commitments, chunks, err := enc.Encode(gettysburgAddressBytes, params) - assert.NoError(t, err) - - indices := []core.ChunkNumber{ - 0, 1, 2, 3, 4, 5, 6, 7, - } - err = enc.VerifyChunks(chunks, indices, commitments, params) - assert.NoError(t, err) - err = enc.VerifyChunks(chunks, []core.ChunkNumber{ - 7, 6, 5, 4, 3, 2, 1, 0, - }, commitments, params) - assert.Error(t, err) - - maxInputSize := uint64(len(gettysburgAddressBytes)) - decoded, err := enc.Decode(chunks, indices, params, maxInputSize) - assert.NoError(t, err) - assert.Equal(t, gettysburgAddressBytes, decoded) - - // shuffle chunks - tmp := chunks[2] - chunks[2] = chunks[5] - chunks[5] = tmp - indices = []core.ChunkNumber{ - 0, 1, 5, 3, 4, 2, 6, 7, - } - - err = enc.VerifyChunks(chunks, indices, commitments, params) - assert.NoError(t, err) - - decoded, err = enc.Decode(chunks, indices, params, maxInputSize) - assert.NoError(t, err) - assert.Equal(t, gettysburgAddressBytes, decoded) -} - -// Ballpark number for 400KiB blob encoding -// -// goos: darwin -// goarch: arm64 -// pkg: github.com/Layr-Labs/eigenda/core/encoding -// BenchmarkEncode-12 1 2421900583 ns/op -func BenchmarkEncode(b *testing.B) { - params := core.EncodingParams{ - ChunkLength: 512, - NumChunks: 256, - } - blobSize := 400 * 1024 - numSamples := 30 - blobs := make([][]byte, numSamples) - for i := 0; i < numSamples; i++ { - blob := make([]byte, blobSize) - _, _ = rand.Read(blob) - blobs[i] = blob - } - - // Warm up the 
encoder: ensures that all SRS tables are loaded so these aren't included in the benchmark. - _, _, _ = enc.Encode(blobs[0], params) - b.ResetTimer() - - for i := 0; i < b.N; i++ { - _, _, _ = enc.Encode(blobs[i%numSamples], params) - } -} diff --git a/core/encoding/mock_encoder.go b/core/encoding/mock_encoder.go deleted file mode 100644 index 172ac8c8b5..0000000000 --- a/core/encoding/mock_encoder.go +++ /dev/null @@ -1,52 +0,0 @@ -package encoding - -import ( - "time" - - "github.com/Layr-Labs/eigenda/core" - "github.com/stretchr/testify/mock" -) - -type MockEncoder struct { - mock.Mock - - Delay time.Duration -} - -var _ core.Encoder = &MockEncoder{} - -func (e *MockEncoder) Encode(data []byte, params core.EncodingParams) (core.BlobCommitments, []*core.Chunk, error) { - args := e.Called(data, params) - time.Sleep(e.Delay) - return args.Get(0).(core.BlobCommitments), args.Get(1).([]*core.Chunk), args.Error(2) -} - -func (e *MockEncoder) VerifyChunks(chunks []*core.Chunk, indices []core.ChunkNumber, commitments core.BlobCommitments, params core.EncodingParams) error { - args := e.Called(chunks, indices, commitments, params) - time.Sleep(e.Delay) - return args.Error(0) -} - -func (e *MockEncoder) UniversalVerifySubBatch(params core.EncodingParams, samples []core.Sample, numBlobs int) error { - args := e.Called(params, samples, numBlobs) - time.Sleep(e.Delay) - return args.Error(0) -} -func (e *MockEncoder) VerifyCommitEquivalenceBatch(commitments []core.BlobCommitments) error { - args := e.Called(commitments) - time.Sleep(e.Delay) - return args.Error(0) -} - -func (e *MockEncoder) VerifyBlobLength(commitments core.BlobCommitments) error { - - args := e.Called(commitments) - time.Sleep(e.Delay) - return args.Error(0) -} - -func (e *MockEncoder) Decode(chunks []*core.Chunk, indices []core.ChunkNumber, params core.EncodingParams, maxInputSize uint64) ([]byte, error) { - args := e.Called(chunks, indices, params, maxInputSize) - time.Sleep(e.Delay) - return 
args.Get(0).([]byte), args.Error(1) -} diff --git a/core/encoding/verifier_test.go b/core/encoding/verifier_test.go deleted file mode 100644 index 040f44d2c2..0000000000 --- a/core/encoding/verifier_test.go +++ /dev/null @@ -1,92 +0,0 @@ -package encoding_test - -import ( - "crypto/rand" - "fmt" - "os" - "testing" - - "github.com/Layr-Labs/eigenda/core" - "github.com/stretchr/testify/assert" - // "github.com/pkg/profile" -) - -// var control interface{ Stop() } - -func TestBenchmarkVerifyChunks(t *testing.T) { - t.Skip("This test is meant to be run manually, not as part of the test suite") - - chunkLengths := []int{64, 128, 256, 512, 1024, 2048, 4096, 8192} - chunkCounts := []int{4, 8, 16} - - file, err := os.Create("benchmark_results.csv") - if err != nil { - t.Fatalf("Failed to open file for writing: %v", err) - } - defer file.Close() - - fmt.Fprintln(file, "numChunks,chunkLength,ns/op,allocs/op") - - for _, chunkLength := range chunkLengths { - - blobSize := chunkLength * 31 * 2 - params := core.EncodingParams{ - ChunkLength: uint(chunkLength), - NumChunks: 16, - } - blob := make([]byte, blobSize) - _, err = rand.Read(blob) - assert.NoError(t, err) - - commitments, chunks, err := enc.Encode(blob, params) - assert.NoError(t, err) - - indices := make([]core.ChunkNumber, params.NumChunks) - for i := range indices { - indices[i] = core.ChunkNumber(i) - } - - for _, numChunks := range chunkCounts { - - result := testing.Benchmark(func(b *testing.B) { - for i := 0; i < b.N; i++ { - // control = profile.Start(profile.ProfilePath(".")) - err := enc.VerifyChunks(chunks[:numChunks], indices[:numChunks], commitments, params) - assert.NoError(t, err) - // control.Stop() - } - }) - // Print results in CSV format - fmt.Fprintf(file, "%d,%d,%d,%d\n", numChunks, chunkLength, result.NsPerOp(), result.AllocsPerOp()) - - } - } - -} - -func BenchmarkVerifyBlob(b *testing.B) { - - params := core.EncodingParams{ - ChunkLength: uint(256), - NumChunks: uint(8), - } - blobSize := 8 * 
256 - numSamples := 30 - blobs := make([][]byte, numSamples) - for i := 0; i < numSamples; i++ { - blob := make([]byte, blobSize) - _, _ = rand.Read(blob) - blobs[i] = blob - } - - commitments, _, err := enc.Encode(blobs[0], params) - assert.NoError(b, err) - - b.ResetTimer() - - for i := 0; i < b.N; i++ { - err = enc.VerifyBlobLength(commitments) - assert.NoError(b, err) - } - -} diff --git a/core/serialization.go b/core/serialization.go index eda00a1bd3..1874836fb6 100644 --- a/core/serialization.go +++ b/core/serialization.go @@ -4,7 +4,6 @@ import ( "bytes" "encoding/binary" "encoding/gob" - "encoding/json" "errors" "fmt" "math/big" @@ -12,7 +11,6 @@ import ( "slices" binding "github.com/Layr-Labs/eigenda/contracts/bindings/EigenDAServiceManager" - bn "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/ethereum/go-ethereum/accounts/abi" "github.com/wealdtech/go-merkletree" @@ -367,55 +365,6 @@ func (h *BlobHeader) Deserialize(data []byte) (*BlobHeader, error) { return h, err } -func (c *Chunk) Serialize() ([]byte, error) { - return encode(c) -} - -func (c *Chunk) Deserialize(data []byte) (*Chunk, error) { - err := decode(data, c) - return c, err -} - -func (c *G1Commitment) Serialize() ([]byte, error) { - return encode(c) -} - -func (c *G1Commitment) Deserialize(data []byte) (*G1Commitment, error) { - err := decode(data, c) - return c, err -} - -func (c *G1Commitment) UnmarshalJSON(data []byte) error { - var g1Point bn.G1Affine - err := json.Unmarshal(data, &g1Point) - if err != nil { - return err - } - c.X = g1Point.X - c.Y = g1Point.Y - return nil -} - -func (c *G2Commitment) Serialize() ([]byte, error) { - return encode(c) -} - -func (c *G2Commitment) Deserialize(data []byte) (*G2Commitment, error) { - err := decode(data, c) - return c, err -} - -func (c *G2Commitment) UnmarshalJSON(data []byte) error { - var g2Point bn.G2Affine - err := json.Unmarshal(data, &g2Point) - if err != nil { - return err - } - c.X = g2Point.X - c.Y = g2Point.Y - return nil 
-} - func encode(obj any) ([]byte, error) { var buf bytes.Buffer enc := gob.NewEncoder(&buf) diff --git a/core/serialization_test.go b/core/serialization_test.go index 020b5880d5..6a15ceab9f 100644 --- a/core/serialization_test.go +++ b/core/serialization_test.go @@ -8,6 +8,7 @@ import ( binding "github.com/Layr-Labs/eigenda/contracts/bindings/EigenDAServiceManager" "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/core/eth" + "github.com/Layr-Labs/eigenda/encoding" kzgbn254 "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fp" @@ -57,7 +58,7 @@ func TestBlobHeaderEncoding(t *testing.T) { commitX = *commitX.SetBigInt(big.NewInt(1)) commitY = *commitY.SetBigInt(big.NewInt(2)) - commitment := &core.G1Commitment{ + commitment := &encoding.G1Commitment{ X: commitX, Y: commitY, } @@ -81,10 +82,10 @@ func TestBlobHeaderEncoding(t *testing.T) { lengthCommitment = lengthProof blobHeader := &core.BlobHeader{ - BlobCommitments: core.BlobCommitments{ + BlobCommitments: encoding.BlobCommitments{ Commitment: commitment, - LengthCommitment: (*core.G2Commitment)(&lengthCommitment), - LengthProof: (*core.G2Commitment)(&lengthProof), + LengthCommitment: (*encoding.G2Commitment)(&lengthCommitment), + LengthProof: (*encoding.G2Commitment)(&lengthProof), Length: 10, }, QuorumInfos: []*core.BlobQuorumInfo{ @@ -146,7 +147,7 @@ func TestCommitmentMarshaling(t *testing.T) { commitX = *commitX.SetBigInt(big.NewInt(1)) commitY = *commitY.SetBigInt(big.NewInt(2)) - commitment := &core.G1Commitment{ + commitment := &encoding.G1Commitment{ X: commitX, Y: commitY, } @@ -154,7 +155,7 @@ func TestCommitmentMarshaling(t *testing.T) { marshalled, err := json.Marshal(commitment) assert.NoError(t, err) - recovered := new(core.G1Commitment) + recovered := new(encoding.G1Commitment) err = json.Unmarshal(marshalled, recovered) assert.NoError(t, err) assert.Equal(t, recovered, commitment) diff --git 
a/core/test/core_test.go b/core/test/core_test.go index be255679c8..5f8c4b3cc1 100644 --- a/core/test/core_test.go +++ b/core/test/core_test.go @@ -10,15 +10,18 @@ import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" "github.com/Layr-Labs/eigenda/core/mock" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" "github.com/gammazero/workerpool" "github.com/stretchr/testify/assert" ) var ( - enc core.Encoder + p encoding.Prover + v encoding.Verifier asn core.AssignmentCoordinator = &core.StdAssignmentCoordinator{} ) @@ -31,15 +34,15 @@ func TestMain(m *testing.M) { func setup(m *testing.M) { var err error - enc, err = makeTestEncoder() + p, v, err = makeTestComponents() if err != nil { panic("failed to start localstack container") } } -// makeTestEncoder makes an encoder currently using the only supported backend. -func makeTestEncoder() (core.Encoder, error) { - config := kzgrs.KzgConfig{ +// makeTestComponents makes a prover and verifier currently using the only supported backend. 
+func makeTestComponents() (encoding.Prover, encoding.Verifier, error) { + config := &kzgrs.KzgConfig{ G1Path: "../../inabox/resources/kzg/g1.point", G2Path: "../../inabox/resources/kzg/g2.point", CacheDir: "../../inabox/resources/kzg/SRSTables", @@ -48,8 +51,17 @@ func makeTestEncoder() (core.Encoder, error) { NumWorker: uint64(runtime.GOMAXPROCS(0)), } - return encoding.NewEncoder(encoding.EncoderConfig{KzgConfig: config}, true) + p, err := prover.NewProver(config, true) + if err != nil { + return nil, nil, err + } + + v, err := verifier.NewVerifier(config, true) + if err != nil { + return nil, nil, err + } + return p, v, nil } func makeTestBlob(t *testing.T, length int, securityParams []*core.SecurityParam) core.Blob { @@ -91,7 +103,7 @@ func prepareBatch(t *testing.T, cst core.IndexedChainState, blobs []core.Blob, q } blobSize := uint(len(blob.Data)) - blobLength := core.GetBlobLength(blobSize) + blobLength := encoding.GetBlobLength(blobSize) chunkLength, err := asn.CalculateChunkLength(state, blobLength, 0, blob.RequestHeader.SecurityParams[quorumIndex]) if err != nil { @@ -112,18 +124,15 @@ func prepareBatch(t *testing.T, cst core.IndexedChainState, blobs []core.Blob, q t.Fatal(err) } - params, err := core.GetEncodingParams(chunkLength, info.TotalChunks) - if err != nil { - t.Fatal(err) - } + params := encoding.ParamsFromMins(chunkLength, info.TotalChunks) - commitments, chunks, err := enc.Encode(blob.Data, params) + commitments, chunks, err := p.EncodeAndProve(blob.Data, params) if err != nil { t.Fatal(err) } blobHeader := &core.BlobHeader{ - BlobCommitments: core.BlobCommitments{ + BlobCommitments: encoding.BlobCommitments{ Commitment: commitments.Commitment, LengthCommitment: commitments.LengthCommitment, LengthProof: commitments.LengthProof, @@ -153,7 +162,7 @@ func prepareBatch(t *testing.T, cst core.IndexedChainState, blobs []core.Blob, q // checkBatch runs the verification logic for each DA node in the current OperatorState, and returns an error if any 
of // the DA nodes' validation checks fails func checkBatch(t *testing.T, cst core.IndexedChainState, encodedBlob core.EncodedBlob, header core.BatchHeader) { - val := core.NewChunkValidator(enc, asn, cst, [32]byte{}) + val := core.NewChunkValidator(v, asn, cst, [32]byte{}) quorums := []core.QuorumID{0} state, _ := cst.GetIndexedOperatorState(context.Background(), header.ReferenceBlockNumber, quorums) @@ -170,7 +179,7 @@ func checkBatch(t *testing.T, cst core.IndexedChainState, encodedBlob core.Encod // checkBatchByUniversalVerifier runs the verification logic for each DA node in the current OperatorState, and returns an error if any of // the DA nodes' validation checks fails func checkBatchByUniversalVerifier(t *testing.T, cst core.IndexedChainState, encodedBlobs []core.EncodedBlob, header core.BatchHeader, pool common.WorkerPool) { - val := core.NewChunkValidator(enc, asn, cst, [32]byte{}) + val := core.NewChunkValidator(v, asn, cst, [32]byte{}) quorums := []core.QuorumID{0} state, _ := cst.GetIndexedOperatorState(context.Background(), header.ReferenceBlockNumber, quorums) diff --git a/core/validator.go b/core/validator.go index ab54035eaf..93e1fae25f 100644 --- a/core/validator.go +++ b/core/validator.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/Layr-Labs/eigenda/common" + "github.com/Layr-Labs/eigenda/encoding" ) var ( @@ -20,22 +21,22 @@ type ChunkValidator interface { // chunkValidator implements the validation logic that a DA node should apply to its received chunks type chunkValidator struct { - encoder Encoder + verifier encoding.Verifier assignment AssignmentCoordinator chainState ChainState operatorID OperatorID } -func NewChunkValidator(enc Encoder, asgn AssignmentCoordinator, cst ChainState, operatorID OperatorID) ChunkValidator { +func NewChunkValidator(v encoding.Verifier, asgn AssignmentCoordinator, cst ChainState, operatorID OperatorID) ChunkValidator { return &chunkValidator{ - encoder: enc, + verifier: v, assignment: asgn, chainState: cst, 
operatorID: operatorID, } } -func (v *chunkValidator) validateBlobQuorum(quorumHeader *BlobQuorumInfo, blob *BlobMessage, operatorState *OperatorState) ([]*Chunk, *Assignment, *EncodingParams, error) { +func (v *chunkValidator) validateBlobQuorum(quorumHeader *BlobQuorumInfo, blob *BlobMessage, operatorState *OperatorState) ([]*encoding.Frame, *Assignment, *encoding.EncodingParams, error) { if quorumHeader.AdversaryThreshold >= quorumHeader.QuorumThreshold { return nil, nil, nil, fmt.Errorf("invalid header: quorum threshold (%d) does not exceed adversary threshold (%d)", quorumHeader.QuorumThreshold, quorumHeader.AdversaryThreshold) } @@ -74,12 +75,8 @@ func (v *chunkValidator) validateBlobQuorum(quorumHeader *BlobQuorumInfo, blob * } // Check the received chunks against the commitment - params, err := GetEncodingParams(quorumHeader.ChunkLength, info.TotalChunks) - if err != nil { - return nil, nil, nil, err - } - - if params.ChunkLength != quorumHeader.ChunkLength { + params := encoding.ParamsFromMins(quorumHeader.ChunkLength, info.TotalChunks) + if params.ChunkLength != uint64(quorumHeader.ChunkLength) { return nil, nil, nil, fmt.Errorf("%w: chunk length from encoding parameters (%d) does not match quorum header (%d)", ErrChunkLengthMismatch, params.ChunkLength, quorumHeader.ChunkLength) } @@ -92,7 +89,7 @@ func (v *chunkValidator) ValidateBlob(blob *BlobMessage, operatorState *Operator } // Validate the blob length - err := v.encoder.VerifyBlobLength(blob.BlobHeader.BlobCommitments) + err := v.verifier.VerifyBlobLength(blob.BlobHeader.BlobCommitments) if err != nil { return err } @@ -106,7 +103,7 @@ func (v *chunkValidator) ValidateBlob(blob *BlobMessage, operatorState *Operator return err } else { // Check the received chunks against the commitment - err = v.encoder.VerifyChunks(chunks, assignment.GetIndices(), blob.BlobHeader.BlobCommitments, *params) + err = v.verifier.VerifyFrames(chunks, assignment.GetIndices(), blob.BlobHeader.BlobCommitments, *params) if 
err != nil { return err } @@ -121,8 +118,8 @@ func (v *chunkValidator) UpdateOperatorID(operatorID OperatorID) { } func (v *chunkValidator) ValidateBatch(blobs []*BlobMessage, operatorState *OperatorState, pool common.WorkerPool) error { - subBatchMap := make(map[EncodingParams]*SubBatch) - blobCommitmentList := make([]BlobCommitments, len(blobs)) + subBatchMap := make(map[encoding.EncodingParams]*encoding.SubBatch) + blobCommitmentList := make([]encoding.BlobCommitments, len(blobs)) for k, blob := range blobs { if len(blob.Bundles) != len(blob.BlobHeader.QuorumInfos) { @@ -148,9 +145,9 @@ func (v *chunkValidator) ValidateBatch(blobs []*BlobMessage, operatorState *Oper } indices := assignment.GetIndices() - samples := make([]Sample, len(chunks)) + samples := make([]encoding.Sample, len(chunks)) for ind := range chunks { - samples[ind] = Sample{ + samples[ind] = encoding.Sample{ Commitment: blob.BlobHeader.BlobCommitments.Commitment, Chunk: chunks[ind], AssignmentIndex: uint(indices[ind]), @@ -160,7 +157,7 @@ func (v *chunkValidator) ValidateBatch(blobs []*BlobMessage, operatorState *Oper // update subBatch if !ok { - subBatchMap[*params] = &SubBatch{ + subBatchMap[*params] = &encoding.SubBatch{ Samples: samples, NumBlobs: 1, } @@ -194,7 +191,7 @@ func (v *chunkValidator) ValidateBatch(blobs []*BlobMessage, operatorState *Oper }) } // check if commitments are equivalent - err := v.encoder.VerifyCommitEquivalenceBatch(blobCommitmentList) + err := v.verifier.VerifyCommitEquivalenceBatch(blobCommitmentList) if err != nil { return err } @@ -209,9 +206,9 @@ func (v *chunkValidator) ValidateBatch(blobs []*BlobMessage, operatorState *Oper return nil } -func (v *chunkValidator) universalVerifyWorker(params EncodingParams, subBatch *SubBatch, out chan error) { +func (v *chunkValidator) universalVerifyWorker(params encoding.EncodingParams, subBatch *encoding.SubBatch, out chan error) { - err := v.encoder.UniversalVerifySubBatch(params, subBatch.Samples, subBatch.NumBlobs) + 
err := v.verifier.UniversalVerifySubBatch(params, subBatch.Samples, subBatch.NumBlobs) if err != nil { out <- err return @@ -220,8 +217,8 @@ func (v *chunkValidator) universalVerifyWorker(params EncodingParams, subBatch * out <- nil } -func (v *chunkValidator) VerifyBlobLengthWorker(blobCommitments BlobCommitments, out chan error) { - err := v.encoder.VerifyBlobLength(blobCommitments) +func (v *chunkValidator) VerifyBlobLengthWorker(blobCommitments encoding.BlobCommitments, out chan error) { + err := v.verifier.VerifyBlobLength(blobCommitments) if err != nil { out <- err return diff --git a/disperser/api/grpc/encoder/encoder.pb.go b/disperser/api/grpc/encoder/encoder.pb.go index ae23088d55..1ca95514a5 100644 --- a/disperser/api/grpc/encoder/encoder.pb.go +++ b/disperser/api/grpc/encoder/encoder.pb.go @@ -197,7 +197,7 @@ func (x *EncodeBlobRequest) GetData() []byte { return nil } -func (x *EncodeBlobRequest) GetEncodingParams() *EncodingParams { +func (x *EncodeBlobRequest) ParamsFromMins() *EncodingParams { if x != nil { return x.EncodingParams } diff --git a/disperser/apiserver/ratelimit_test.go b/disperser/apiserver/ratelimit_test.go index 9bf39976ac..e969fd2043 100644 --- a/disperser/apiserver/ratelimit_test.go +++ b/disperser/apiserver/ratelimit_test.go @@ -12,6 +12,7 @@ import ( "github.com/Layr-Labs/eigenda/api/grpc/mock" "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/core/auth" + "github.com/Layr-Labs/eigenda/encoding" "github.com/stretchr/testify/assert" "google.golang.org/grpc/peer" ) @@ -222,7 +223,7 @@ func simulateClient(t *testing.T, signer core.BlobRequestSigner, origin string, assert.True(t, ok) authHeader := core.BlobAuthHeader{ - BlobCommitments: core.BlobCommitments{}, + BlobCommitments: encoding.BlobCommitments{}, AccountID: "", Nonce: authHeaderReply.BlobAuthHeader.ChallengeParameter, } diff --git a/disperser/apiserver/server.go b/disperser/apiserver/server.go index a66a2a8532..a79da3ebc2 100644 --- 
a/disperser/apiserver/server.go +++ b/disperser/apiserver/server.go @@ -18,6 +18,7 @@ import ( "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/core/auth" "github.com/Layr-Labs/eigenda/disperser" + "github.com/Layr-Labs/eigenda/encoding" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/crypto" "github.com/prometheus/client_golang/prometheus" @@ -362,9 +363,9 @@ func (s *DispersalServer) checkRateLimitsAndAddRates(ctx context.Context, blob * // Get the encoded blob size from the blob header. Calculation is done in a way that nodes can replicate blobSize := len(blob.Data) - length := core.GetBlobLength(uint(blobSize)) - encodedLength := core.GetEncodedBlobLength(length, uint8(param.QuorumThreshold), uint8(param.AdversaryThreshold)) - encodedSize := core.GetBlobSize(encodedLength) + length := encoding.GetBlobLength(uint(blobSize)) + encodedLength := encoding.GetEncodedBlobLength(length, uint8(param.QuorumThreshold), uint8(param.AdversaryThreshold)) + encodedSize := encoding.GetBlobSize(encodedLength) s.logger.Debug("checking rate limits", "origin", origin, "address", authenticatedAddress, "quorum", param.QuorumID, "encodedSize", encodedSize, "blobSize", blobSize, "accountThroughput", accountRates.Throughput, "accountBlobRate", accountRates.BlobRate, "accountKey", accountKey) diff --git a/disperser/apiserver/server_test.go b/disperser/apiserver/server_test.go index 69621acfaa..ff596faaf7 100644 --- a/disperser/apiserver/server_test.go +++ b/disperser/apiserver/server_test.go @@ -11,6 +11,7 @@ import ( "github.com/Layr-Labs/eigenda/disperser/apiserver" "github.com/Layr-Labs/eigenda/disperser/common/blobstore" + "github.com/Layr-Labs/eigenda/encoding" gethcommon "github.com/ethereum/go-ethereum/common" "github.com/google/uuid" @@ -464,7 +465,7 @@ func simulateBlobConfirmation(t *testing.T, requestID []byte, blobSize uint, sec _, err = 
commitY.SetString("9207254729396071334325696286939045899948985698134704137261649190717970615186") assert.NoError(t, err) - commitment := &core.G1Commitment{ + commitment := &encoding.G1Commitment{ X: commitX, Y: commitY, } @@ -496,7 +497,7 @@ func simulateBlobConfirmation(t *testing.T, requestID []byte, blobSize uint, sec ReferenceBlockNumber: referenceBlockNumber, BatchRoot: batchRoot, BlobInclusionProof: inclusionProof, - BlobCommitment: &core.BlobCommitments{ + BlobCommitment: &encoding.BlobCommitments{ Commitment: commitment, Length: uint(dataLength), }, diff --git a/disperser/batcher/batcher_test.go b/disperser/batcher/batcher_test.go index 59e8a7567a..86ed7308c0 100644 --- a/disperser/batcher/batcher_test.go +++ b/disperser/batcher/batcher_test.go @@ -13,7 +13,6 @@ import ( "github.com/Layr-Labs/eigenda/common/logging" cmock "github.com/Layr-Labs/eigenda/common/mock" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" coremock "github.com/Layr-Labs/eigenda/core/mock" "github.com/Layr-Labs/eigenda/disperser" bat "github.com/Layr-Labs/eigenda/disperser/batcher" @@ -21,7 +20,9 @@ import ( batchermock "github.com/Layr-Labs/eigenda/disperser/batcher/mock" "github.com/Layr-Labs/eigenda/disperser/common/inmem" dmock "github.com/Layr-Labs/eigenda/disperser/mock" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" gethcommon "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" "github.com/stretchr/testify/assert" @@ -42,8 +43,9 @@ type batcherComponents struct { } // makeTestEncoder makes an encoder currently using the only supported backend. 
-func makeTestEncoder() (core.Encoder, error) { - config := kzgrs.KzgConfig{ +func makeTestProver() (encoding.Prover, error) { + + config := &kzgrs.KzgConfig{ G1Path: "../../inabox/resources/kzg/g1.point", G2Path: "../../inabox/resources/kzg/g2.point", CacheDir: "../../inabox/resources/kzg/SRSTables", @@ -52,7 +54,7 @@ func makeTestEncoder() (core.Encoder, error) { NumWorker: uint64(runtime.GOMAXPROCS(0)), } - return encoding.NewEncoder(encoding.EncoderConfig{KzgConfig: config}, true) + return prover.NewProver(config, true) } func makeTestBlob(securityParams []*core.SecurityParam) core.Blob { @@ -79,7 +81,7 @@ func makeBatcher(t *testing.T) (*batcherComponents, *bat.Batcher, func() []time. transactor.On("OperatorIDToAddress").Return(gethcommon.Address{}, nil) agg, err := core.NewStdSignatureAggregator(logger, transactor) assert.NoError(t, err) - enc, err := makeTestEncoder() + p, err := makeTestProver() assert.NoError(t, err) state := cst.GetTotalOperatorState(context.Background(), 0) @@ -106,7 +108,7 @@ func makeBatcher(t *testing.T) (*batcherComponents, *bat.Batcher, func() []time. 
metrics := bat.NewMetrics("9100", logger) - encoderClient := disperser.NewLocalEncoderClient(enc) + encoderClient := disperser.NewLocalEncoderClient(p) finalizer := batchermock.NewFinalizer() ethClient := &cmock.MockEthClient{} txnManager := mock.NewTxnManager() diff --git a/disperser/batcher/encoded_blob_store.go b/disperser/batcher/encoded_blob_store.go index bdf9eb860c..e5f533b7b4 100644 --- a/disperser/batcher/encoded_blob_store.go +++ b/disperser/batcher/encoded_blob_store.go @@ -7,6 +7,7 @@ import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/disperser" + "github.com/Layr-Labs/eigenda/encoding" ) type requestID string @@ -33,8 +34,8 @@ type EncodingResult struct { BlobMetadata *disperser.BlobMetadata ReferenceBlockNumber uint BlobQuorumInfo *core.BlobQuorumInfo - Commitment *core.BlobCommitments - Chunks []*core.Chunk + Commitment *encoding.BlobCommitments + Chunks []*encoding.Frame Assignments map[core.OperatorID]core.Assignment Status status } diff --git a/disperser/batcher/encoding_streamer.go b/disperser/batcher/encoding_streamer.go index 43daaedcf7..0f71187e55 100644 --- a/disperser/batcher/encoding_streamer.go +++ b/disperser/batcher/encoding_streamer.go @@ -11,6 +11,7 @@ import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/disperser" + "github.com/Layr-Labs/eigenda/encoding" "github.com/wealdtech/go-merkletree" ) @@ -268,7 +269,7 @@ func (e *EncodingStreamer) RequestEncoding(ctx context.Context, encoderChan chan type pendingRequestInfo struct { BlobQuorumInfo *core.BlobQuorumInfo - EncodingParams core.EncodingParams + EncodingParams encoding.EncodingParams Assignments map[core.OperatorID]core.Assignment } @@ -289,7 +290,7 @@ func (e *EncodingStreamer) RequestEncodingForBlob(ctx context.Context, metadata continue } - blobLength := core.GetBlobLength(metadata.RequestMetadata.BlobSize) + blobLength := 
encoding.GetBlobLength(metadata.RequestMetadata.BlobSize) chunkLength, err := e.assignmentCoordinator.CalculateChunkLength(state.OperatorState, blobLength, e.StreamerConfig.TargetNumChunks, quorum) if err != nil { @@ -313,13 +314,9 @@ func (e *EncodingStreamer) RequestEncodingForBlob(ctx context.Context, metadata continue } - params, err := core.GetEncodingParams(chunkLength, info.TotalChunks) - if err != nil { - e.logger.Error("[RequestEncodingForBlob] error getting encoding params", "err", err) - continue - } + params := encoding.ParamsFromMins(chunkLength, info.TotalChunks) - err = core.ValidateEncodingParams(params, int(blobLength), e.SRSOrder) + err = encoding.ValidateEncodingParams(params, int(blobLength), e.SRSOrder) if err != nil { e.logger.Error("[RequestEncodingForBlob] invalid encoding params", "err", err) // Cancel the blob diff --git a/disperser/batcher/encoding_streamer_test.go b/disperser/batcher/encoding_streamer_test.go index b7b807e869..a22892356c 100644 --- a/disperser/batcher/encoding_streamer_test.go +++ b/disperser/batcher/encoding_streamer_test.go @@ -41,9 +41,9 @@ func createEncodingStreamer(t *testing.T, initialBlockNumber uint, batchThreshol blobStore := inmem.NewBlobStore() cst, err := coremock.MakeChainDataMock(numOperators) assert.Nil(t, err) - enc, err := makeTestEncoder() + p, err := makeTestProver() assert.Nil(t, err) - encoderClient := disperser.NewLocalEncoderClient(enc) + encoderClient := disperser.NewLocalEncoderClient(p) asgn := &core.StdAssignmentCoordinator{} sizeNotifier := batcher.NewEncodedSizeNotifier(make(chan struct{}, 1), batchThreshold) workerpool := workerpool.New(5) diff --git a/disperser/batcher/finalizer_test.go b/disperser/batcher/finalizer_test.go index a240888a5f..76a3e40046 100644 --- a/disperser/batcher/finalizer_test.go +++ b/disperser/batcher/finalizer_test.go @@ -12,6 +12,7 @@ import ( "github.com/Layr-Labs/eigenda/disperser" "github.com/Layr-Labs/eigenda/disperser/batcher" 
"github.com/Layr-Labs/eigenda/disperser/common/inmem" + "github.com/Layr-Labs/eigenda/encoding" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" @@ -63,7 +64,7 @@ func TestFinalizedBlob(t *testing.T) { ReferenceBlockNumber: 132, BatchRoot: []byte("hello"), BlobInclusionProof: inclusionProof, - BlobCommitment: &core.BlobCommitments{}, + BlobCommitment: &encoding.BlobCommitments{}, BatchID: 99, ConfirmationTxnHash: common.HexToHash("0x123"), ConfirmationBlockNumber: uint32(150), @@ -159,7 +160,7 @@ func TestUnfinalizedBlob(t *testing.T) { ReferenceBlockNumber: 132, BatchRoot: []byte("hello"), BlobInclusionProof: inclusionProof, - BlobCommitment: &core.BlobCommitments{}, + BlobCommitment: &encoding.BlobCommitments{}, BatchID: 99, ConfirmationTxnHash: common.HexToHash("0x123"), ConfirmationBlockNumber: uint32(150), @@ -230,7 +231,7 @@ func TestNoReceipt(t *testing.T) { ReferenceBlockNumber: 132, BatchRoot: []byte("hello"), BlobInclusionProof: inclusionProof, - BlobCommitment: &core.BlobCommitments{}, + BlobCommitment: &encoding.BlobCommitments{}, BatchID: 99, ConfirmationTxnHash: common.HexToHash("0x123"), ConfirmationBlockNumber: uint32(150), diff --git a/disperser/cmd/batcher/config.go b/disperser/cmd/batcher/config.go index e099604519..f1b5e656ec 100644 --- a/disperser/cmd/batcher/config.go +++ b/disperser/cmd/batcher/config.go @@ -4,10 +4,10 @@ import ( "github.com/Layr-Labs/eigenda/common/aws" "github.com/Layr-Labs/eigenda/common/geth" "github.com/Layr-Labs/eigenda/common/logging" - "github.com/Layr-Labs/eigenda/core/encoding" "github.com/Layr-Labs/eigenda/disperser/batcher" "github.com/Layr-Labs/eigenda/disperser/cmd/batcher/flags" "github.com/Layr-Labs/eigenda/disperser/common/blobstore" + "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/indexer" "github.com/urfave/cli" ) @@ -18,7 +18,7 @@ type Config struct { BlobstoreConfig blobstore.Config EthClientConfig 
geth.EthClientConfig AwsClientConfig aws.ClientConfig - EncoderConfig encoding.EncoderConfig + EncoderConfig kzgrs.KzgConfig LoggerConfig logging.Config MetricsConfig batcher.MetricsConfig IndexerConfig indexer.Config @@ -40,7 +40,7 @@ func NewConfig(ctx *cli.Context) Config { }, EthClientConfig: geth.ReadEthClientConfig(ctx), AwsClientConfig: aws.ReadClientConfig(ctx, flags.FlagPrefix), - EncoderConfig: encoding.ReadCLIConfig(ctx), + EncoderConfig: kzgrs.ReadCLIConfig(ctx), LoggerConfig: logging.ReadCLIConfig(ctx, flags.FlagPrefix), BatcherConfig: batcher.Config{ PullInterval: ctx.GlobalDuration(flags.PullIntervalFlag.Name), diff --git a/disperser/cmd/encoder/config.go b/disperser/cmd/encoder/config.go index b12d78f2cf..124af71e1c 100644 --- a/disperser/cmd/encoder/config.go +++ b/disperser/cmd/encoder/config.go @@ -2,14 +2,14 @@ package main import ( "github.com/Layr-Labs/eigenda/common/logging" - "github.com/Layr-Labs/eigenda/core/encoding" "github.com/Layr-Labs/eigenda/disperser/cmd/encoder/flags" "github.com/Layr-Labs/eigenda/disperser/encoder" + "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/urfave/cli" ) type Config struct { - EncoderConfig encoding.EncoderConfig + EncoderConfig kzgrs.KzgConfig LoggerConfig logging.Config ServerConfig *encoder.ServerConfig MetricsConfig encoder.MetrisConfig @@ -17,7 +17,7 @@ type Config struct { func NewConfig(ctx *cli.Context) Config { config := Config{ - EncoderConfig: encoding.ReadCLIConfig(ctx), + EncoderConfig: kzgrs.ReadCLIConfig(ctx), LoggerConfig: logging.ReadCLIConfig(ctx, flags.FlagPrefix), ServerConfig: &encoder.ServerConfig{ GrpcPort: ctx.GlobalString(flags.GrpcPortFlag.Name), diff --git a/disperser/cmd/encoder/encoder.go b/disperser/cmd/encoder/encoder.go index 81fb87e3e3..c16d6b2739 100644 --- a/disperser/cmd/encoder/encoder.go +++ b/disperser/cmd/encoder/encoder.go @@ -5,8 +5,8 @@ import ( "fmt" "github.com/Layr-Labs/eigenda/common" - "github.com/Layr-Labs/eigenda/core/encoding" 
"github.com/Layr-Labs/eigenda/disperser/encoder" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" ) type EncoderGRPCServer struct { @@ -15,7 +15,7 @@ type EncoderGRPCServer struct { func NewEncoderGRPCServer(config Config, logger common.Logger) (*EncoderGRPCServer, error) { - coreEncoder, err := encoding.NewEncoder(config.EncoderConfig, true) + p, err := prover.NewProver(&config.EncoderConfig, true) if err != nil { return nil, fmt.Errorf("failed to create encoder: %w", err) } @@ -28,7 +28,7 @@ func NewEncoderGRPCServer(config Config, logger common.Logger) (*EncoderGRPCServ logger.Info("Enabled metrics for Encoder", "socket", httpSocket) } - server := encoder.NewServer(*config.ServerConfig, logger, coreEncoder, metrics) + server := encoder.NewServer(*config.ServerConfig, logger, p, metrics) return &EncoderGRPCServer{ Server: server, diff --git a/disperser/cmd/encoder/flags/flags.go b/disperser/cmd/encoder/flags/flags.go index c163de38f8..aadc5b826f 100644 --- a/disperser/cmd/encoder/flags/flags.go +++ b/disperser/cmd/encoder/flags/flags.go @@ -3,7 +3,7 @@ package flags import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/common/logging" - "github.com/Layr-Labs/eigenda/core/encoding" + "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/urfave/cli" ) @@ -66,6 +66,6 @@ var Flags []cli.Flag func init() { Flags = append(requiredFlags, optionalFlags...) - Flags = append(Flags, encoding.CLIFlags(envVarPrefix)...) + Flags = append(Flags, kzgrs.CLIFlags(envVarPrefix)...) Flags = append(Flags, logging.CLIFlags(envVarPrefix, FlagPrefix)...) 
} diff --git a/disperser/common/blobstore/blob_metadata_store_test.go b/disperser/common/blobstore/blob_metadata_store_test.go index 0b132ab0bb..eca3e23f88 100644 --- a/disperser/common/blobstore/blob_metadata_store_test.go +++ b/disperser/common/blobstore/blob_metadata_store_test.go @@ -8,6 +8,7 @@ import ( commondynamodb "github.com/Layr-Labs/eigenda/common/aws/dynamodb" "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/disperser" + "github.com/Layr-Labs/eigenda/encoding" "github.com/aws/aws-sdk-go-v2/service/dynamodb/types" "github.com/consensys/gnark-crypto/ecc/bn254/fp" "github.com/ethereum/go-ethereum/common" @@ -210,7 +211,7 @@ func getConfirmedMetadata(t *testing.T, metadataKey disperser.BlobKey) *disperse assert.NoError(t, err) _, err = commitY.SetString("9207254729396071334325696286939045899948985698134704137261649190717970615186") assert.NoError(t, err) - commitment := &core.G1Commitment{ + commitment := &encoding.G1Commitment{ X: commitX, Y: commitY, } @@ -242,7 +243,7 @@ func getConfirmedMetadata(t *testing.T, metadataKey disperser.BlobKey) *disperse ReferenceBlockNumber: referenceBlockNumber, BatchRoot: batchRoot, BlobInclusionProof: inclusionProof, - BlobCommitment: &core.BlobCommitments{ + BlobCommitment: &encoding.BlobCommitments{ Commitment: commitment, Length: uint(dataLength), }, diff --git a/disperser/common/blobstore/shared_storage_test.go b/disperser/common/blobstore/shared_storage_test.go index c7ea038230..dabd5b6f0c 100644 --- a/disperser/common/blobstore/shared_storage_test.go +++ b/disperser/common/blobstore/shared_storage_test.go @@ -10,6 +10,7 @@ import ( "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/disperser" + "github.com/Layr-Labs/eigenda/encoding" "github.com/stretchr/testify/assert" "github.com/ethereum/go-ethereum/common" @@ -73,7 +74,7 @@ func TestSharedBlobStore(t *testing.T) { SignatoryRecordHash: [32]byte{0}, ReferenceBlockNumber: 132, BatchRoot: []byte("hello"), - BlobCommitment: 
&core.BlobCommitments{}, + BlobCommitment: &encoding.BlobCommitments{}, BatchID: 99, ConfirmationTxnHash: common.HexToHash("0x123"), ConfirmationBlockNumber: 150, @@ -126,7 +127,7 @@ func TestSharedBlobStore(t *testing.T) { SignatoryRecordHash: [32]byte{0}, ReferenceBlockNumber: 132, BatchRoot: []byte("hello"), - BlobCommitment: &core.BlobCommitments{}, + BlobCommitment: &encoding.BlobCommitments{}, BatchID: 99, ConfirmationBlockNumber: 150, Fee: []byte{0}, diff --git a/disperser/common/inmem/store_test.go b/disperser/common/inmem/store_test.go index 5dd8067e57..0f8a702d73 100644 --- a/disperser/common/inmem/store_test.go +++ b/disperser/common/inmem/store_test.go @@ -8,6 +8,7 @@ import ( "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/disperser" "github.com/Layr-Labs/eigenda/disperser/common/inmem" + "github.com/Layr-Labs/eigenda/encoding" "github.com/ethereum/go-ethereum/common" "github.com/stretchr/testify/assert" ) @@ -71,7 +72,7 @@ func TestBlobStore(t *testing.T) { ReferenceBlockNumber: 132, BatchRoot: []byte("hello"), BlobInclusionProof: inclusionProof, - BlobCommitment: &core.BlobCommitments{}, + BlobCommitment: &encoding.BlobCommitments{}, BatchID: 99, ConfirmationTxnHash: common.HexToHash("0x123"), ConfirmationBlockNumber: uint32(150), diff --git a/disperser/dataapi/docs/docs.go b/disperser/dataapi/docs/docs.go index e1f9ef6621..a0aec734c2 100644 --- a/disperser/dataapi/docs/docs.go +++ b/disperser/dataapi/docs/docs.go @@ -349,7 +349,7 @@ const docTemplate = `{ } }, "definitions": { - "core.BlobCommitments": { + "encoding.BlobCommitments": { "type": "object", "properties": { "commitment": { @@ -407,7 +407,7 @@ const docTemplate = `{ "type": "string" }, "blob_commitment": { - "$ref": "#/definitions/core.BlobCommitments" + "$ref": "#/definitions/encoding.BlobCommitments" }, "blob_inclusion_proof": { "type": "string" diff --git a/disperser/dataapi/docs/swagger.json b/disperser/dataapi/docs/swagger.json index e7f56fb289..2c357b1d0a 100644 
--- a/disperser/dataapi/docs/swagger.json +++ b/disperser/dataapi/docs/swagger.json @@ -345,7 +345,7 @@ } }, "definitions": { - "core.BlobCommitments": { + "encoding.BlobCommitments": { "type": "object", "properties": { "commitment": { @@ -403,7 +403,7 @@ "type": "string" }, "blob_commitment": { - "$ref": "#/definitions/core.BlobCommitments" + "$ref": "#/definitions/encoding.BlobCommitments" }, "blob_inclusion_proof": { "type": "string" diff --git a/disperser/dataapi/docs/swagger.yaml b/disperser/dataapi/docs/swagger.yaml index 458b7dc4ab..b0a10ce7b7 100644 --- a/disperser/dataapi/docs/swagger.yaml +++ b/disperser/dataapi/docs/swagger.yaml @@ -1,5 +1,5 @@ definitions: - core.BlobCommitments: + encoding.BlobCommitments: properties: commitment: $ref: '#/definitions/core.Commitment' @@ -45,7 +45,7 @@ definitions: batch_root: type: string blob_commitment: - $ref: '#/definitions/core.BlobCommitments' + $ref: '#/definitions/encoding.BlobCommitments' blob_inclusion_proof: type: string blob_index: diff --git a/disperser/dataapi/server.go b/disperser/dataapi/server.go index e371197882..0f5d4daece 100644 --- a/disperser/dataapi/server.go +++ b/disperser/dataapi/server.go @@ -12,6 +12,7 @@ import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/core" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/disperser" "github.com/Layr-Labs/eigenda/disperser/dataapi/docs" @@ -32,21 +33,21 @@ var errNotFound = errors.New("not found") type ( BlobMetadataResponse struct { - BlobKey string `json:"blob_key"` - BatchHeaderHash string `json:"batch_header_hash"` - BlobIndex uint32 `json:"blob_index"` - SignatoryRecordHash string `json:"signatory_record_hash"` - ReferenceBlockNumber uint32 `json:"reference_block_number"` - BatchRoot string `json:"batch_root"` - BlobInclusionProof string `json:"blob_inclusion_proof"` - BlobCommitment *core.BlobCommitments `json:"blob_commitment"` - BatchId uint32 `json:"batch_id"` - ConfirmationBlockNumber uint32 
`json:"confirmation_block_number"` - ConfirmationTxnHash string `json:"confirmation_txn_hash"` - Fee string `json:"fee"` - SecurityParams []*core.SecurityParam `json:"security_params"` - RequestAt uint64 `json:"requested_at"` - BlobStatus disperser.BlobStatus `json:"blob_status"` + BlobKey string `json:"blob_key"` + BatchHeaderHash string `json:"batch_header_hash"` + BlobIndex uint32 `json:"blob_index"` + SignatoryRecordHash string `json:"signatory_record_hash"` + ReferenceBlockNumber uint32 `json:"reference_block_number"` + BatchRoot string `json:"batch_root"` + BlobInclusionProof string `json:"blob_inclusion_proof"` + BlobCommitment *encoding.BlobCommitments `json:"blob_commitment"` + BatchId uint32 `json:"batch_id"` + ConfirmationBlockNumber uint32 `json:"confirmation_block_number"` + ConfirmationTxnHash string `json:"confirmation_txn_hash"` + Fee string `json:"fee"` + SecurityParams []*core.SecurityParam `json:"security_params"` + RequestAt uint64 `json:"requested_at"` + BlobStatus disperser.BlobStatus `json:"blob_status"` } Metric struct { diff --git a/disperser/dataapi/server_test.go b/disperser/dataapi/server_test.go index 7910c3b8ad..629a675c48 100644 --- a/disperser/dataapi/server_test.go +++ b/disperser/dataapi/server_test.go @@ -23,6 +23,7 @@ import ( prommock "github.com/Layr-Labs/eigenda/disperser/dataapi/prometheus/mock" "github.com/Layr-Labs/eigenda/disperser/dataapi/subgraph" subgraphmock "github.com/Layr-Labs/eigenda/disperser/dataapi/subgraph/mock" + "github.com/Layr-Labs/eigenda/encoding" "github.com/consensys/gnark-crypto/ecc/bn254/fp" "github.com/ethereum/go-ethereum/common" "github.com/gin-gonic/gin" @@ -40,7 +41,7 @@ var ( //go:embed testdata/prometheus-resp-avg-throughput.json mockPrometheusRespAvgThroughput string - expectedBlobCommitment *core.BlobCommitments + expectedBlobCommitment *encoding.BlobCommitments mockLogger = &commock.Logger{} blobstore = inmem.NewBlobStore() mockPrometheusApi = &prommock.MockPrometheusApi{} @@ -836,7 +837,7 
@@ func markBlobConfirmed(t *testing.T, blob *core.Blob, key disperser.BlobKey, bat assert.NoError(t, err) _, err = commitY.SetString("9207254729396071334325696286939045899948985698134704137261649190717970615186") assert.NoError(t, err) - commitment := &core.G1Commitment{ + commitment := &encoding.G1Commitment{ X: commitX, Y: commitY, } @@ -848,7 +849,7 @@ func markBlobConfirmed(t *testing.T, blob *core.Blob, key disperser.BlobKey, bat ReferenceBlockNumber: expectedReferenceBlockNumber, BatchRoot: expectedBatchRoot, BlobInclusionProof: expectedInclusionProof, - BlobCommitment: &core.BlobCommitments{ + BlobCommitment: &encoding.BlobCommitments{ Commitment: commitment, Length: uint(expectedDataLength), }, diff --git a/disperser/disperser.go b/disperser/disperser.go index 2219fbafed..9b0ef93128 100644 --- a/disperser/disperser.go +++ b/disperser/disperser.go @@ -9,6 +9,7 @@ import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/core" + "github.com/Layr-Labs/eigenda/encoding" disperser_rpc "github.com/Layr-Labs/eigenda/api/grpc/disperser" gcommon "github.com/ethereum/go-ethereum/common" @@ -114,7 +115,7 @@ type ConfirmationInfo struct { ReferenceBlockNumber uint32 `json:"reference_block_number"` BatchRoot []byte `json:"batch_root"` BlobInclusionProof []byte `json:"blob_inclusion_proof"` - BlobCommitment *core.BlobCommitments `json:"blob_commitment"` + BlobCommitment *encoding.BlobCommitments `json:"blob_commitment"` BatchID uint32 `json:"batch_id"` ConfirmationTxnHash gcommon.Hash `json:"confirmation_txn_hash"` ConfirmationBlockNumber uint32 `json:"confirmation_block_number"` diff --git a/disperser/encoder/client.go b/disperser/encoder/client.go index 964f9617b6..6b3858a63b 100644 --- a/disperser/encoder/client.go +++ b/disperser/encoder/client.go @@ -5,9 +5,9 @@ import ( "fmt" "time" - "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/disperser" pb "github.com/Layr-Labs/eigenda/disperser/api/grpc/encoder" + 
"github.com/Layr-Labs/eigenda/encoding" "google.golang.org/grpc" "google.golang.org/grpc/credentials/insecure" ) @@ -24,7 +24,7 @@ func NewEncoderClient(addr string, timeout time.Duration) (disperser.EncoderClie }, nil } -func (c client) EncodeBlob(ctx context.Context, data []byte, encodingParams core.EncodingParams) (*core.BlobCommitments, []*core.Chunk, error) { +func (c client) EncodeBlob(ctx context.Context, data []byte, encodingParams encoding.EncodingParams) (*encoding.BlobCommitments, []*encoding.Frame, error) { conn, err := grpc.Dial( c.addr, grpc.WithTransportCredentials(insecure.NewCredentials()), @@ -47,27 +47,27 @@ func (c client) EncodeBlob(ctx context.Context, data []byte, encodingParams core return nil, nil, err } - commitment, err := new(core.G1Commitment).Deserialize(reply.GetCommitment().GetCommitment()) + commitment, err := new(encoding.G1Commitment).Deserialize(reply.GetCommitment().GetCommitment()) if err != nil { return nil, nil, err } - lengthCommitment, err := new(core.G2Commitment).Deserialize(reply.GetCommitment().GetLengthCommitment()) + lengthCommitment, err := new(encoding.G2Commitment).Deserialize(reply.GetCommitment().GetLengthCommitment()) if err != nil { return nil, nil, err } - lengthProof, err := new(core.LengthProof).Deserialize(reply.GetCommitment().GetLengthProof()) + lengthProof, err := new(encoding.LengthProof).Deserialize(reply.GetCommitment().GetLengthProof()) if err != nil { return nil, nil, err } - chunks := make([]*core.Chunk, len(reply.GetChunks())) + chunks := make([]*encoding.Frame, len(reply.GetChunks())) for i, chunk := range reply.GetChunks() { - deserialized, err := new(core.Chunk).Deserialize(chunk) + deserialized, err := new(encoding.Frame).Deserialize(chunk) if err != nil { return nil, nil, err } chunks[i] = deserialized } - return &core.BlobCommitments{ + return &encoding.BlobCommitments{ Commitment: commitment, LengthCommitment: lengthCommitment, LengthProof: lengthProof, diff --git 
a/disperser/encoder/server.go b/disperser/encoder/server.go index d5bfe60e27..f38c549261 100644 --- a/disperser/encoder/server.go +++ b/disperser/encoder/server.go @@ -9,9 +9,9 @@ import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/common/healthcheck" - "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/disperser" pb "github.com/Layr-Labs/eigenda/disperser/api/grpc/encoder" + "github.com/Layr-Labs/eigenda/encoding" "google.golang.org/grpc" "google.golang.org/grpc/reflection" ) @@ -20,22 +20,22 @@ import ( type Server struct { pb.UnimplementedEncoderServer - config ServerConfig - logger common.Logger - coreEncoder core.Encoder - metrics *Metrics - close func() + config ServerConfig + logger common.Logger + prover encoding.Prover + metrics *Metrics + close func() runningRequests chan struct{} requestPool chan struct{} } -func NewServer(config ServerConfig, logger common.Logger, coreEncoder core.Encoder, metrics *Metrics) *Server { +func NewServer(config ServerConfig, logger common.Logger, prover encoding.Prover, metrics *Metrics) *Server { return &Server{ - config: config, - logger: logger, - coreEncoder: coreEncoder, - metrics: metrics, + config: config, + logger: logger, + prover: prover, + metrics: metrics, runningRequests: make(chan struct{}, config.MaxConcurrentRequests), requestPool: make(chan struct{}, config.RequestPoolSize), @@ -76,12 +76,12 @@ func (s *Server) handleEncoding(ctx context.Context, req *pb.EncodeBlobRequest) begin := time.Now() // Convert to core EncodingParams - var encodingParams = core.EncodingParams{ - ChunkLength: uint(req.EncodingParams.ChunkLength), - NumChunks: uint(req.EncodingParams.NumChunks), + var encodingParams = encoding.EncodingParams{ + ChunkLength: uint64(req.EncodingParams.ChunkLength), + NumChunks: uint64(req.EncodingParams.NumChunks), } - commits, chunks, err := s.coreEncoder.Encode(req.Data, encodingParams) + commits, chunks, err := s.prover.EncodeAndProve(req.Data, encodingParams) 
if err != nil { return nil, err @@ -120,10 +120,10 @@ func (s *Server) handleEncoding(ctx context.Context, req *pb.EncodeBlobRequest) return &pb.EncodeBlobReply{ Commitment: &pb.BlobCommitment{ - Commitment: commitData, + Commitment: commitData, LengthCommitment: lengthCommitData, - LengthProof: lengthProofData, - Length: uint32(commits.Length), + LengthProof: lengthProofData, + Length: uint32(commits.Length), }, Chunks: chunksData, }, nil diff --git a/disperser/encoder/server_test.go b/disperser/encoder/server_test.go index b91d89ae23..0ff38ff435 100644 --- a/disperser/encoder/server_test.go +++ b/disperser/encoder/server_test.go @@ -14,12 +14,15 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" + encmock "github.com/Layr-Labs/eigenda/encoding/mock" + cmock "github.com/Layr-Labs/eigenda/common/mock" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" coremock "github.com/Layr-Labs/eigenda/core/mock" pb "github.com/Layr-Labs/eigenda/disperser/api/grpc/encoder" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" ) @@ -29,8 +32,8 @@ var ( var logger = &cmock.Logger{} -func makeTestEncoder(numPoint uint64) (*encoding.Encoder, ServerConfig) { - kzgConfig := kzgrs.KzgConfig{ +func makeTestProver(numPoint uint64) (encoding.Prover, ServerConfig) { + kzgConfig := &kzgrs.KzgConfig{ G1Path: "../../inabox/resources/kzg/g1.point", G2Path: "../../inabox/resources/kzg/g2.point", CacheDir: "../../inabox/resources/kzg/SRSTables", @@ -39,21 +42,19 @@ func makeTestEncoder(numPoint uint64) (*encoding.Encoder, ServerConfig) { NumWorker: uint64(runtime.GOMAXPROCS(0)), } - encodingConfig := encoding.EncoderConfig{KzgConfig: kzgConfig} - - encoder, _ := encoding.NewEncoder(encodingConfig, true) + p, _ := prover.NewProver(kzgConfig, true) encoderServerConfig := ServerConfig{ GrpcPort: 
"3000", MaxConcurrentRequests: 16, RequestPoolSize: 32, } - return encoder, encoderServerConfig + return p, encoderServerConfig } -var testEncoder, testServerConfig = makeTestEncoder(3000) +var testProver, testServerConfig = makeTestProver(3000) -func getTestData() (core.Blob, core.EncodingParams) { +func getTestData() (core.Blob, encoding.EncodingParams) { var quorumID core.QuorumID = 0 var adversaryThreshold uint8 = 80 var quorumThreshold uint8 = 90 @@ -80,7 +81,7 @@ func getTestData() (core.Blob, core.EncodingParams) { coordinator := &core.StdAssignmentCoordinator{} blobSize := uint(len(testBlob.Data)) - blobLength := core.GetBlobLength(uint(blobSize)) + blobLength := encoding.GetBlobLength(uint(blobSize)) chunkLength, err := coordinator.CalculateChunkLength(operatorState, blobLength, 0, securityParams[0]) if err != nil { @@ -97,14 +98,14 @@ func getTestData() (core.Blob, core.EncodingParams) { log.Fatal(err) } - testEncodingParams, _ := core.GetEncodingParams(chunkLength, info.TotalChunks) + testEncodingParams := encoding.ParamsFromMins(chunkLength, info.TotalChunks) return testBlob, testEncodingParams } func newEncoderTestServer(t *testing.T) *Server { metrics := NewMetrics("9000", logger) - return NewServer(testServerConfig, logger, testEncoder, metrics) + return NewServer(testServerConfig, logger, testProver, metrics) } func TestEncodeBlob(t *testing.T) { @@ -126,22 +127,22 @@ func TestEncodeBlob(t *testing.T) { assert.NotNil(t, reply.Chunks) // Decode Server Data - var chunksData []*core.Chunk + var chunksData []*encoding.Frame for i := range reply.Chunks { - chunkSerialized, _ := new(core.Chunk).Deserialize(reply.GetChunks()[i]) + chunkSerialized, _ := new(encoding.Frame).Deserialize(reply.GetChunks()[i]) // perform an operation chunksData = append(chunksData, chunkSerialized) } assert.NotNil(t, chunksData) // Indices obtained from Encoder_Test - indices := []core.ChunkNumber{ + indices := []encoding.ChunkNumber{ 0, 1, 2, 3, 4, 5, 6, 7, } maxInputSize := 
uint64(len(gettysburgAddressBytes)) + 10 - decoded, err := testEncoder.Decode(chunksData, indices, testEncodingParams, maxInputSize) + decoded, err := testProver.Decode(chunksData, indices, testEncodingParams, maxInputSize) assert.Nil(t, err) recovered := bytes.TrimRight(decoded, "\x00") assert.Equal(t, recovered, gettysburgAddressBytes) @@ -173,21 +174,21 @@ func TestThrottling(t *testing.T) { metrics := NewMetrics("9000", logger) concurrentRequests := 2 requestPoolSize := 4 - encoder := &encoding.MockEncoder{ + encoder := &encmock.MockEncoder{ Delay: 500 * time.Millisecond, } - blobCommitment := core.BlobCommitments{ - Commitment: &core.G1Commitment{ + blobCommitment := encoding.BlobCommitments{ + Commitment: &encoding.G1Commitment{ X: X1, Y: Y1, }, - LengthCommitment: (*core.G2Commitment)(&lengthCommitment), - LengthProof: (*core.G2Commitment)(&lengthProof), + LengthCommitment: (*encoding.G2Commitment)(&lengthCommitment), + LengthProof: (*encoding.G2Commitment)(&lengthProof), Length: 10, } - encoder.On("Encode", mock.Anything, mock.Anything).Return(blobCommitment, []*core.Chunk{}, nil) + encoder.On("EncodeAndProve", mock.Anything, mock.Anything).Return(blobCommitment, []*encoding.Frame{}, nil) encoderServerConfig := ServerConfig{ GrpcPort: "3000", MaxConcurrentRequests: concurrentRequests, @@ -244,9 +245,9 @@ func TestThrottling(t *testing.T) { func TestEncoderPointsLoading(t *testing.T) { // encoder 1 only loads 1500 points - encoder1, config1 := makeTestEncoder(1500) + prover1, config1 := makeTestProver(1500) metrics := NewMetrics("9000", logger) - server1 := NewServer(config1, logger, encoder1, metrics) + server1 := NewServer(config1, logger, prover1, metrics) testBlobData, testEncodingParams := getTestData() @@ -265,28 +266,28 @@ func TestEncoderPointsLoading(t *testing.T) { assert.NotNil(t, reply1.Chunks) // Decode Server Data - var chunksData []*core.Chunk + var chunksData []*encoding.Frame for i := range reply1.Chunks { - chunkSerialized, _ := 
new(core.Chunk).Deserialize(reply1.GetChunks()[i]) + chunkSerialized, _ := new(encoding.Frame).Deserialize(reply1.GetChunks()[i]) // perform an operation chunksData = append(chunksData, chunkSerialized) } assert.NotNil(t, chunksData) // Indices obtained from Encoder_Test - indices := []core.ChunkNumber{ + indices := []encoding.ChunkNumber{ 0, 1, 2, 3, 4, 5, 6, 7, } maxInputSize := uint64(len(gettysburgAddressBytes)) + 10 - decoded, err := testEncoder.Decode(chunksData, indices, testEncodingParams, maxInputSize) + decoded, err := testProver.Decode(chunksData, indices, testEncodingParams, maxInputSize) assert.Nil(t, err) recovered := bytes.TrimRight(decoded, "\x00") assert.Equal(t, recovered, gettysburgAddressBytes) // encoder 2 only loads 2900 points - encoder2, config2 := makeTestEncoder(2900) + encoder2, config2 := makeTestProver(2900) server2 := NewServer(config2, logger, encoder2, metrics) reply2, err := server2.EncodeBlob(context.Background(), encodeBlobRequestProto) @@ -294,7 +295,7 @@ func TestEncoderPointsLoading(t *testing.T) { assert.NotNil(t, reply2.Chunks) for i := range reply2.Chunks { - chunkSerialized, _ := new(core.Chunk).Deserialize(reply2.GetChunks()[i]) + chunkSerialized, _ := new(encoding.Frame).Deserialize(reply2.GetChunks()[i]) // perform an operation assert.Equal(t, len(chunkSerialized.Coeffs), len(chunksData[i].Coeffs)) assert.Equal(t, chunkSerialized.Coeffs, chunksData[i].Coeffs) diff --git a/disperser/encoder_client.go b/disperser/encoder_client.go index a9ef9b231b..20857af9cd 100644 --- a/disperser/encoder_client.go +++ b/disperser/encoder_client.go @@ -3,9 +3,9 @@ package disperser import ( "context" - "github.com/Layr-Labs/eigenda/core" + "github.com/Layr-Labs/eigenda/encoding" ) type EncoderClient interface { - EncodeBlob(ctx context.Context, data []byte, encodingParams core.EncodingParams) (*core.BlobCommitments, []*core.Chunk, error) + EncodeBlob(ctx context.Context, data []byte, encodingParams encoding.EncodingParams) 
(*encoding.BlobCommitments, []*encoding.Frame, error) } diff --git a/disperser/local_encoder_client.go b/disperser/local_encoder_client.go index 4e2ec8446f..b66cf79dfd 100644 --- a/disperser/local_encoder_client.go +++ b/disperser/local_encoder_client.go @@ -4,27 +4,27 @@ import ( "context" "sync" - "github.com/Layr-Labs/eigenda/core" + "github.com/Layr-Labs/eigenda/encoding" ) type LocalEncoderClient struct { mu sync.Mutex - encoder core.Encoder + prover encoding.Prover } var _ EncoderClient = (*LocalEncoderClient)(nil) -func NewLocalEncoderClient(encoder core.Encoder) *LocalEncoderClient { +func NewLocalEncoderClient(prover encoding.Prover) *LocalEncoderClient { return &LocalEncoderClient{ - encoder: encoder, + prover: prover, } } -func (m *LocalEncoderClient) EncodeBlob(ctx context.Context, data []byte, encodingParams core.EncodingParams) (*core.BlobCommitments, []*core.Chunk, error) { +func (m *LocalEncoderClient) EncodeBlob(ctx context.Context, data []byte, encodingParams encoding.EncodingParams) (*encoding.BlobCommitments, []*encoding.Frame, error) { m.mu.Lock() defer m.mu.Unlock() - commits, chunks, err := m.encoder.Encode(data, encodingParams) + commits, chunks, err := m.prover.EncodeAndProve(data, encodingParams) if err != nil { return nil, nil, err } diff --git a/disperser/mock/encoder.go b/disperser/mock/encoder.go index fa36ece167..0aa6422434 100644 --- a/disperser/mock/encoder.go +++ b/disperser/mock/encoder.go @@ -3,8 +3,8 @@ package mock import ( "context" - "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/disperser" + "github.com/Layr-Labs/eigenda/encoding" "github.com/stretchr/testify/mock" ) @@ -18,15 +18,15 @@ func NewMockEncoderClient() *MockEncoderClient { return &MockEncoderClient{} } -func (m *MockEncoderClient) EncodeBlob(ctx context.Context, data []byte, encodingParams core.EncodingParams) (*core.BlobCommitments, []*core.Chunk, error) { +func (m *MockEncoderClient) EncodeBlob(ctx context.Context, data []byte, encodingParams 
encoding.EncodingParams) (*encoding.BlobCommitments, []*encoding.Frame, error) { args := m.Called(ctx, data, encodingParams) - var commitments *core.BlobCommitments + var commitments *encoding.BlobCommitments if args.Get(0) != nil { - commitments = args.Get(0).(*core.BlobCommitments) + commitments = args.Get(0).(*encoding.BlobCommitments) } - var chunks []*core.Chunk + var chunks []*encoding.Frame if args.Get(1) != nil { - chunks = args.Get(1).([]*core.Chunk) + chunks = args.Get(1).([]*encoding.Frame) } return commitments, chunks, args.Error(2) } diff --git a/docs/design/encoding.md b/docs/design/encoding.md index a5b432fc42..d5e9607de4 100644 --- a/docs/design/encoding.md +++ b/docs/design/encoding.md @@ -58,6 +58,6 @@ As a simple illustrative example, suppose that `AssignmentCoordinator` provides - `ChunkLength` = 3 - `NumChunks` = 4 -Supplied with these parameters, `Encoder.GetEncodingParams` will upgrade `ChunkLength` to the next highest power of 2, i.e., `ChunkLength` = 4, and leave `NumChunks` unchanged. The following figure illustrates how the indices will be assigned across the chunks in this scenario. +Supplied with these parameters, `Encoder.ParamsFromMins` will upgrade `ChunkLength` to the next highest power of 2, i.e., `ChunkLength` = 4, and leave `NumChunks` unchanged. The following figure illustrates how the indices will be assigned across the chunks in this scenario. 
![Worked example of chunk indices for ChunkLength=4, NumChunks=4](../assets/encoding-groups.png) diff --git a/docs/spec/protocol-modules/storage/encoding.md b/docs/spec/protocol-modules/storage/encoding.md index aefb7a683a..95601f99c0 100644 --- a/docs/spec/protocol-modules/storage/encoding.md +++ b/docs/spec/protocol-modules/storage/encoding.md @@ -24,10 +24,10 @@ type Encoder interface { // may use different symbol sizes GetBlobLength(blobSize uint) uint - // GetEncodingParams takes in the minimum chunk length and the minimum number of chunks and returns the encoding parameters given any + // ParamsFromMins takes in the minimum chunk length and the minimum number of chunks and returns the encoding parameters given any // additional constraints from the encoder backend. For instance, both the ChunkLength and NumChunks must typically be powers of 2. // The ChunkLength returned here should be used in constructing the BlobHeader. - GetEncodingParams(minChunkLength, minNumChunks uint) (EncodingParams, error) + ParamsFromMins(minChunkLength, minNumChunks uint) (EncodingParams, error) // Encode takes in a blob and returns the commitments and encoded chunks. The encoding will satisfy the property that // for any number M such that M*params.ChunkLength > BlobCommitments.Length, then any set of M chunks will be sufficient to diff --git a/encoding/data.go b/encoding/data.go new file mode 100644 index 0000000000..b8c94e65c1 --- /dev/null +++ b/encoding/data.go @@ -0,0 +1,60 @@ +package encoding + +import "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" + +// Commitment is a polynomial commitment (e.g. a kzg commitment) +type G1Commitment bn254.G1Point + +// Commitment is a polynomial commitment (e.g. a kzg commitment) +type G2Commitment bn254.G2Point + +// LengthProof is a polynomial commitment on G2 (e.g. a kzg commitment) used for low degree proof +type LengthProof = G2Commitment + +// The proof used to open a commitment. 
In the case of Kzg, this is also a kzg commitment, and is different from a Commitment only semantically. +type Proof = bn254.G1Point + +// Symbol is a symbol in the field used for polynomial commitments +type Symbol = bn254.Fr + +// BlomCommitments contains the blob's commitment, degree proof, and the actual degree. +type BlobCommitments struct { + Commitment *G1Commitment `json:"commitment"` + LengthCommitment *G2Commitment `json:"length_commitment"` + LengthProof *LengthProof `json:"length_proof"` + Length uint `json:"length"` +} + +// Frame is a chunk of data with the associated multi-reveal proof +type Frame struct { + // Proof is the multireveal proof corresponding to the chunk + Proof Proof + // Coeffs contains the coefficience of the interpolating polynomial of the chunk + Coeffs []Symbol +} + +func (f *Frame) Length() int { + return len(f.Coeffs) +} + +// Returns the size of chunk in bytes. +func (f *Frame) Size() int { + return f.Length() * bn254.BYTES_PER_COEFFICIENT +} + +// Sample is a chunk with associated metadata used by the Universal Batch Verifier +type Sample struct { + Commitment *G1Commitment + Chunk *Frame + AssignmentIndex ChunkNumber + BlobIndex int +} + +// SubBatch is a part of the whole Batch with identical Encoding Parameters, i.e. 
(ChunkLength, NumChunk) +// Blobs with the same encoding parameters are collected in a single subBatch +type SubBatch struct { + Samples []Sample + NumBlobs int +} + +type ChunkNumber = uint diff --git a/encoding/encoding.go b/encoding/encoding.go index 9651cfce0b..5acd0539ae 100644 --- a/encoding/encoding.go +++ b/encoding/encoding.go @@ -1,36 +1,30 @@ package encoding -import ( - "bytes" - "encoding/gob" - - "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" -) - -// Proof is the multireveal proof -// Coeffs is identical to input data converted into Fr element -type Frame struct { - Proof bn254.G1Point - Coeffs []bn254.Fr +type Decoder interface { + // Decode takes in the chunks, indices, and encoding parameters and returns the decoded blob + Decode(chunks []*Frame, indices []ChunkNumber, params EncodingParams, inputSize uint64) ([]byte, error) } -func (f *Frame) Encode() ([]byte, error) { - var buf bytes.Buffer - enc := gob.NewEncoder(&buf) - err := enc.Encode(f) - if err != nil { - return nil, err - } - return buf.Bytes(), nil +type Prover interface { + Decoder + // Encode takes in a blob and returns the commitments and encoded chunks. The encoding will satisfy the property that + // for any number M such that M*params.ChunkLength > BlobCommitments.Length, then any set of M chunks will be sufficient to + // reconstruct the blob. + EncodeAndProve(data []byte, params EncodingParams) (BlobCommitments, []*Frame, error) } -func Decode(b []byte) (Frame, error) { - var f Frame - buf := bytes.NewBuffer(b) - dec := gob.NewDecoder(buf) - err := dec.Decode(&f) - if err != nil { - return Frame{}, err - } - return f, nil +type Verifier interface { + Decoder + + // VerifyChunks takes in the chunks, indices, commitments, and encoding parameters and returns an error if the chunks are invalid. 
+ VerifyFrames(chunks []*Frame, indices []ChunkNumber, commitments BlobCommitments, params EncodingParams) error + + // VerifyBatch takes in the encoding parameters, samples and the number of blobs and returns an error if a chunk in any sample is invalid. + UniversalVerifySubBatch(params EncodingParams, samples []Sample, numBlobs int) error + + // VerifyBlobLength takes in the commitments and returns an error if the blob length is invalid. + VerifyBlobLength(commitments BlobCommitments) error + + // VerifyCommitEquivalence takes in a list of commitments and returns an error if the commitment of G1 and G2 are inconsistent + VerifyCommitEquivalenceBatch(commitments []BlobCommitments) error } diff --git a/core/encoding/cli.go b/encoding/kzgrs/cli.go similarity index 92% rename from core/encoding/cli.go rename to encoding/kzgrs/cli.go index 89e1f7b8e1..3a87e5247f 100644 --- a/core/encoding/cli.go +++ b/encoding/kzgrs/cli.go @@ -1,10 +1,9 @@ -package encoding +package kzgrs import ( "runtime" "github.com/Layr-Labs/eigenda/common" - "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/urfave/cli" ) @@ -87,8 +86,8 @@ func CLIFlags(envPrefix string) []cli.Flag { } } -func ReadCLIConfig(ctx *cli.Context) EncoderConfig { - cfg := kzgrs.KzgConfig{} +func ReadCLIConfig(ctx *cli.Context) KzgConfig { + cfg := KzgConfig{} cfg.G1Path = ctx.GlobalString(G1PathFlagName) cfg.G2Path = ctx.GlobalString(G2PathFlagName) cfg.CacheDir = ctx.GlobalString(CachePathFlagName) @@ -99,8 +98,5 @@ func ReadCLIConfig(ctx *cli.Context) EncoderConfig { cfg.PreloadEncoder = ctx.GlobalBool(PreloadEncoderFlagName) cfg.G2PowerOf2Path = ctx.GlobalString(G2PowerOf2PathFlagName) - return EncoderConfig{ - KzgConfig: cfg, - CacheEncodedBlobs: ctx.GlobalBoolT(CacheEncodedBlobsFlagName), - } + return cfg } diff --git a/encoding/kzgrs/prover/data/SRSTable/dimE16.coset8 b/encoding/kzgrs/prover/data/SRSTable/dimE16.coset8 deleted file mode 100644 index 
4fdd63a2f8565526f0af9c31aa2bbff1fc6b8b00..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8200 zcmV+jAot&gSLI=kBzSHaw;uSe7sl<=68%`UZ&E3x_dx_dIi9B1Wli*|f`!o>`VtE@ zyxfYjSB)~7x^8r|aZU0Y8C6M!bP5r;lDZP+E&O7x5#09XD>r&0ogJu647?NGh13Jt zw|paa^9FJ2umE4RhZ~Q>$(GUO_wSo#CwqDeDllfEtdJlMhhwj?|`5i9@8qQO^~?r|hg67zMitl6KAtK-^{CIHbb2P;cq^^PJ45tX@N=%_{LVGooM> zOpJ{QY<1RKc<10axmeRW{05OoH}RvdAM2%1H5tyVfwz@VLtSMJgr<(zCYui_4xwtB zQdI@*s10SE4ceTI9Etc+Nma`2W)TwEYoK*#28y@*t9`!VoS{V+1LB2y9@yMLhcMc5 ze^Qmha7yevxX-kw($7)Gvn7$wUEz+lyldoLYk)9iIOnEy$JUeaE$IO66S~QnSfG35 z*#5ki2zu|*j$u*Du-)ZXA>38R6i=-mUy8qnft9Nk4r&$7GzV)HbzB4Vd_G(jKm>8h z$iZg|MXC7F5`XV(ysUO5LWGnVMh3z>=g$%M9p6ao?jk~dEg@s-N`1(K?|_efJ+OYS zRDposOWTLl*na6qGD%zru${=d*e=$F`MYvKUQ&QdJk zuIcY+_M91j5fVtI`D+Z8Juo*Vf;hm$tV-yeDG^6|t5ai{f zLnwA-29%-Gyw)HGaK+$h1_9ZJ>gnOf^$9z38P`Y+!ji5Ktv=BC>?jEAXNYMazLqKQ zaQ(CE+#WR66Qt)9pPuk^Ju*6#_0YnluqwOVg*?YI+sQ|RbEkT=P9N{hggRj7m816Pkqm;!P@$R`YUiPENN0I;U(S0ca4dG8-If90<;Wu z2MRiDWrG{Z94S1)&DkX?hf1rpH(pN(KYjDs%&j7U=AeXDdTbFY{p=bM{#7|1L^K-s0L3Dw86iIQ7Qbcr}gq9w>!BI z`wDn+L1UC&Df_L~@UOZ#=UfL5kFYfwDa*t7vFVm6P?nK2Ohnr~Hc8P6-e=3fXyi$> zQ(|M&KH8EPm6Dz_kE<+W;5oM%TCGJFkY!p1F4L;5)d8s=Fs`*maEzQGugo0a&$|HQ zY43+K$?f5mKV>>rPsajohXj0=wCRirW}q8Mi~9H)Oe#dSLl*EG0g+y77 z2W~CeYuoDLb-C~U1Dxl@+|`WmPmU>da^1P=$alXH3~T65@=lpG3i1l$O;L_?jTj? z?cnH*{74nQgJ5vys;R)3OUczm4&&%WjBSPu&#zP|?`OyMB1NOE%yMX~YH70kT84nN znDoa&*H3PvzUHVoJ;EPQ#f;2lEqL8MqZQQ|8OuU>;e9avm5;g;fel|Ou+E>32Vl9w z{pU|Tsd7e+TEg+}wsMARs*-&y2~}%s+N#+SxiSBht4+ir^G)Xdm+VkRM97EW!OMva zE?dU~r{0rwWT)#Bqd{z0nqn5e)YIG$Dms9T*sE){mvP|JGIaY82A$?2!Tm}iKhGaq zH>e*r#s1Ww(PI`I~;XXtM5sEN8ds5XI*3;6IlZ4J4wRkjAak<)@KkO=tI4zunQ zg*dVgF8;IcT7&aartzg6=EHrp-QD_s>cC(k_^U>s2!GUcZR_M z+f9DeR)7Ny;14wdi#i)7^palo#<0!BVy}L)wW2065W6nXSqW3NLS_f`qX3nrk<3a! 
zVRY;3Pz{SuyF#H^l>81+ZALE>8^p>g=%Geg#;}|=*H^ub#SH!(fao@BnD>ME{xonaz^+isk zNA4640BWk^nyqdM2G$>vGe+jhwB1@hp_i6<07ZooC_koir{8;SlKbj6D&~mWO@#b#U#^v=PZXh* zQsH4{7NXP{qj^U77}t~CT(ElsTAXhm%Zr(+7pVz=Q;qQEiJM_kk|_9v|F6Z9On|qo z=)F|UN8QQm%t41SeV^g>xnWh2TlOWUGm3VN9#dgevg9t~h$XPT(KQQ2bmeDrVr8;D zSJm`J^ja)vq7%)exo(@){98StA(1yn8|L@Ar$yqUweqjY9Y{O~yI5bSu@=K7L>s{-a8pZ&?MzjOY9zp0}m! ztUVoACn7_H5E?r4_oC0spqBtK4Ol*R)hju~2f_}U+Z57|)Rn9Af*SdVL#G>-Jir!U zlmTVVrx=FyMbPj z9rLH00hd_ggDLpqfw!C5*#I1HuI(>deCUx3976kG3$Za`YEufq05{7%t2pqX>R3E_T z;m+|pIRwAMiQFIY#z7+dnCj6KK;fd}7EQWZQ#>6uZ8CGcURdjc<_|C518iSu%*i}Q zO}C-2_rJi+LEf{ZV)5^VNesFmfiAcW{O`fiUo~}Ycs~a>CMm2u_V2sZ5A0~{n zI0)dSLW5mC_rd?yWxzXfoe>}fOKdOcfFN~mGl6zGPwmIYN30l{`6P|Rp&OHM^A1hJLq=sOCZ=QZa{RLRkH?izLbmiQol z)2W{%Wt}%Nn8g;4XHV9oiY_SjhY-|aNj#=xq$Q8Obf}%XBIdB3v9XSPPS~u{pZTFM zr>v4|#!aEcD0b%VunO!IHlZFJ9`Y@4v2PQsiIAVrEkHTwzHU`EhNt%V$XkdiVzX~C zN4L8u5zz3P=McXN|85ApcPLgeCPU#`oUHO;><1 zPYuORb>_@?3|<9LPpcQ%7#STb^LiPNz=WbAtRkDi3_N4u4w2-@1M#b*L;xl8cbdft zL$Hv9B5-K&J4#T3$$*@!U$mxTC&_*0`FEIFU_s3 zAOPigNo2-ql@T$Y*n*AGU?;XkqrVc^LRMi6#4DoRcoU)V-l zF$qbn5K*%dQ|NjF-%Muy61OA~l||ZoIMP0Zt664Dlvsg-)QUBC1|g{}_YI8ey@w}S zqy5=sdKvyOfu0Og^+A<-cB5=Em}A!vV0J$u61F6qoiB*CaA^#+cn*Pbj70efy%PiY zMDE#;%*Dl^fH-aob~mH-%F14!WGv|p{=vL$_$osh8t1ErHuLiMi$Qg4Q$eqbB7hz- z{lD3!s}ZpQwH zoE?TKI@ol7zpU9?8QVB!lOLulS2)*WwnFvuR<6rXYn9&f3(oaQE!gPV#MP}*>?lrqjMhORNrEX6b?1_Ii4Fy>3-d|dV`0(vW4VZffuXT5S!m8fGHLlws$F$;Tsnq_0ue-)y-D|R*5RKWZEeXmI zm`(MXwAo+Bv_GzlE3c%y;D5ui736% z8Fw-#nEoBk1D&F{5nDaA^1MQ#Ng>4jWhDk&VbS56BXB2RcR+UU9B~2qgsSAi2_;M8 zIcI;xb`iEza7>=k^5L_-I}`=Aom8XVC!*uJQYsOutK%e08lx=?P4#NMS@lr!!Oa^2P&FbV*xo~mx^!Oao7VYb@+3b+LhzxsU8I~>)=ga z7-ZsuQ#47vN+pJ0G+jt1^(%RtpE_285gwRG`@ra36ad&5J9frmW0DX$j7O6Gb7Uc5 zy>Tzg&HAjaXVQWK_?cl)_o_NE*h9!G{TYhS-QQ0TWBL0i9lwNR=>JKxT;g12T(@+{0*hz zdQ;IGK+y|ph-$+#m9Z%*LIF&?UGLI>jwZQPbQxVfsPi6pG_`@*mkF1|0B5`=3Dr@i zisQ8A&e5eStT-zkovjjgiSg;|E5 zXHh4hxW%pAFQ*y?LK_Y7&+kMv>TVoH!Y9@~Xr~_{geU1#yRJzfP`wjY^}s7k?;ZOm z4Lb&X+Unkxas$sp6L*VA24gDmJczM3y@j!<=G! 
zuWI`Wx{V)Z4#Dy$HUh5Fc?5CvY+DkP>R+^t{D275uebaifkS^$hm7@%`0|ryZLWL6 zgk$u0jvS+6LjZxv@FoO+ZJO`^1I(5}CM@9dYE zj-3hE&-wC}hZetlo5l+(Zs9zd?N2y5%Z-8P0AF3fXbBQAQIfyI`A)lBR=LjedzbZ5>tk-E$xM@5_8)USy6$EU%LmxGAuFSXcHyu}~EUo<97 z&o;}xI}_<}pSS4P(HD{`fE==xTzR)h_1H;S^vl5AaJcrh=56kyM(V9ErcLJTp{^&Y z=8sRWnXh)Q3AW3a2e+)s{hkQWxw7J+|8Iu>;P2cBVl4<{EJCvF@^USHPU7;B?jr-{ zxYpiExu&N))aL=?(Z$3%dNdn~W5YFx8<$Gz|HwQj=q4}dru(*GPBkeTPt>XxR?AsU>2q{U!6}O}?#&tQ5M6b55oXUY5_hb) zNXO5;ueLXUj-3-w-=pTXyb#j!lT#%03Wo&_F8lzF%#gGj7XE=9-dd{DpBl+RJT>7t zhF>HEF}{a>yT(;>Sxe?1(c44a#3n5wz!Lh-#_rstwDotffTDb3G$*-Nh3KV%IcCTY zP25~M!E@|zvvvrrA=Cf<&vIhT)?uuSa`y9#M{BKqVo0mY*RV&$Z&JINvXVdA=#Ao* zRpEoSYD?q3)wM++R#w?{nTW01g?3R5?}sVay_Ay`_Az>`t+LHO2bXLkQzJ9lqp=re zCrXmzi=WXAS7~(+Js!p_;HaqsI5J%itNH%j=kG4cWreVQ;VD%C9G{pvYZrY|Wyk;~ zCtjzoV4-+r&yYt<*I?4~c{&fYegCA3fkY7mq!vm~O+-l1%*(%W(LBoW0`Oj%Xotjw z;phWDDHGsIU|32CB96a$nVNbu3h*33E$k-7H@BXkAeqa(3Nx!a-@axA9ge$JaDf^m z<*lx2V0Xt`H>NCJ7;z_~sdsIr#nz}AhSoncCA=q6`h~(40!#u4PSdIjT(M!O*BZ9T zjS3=n9GtG|GIA2f%9hLW(aiE=4B+_}V%128i z>Ht4Md=f|4GF3|)GCH|EZUbBca9_wHjM6Rxkd?o7Iaul_yHl<_02hTGS9cwA?;hmu zXA>w-Wz?a2lp>A&ErBeG6t8joVrZh7uS6bO22<)TTP17zW|Ey1r8dGZ$DLwV518Tm zP;Xu_RjHT0lvu?;4c9&WL5{V5XI->9?>Q4ziw^37-pIHahy`7>5;`^uYRnV-%%gV$7QcvoI#pn|3f`sYi8VOP}{+i5X# z+YEAWm;nl7SI8&gJ0Jsi7Y-N?^Nw5%dYL4~*PK;-e}C|ClD`H9QIc8p|5I?VQh=STxOii%!+2 zW$pUGMhxp$qYZF6slt(Q`ktH9JIXnZ_sv1S0l~f&FP9{d8;^1|77>OJ7rVry?#G#% zJX41_C4Kk8_Woy{Uf7u+>^)R&HYvJ<4NDp+24K3Dw*aK{fyIt}GATQ`rVn)Ck~~M4k?Fy#6H!?ly3(TA5oP}C z^_Lw9zN9|07T^SRM=y2ACY06D0Io-r-vS@kD2Vvs_urm#P)@LFVXV4p(?JUUfyZ*G zhI?6jO4)AuCzIl2vt4AXzM9U=ZnM}~o-p4ztjfR>^T`#{6}4V$zKOsRUOQvsuU+oX zoo@;GW_I6sIn$nrV6RZtHxwM|?m*T#o1!Ak@_8t`;q|;|RKM0k6N`4O@yS4Nzr_<= zsRa@l^Z$wH{hrPBaD~>PN>3i2UO56Jh|&#ox~?Lo+6MC;2Qc@uY7Oz<F4e;4YFOq8PRq_4V#P~w#EEe1YLdgaI_@w7xOvU#(x(EV z#~kKn>k6;#8xr|?y{L#-dPo4PqMjbN4z5W+O$w=5`?FYMVg=Bk{k>Eh33K3vbQ>sK zA`obZuWL}sY(Ufi$a8%y1jDR;)jbtWMjonl1?HG34VH0E!cP)uP^wiG?6i zU`E<@-4C)UCrCudYq}Y@#)fZJ(>sH_@%lOuMdu7Iey9Rgo@Yi-^1f~oEA+>a8WajE 
z=P#7SQWR@lr*z`1H;x1CUr4_nRpU1^)9-@rv5`P#T2`9O#>khq%?a4t`-RSEJ*J4p zP0kQeY~5)>Z8&1?5o_Tp?nSP;1Yw?P=>iCF<()No#ZFuaXBtX@)f>yh( z!|U}s!oySl(3xohqI9gd={?@n1{Ov$!)4)vu@ds6SK7JJuLY)(kK!+ZrE%{Xgaz4?gyA{RwwZ@e8g>K8Q^#i@|6^QluFz4fcb42d(LsV^r>Vw&+Zp`vb@Lhwdvx&{My~0g)e28+Q zF`ZT-t~kN=;C(0%VzO>hO^KUri)Z3fL6CDJEaX4z`+YwC%xiy4HIHj!Ihg;|5F0UT zY1-*;uC*0Ckzqd+dyFv3L5m?fUqZ5}37EhhGhS!aPgUxEdm|IwNk-*`C7H(|y7Qq~+NULL4RXkeIN*_#Wi6RB=V>>(GlTO(K>AvJl zlY8S*=#xV;MZ{l1|ousP)qD2r+yX}@^2F52Rg;C5d550dq=W9 zE}Sn#h4bE^8t;q?E}CDHZ1ikRF0BL|P_(I1$taM3-X3SKW@6(g@gaweB9(NxLm`TZ z180fBzgB*)Ok6Me8mdUva5UeKo$Pbf4csp@7|<5<7r;yyyu2@3N5QT!A9j1C6nE~7 z?s=}&U8bjU(vb9H(;`^sZcIohU35gn)xK~hd~Bf+X6~jBXDV@4rz4n?^}t8;Fhf#~ uY%4fjV~Thj#(L!oSi@(W$Jpq<`m@UFY?o2%A*|Ze-(mZxKi)rDHAV^)Tp;`a diff --git a/encoding/kzgrs/prover/decode_test.go b/encoding/kzgrs/prover/decode_test.go index 5b6a9d9a63..189145c79b 100644 --- a/encoding/kzgrs/prover/decode_test.go +++ b/encoding/kzgrs/prover/decode_test.go @@ -3,9 +3,8 @@ package prover_test import ( "testing" - enc "github.com/Layr-Labs/eigenda/encoding" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" - "github.com/Layr-Labs/eigenda/encoding/rs" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -16,14 +15,14 @@ func TestEncodeDecodeFrame_AreInverses(t *testing.T) { group, _ := prover.NewProver(kzgConfig, true) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(gettysburgAddressBytes))) - p, err := group.NewKzgEncoder(params) + p, err := group.GetKzgEncoder(params) require.Nil(t, err) require.NotNil(t, p) - _, _, _, frames, _, err := p.EncodeBytes(GETTYSBURG_ADDRESS_BYTES) + _, _, _, frames, _, err := p.EncodeBytes(gettysburgAddressBytes) require.Nil(t, err) require.NotNil(t, frames, err) @@ -31,7 +30,7 @@ func TestEncodeDecodeFrame_AreInverses(t *testing.T) { require.Nil(t, err) require.NotNil(t, b) - 
frame, err := enc.Decode(b) + frame, err := encoding.Decode(b) require.Nil(t, err) require.NotNil(t, frame) diff --git a/encoding/kzgrs/prover/parametrized_prover.go b/encoding/kzgrs/prover/parametrized_prover.go index 493332c8c2..10086aedd7 100644 --- a/encoding/kzgrs/prover/parametrized_prover.go +++ b/encoding/kzgrs/prover/parametrized_prover.go @@ -7,7 +7,7 @@ import ( "sync" "time" - enc "github.com/Layr-Labs/eigenda/encoding" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/encoding/rs" "github.com/Layr-Labs/eigenda/encoding/utils/toeplitz" @@ -35,12 +35,12 @@ type WorkerResult struct { } // just a wrapper to take bytes not Fr Element -func (g *ParametrizedProver) EncodeBytes(inputBytes []byte) (*bls.G1Point, *bls.G2Point, *bls.G2Point, []enc.Frame, []uint32, error) { +func (g *ParametrizedProver) EncodeBytes(inputBytes []byte) (*bls.G1Point, *bls.G2Point, *bls.G2Point, []encoding.Frame, []uint32, error) { inputFr := rs.ToFrArray(inputBytes) return g.Encode(inputFr) } -func (g *ParametrizedProver) Encode(inputFr []bls.Fr) (*bls.G1Point, *bls.G2Point, *bls.G2Point, []enc.Frame, []uint32, error) { +func (g *ParametrizedProver) Encode(inputFr []bls.Fr) (*bls.G1Point, *bls.G2Point, *bls.G2Point, []encoding.Frame, []uint32, error) { startTime := time.Now() poly, frames, indices, err := g.Encoder.Encode(inputFr) @@ -92,7 +92,7 @@ func (g *ParametrizedProver) Encode(inputFr []bls.Fr) (*bls.G1Point, *bls.G2Poin paddedCoeffs := make([]bls.Fr, g.NumEvaluations()) copy(paddedCoeffs, poly.Coeffs) - proofs, err := g.ProveAllCosetThreads(paddedCoeffs, g.NumChunks, g.ChunkLen, g.NumWorker) + proofs, err := g.ProveAllCosetThreads(paddedCoeffs, g.NumChunks, g.ChunkLength, g.NumWorker) if err != nil { return nil, nil, nil, nil, nil, fmt.Errorf("could not generate proofs: %v", err) } @@ -101,9 +101,9 @@ func (g *ParametrizedProver) Encode(inputFr []bls.Fr) (*bls.G1Point, *bls.G2Poin log.Printf(" Proving takes 
%v\n", time.Since(intermediate)) } - kzgFrames := make([]enc.Frame, len(frames)) + kzgFrames := make([]encoding.Frame, len(frames)) for i, index := range indices { - kzgFrames[i] = enc.Frame{ + kzgFrames[i] = encoding.Frame{ Proof: proofs[index], Coeffs: frames[i].Coeffs, } diff --git a/encoding/kzgrs/prover/parametrized_prover_test.go b/encoding/kzgrs/prover/parametrized_prover_test.go index 64a4a6fc4c..3b8b2c0024 100644 --- a/encoding/kzgrs/prover/parametrized_prover_test.go +++ b/encoding/kzgrs/prover/parametrized_prover_test.go @@ -4,6 +4,7 @@ import ( "fmt" "testing" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" @@ -18,11 +19,11 @@ func TestProveAllCosetThreads(t *testing.T) { group, _ := prover.NewProver(kzgConfig, true) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) - enc, err := group.NewKzgEncoder(params) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(gettysburgAddressBytes))) + enc, err := group.GetKzgEncoder(params) require.Nil(t, err) - inputFr := rs.ToFrArray(GETTYSBURG_ADDRESS_BYTES) + inputFr := rs.ToFrArray(gettysburgAddressBytes) commit, _, _, frames, fIndices, err := enc.Encode(inputFr) require.Nil(t, err) diff --git a/encoding/kzgrs/prover/precompute_test.go b/encoding/kzgrs/prover/precompute_test.go index 35729bf814..1c7c680fb8 100644 --- a/encoding/kzgrs/prover/precompute_test.go +++ b/encoding/kzgrs/prover/precompute_test.go @@ -6,9 +6,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" - "github.com/Layr-Labs/eigenda/encoding/rs" ) func TestNewSRSTable_PreComputeWorks(t *testing.T) { @@ -16,7 +16,7 @@ func TestNewSRSTable_PreComputeWorks(t *testing.T) { defer 
teardownSuite(t) kzgConfig.CacheDir = "./data/SRSTable" - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(gettysburgAddressBytes))) require.NotNil(t, params) s1, err := kzgrs.ReadG1Points(kzgConfig.G1Path, kzgConfig.SRSOrder, kzgConfig.NumWorker) @@ -30,7 +30,7 @@ func TestNewSRSTable_PreComputeWorks(t *testing.T) { require.Nil(t, err) require.NotNil(t, subTable1) - fftPoints1, err := subTable1.GetSubTables(params.NumChunks, params.ChunkLen) + fftPoints1, err := subTable1.GetSubTables(params.NumChunks, params.ChunkLength) require.Nil(t, err) require.NotNil(t, fftPoints1) @@ -38,7 +38,7 @@ func TestNewSRSTable_PreComputeWorks(t *testing.T) { require.Nil(t, err) require.NotNil(t, subTable2) - fftPoints2, err := subTable2.GetSubTables(params.NumChunks, params.ChunkLen) + fftPoints2, err := subTable2.GetSubTables(params.NumChunks, params.ChunkLength) require.Nil(t, err) require.NotNil(t, fftPoints2) diff --git a/encoding/kzgrs/prover/prover.go b/encoding/kzgrs/prover/prover.go index 566b1d5311..6c47be9ce9 100644 --- a/encoding/kzgrs/prover/prover.go +++ b/encoding/kzgrs/prover/prover.go @@ -11,6 +11,7 @@ import ( "strings" "sync" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/encoding/rs" kzg "github.com/Layr-Labs/eigenda/pkg/kzg" @@ -26,9 +27,11 @@ type Prover struct { mu sync.Mutex LoadG2Points bool - ParametrizedProvers map[rs.EncodingParams]*ParametrizedProver + ParametrizedProvers map[encoding.EncodingParams]*ParametrizedProver } +var _ encoding.Prover = &Prover{} + func NewProver(config *kzgrs.KzgConfig, loadG2Points bool) (*Prover, error) { if config.SRSNumberToLoad > config.SRSOrder { @@ -85,7 +88,7 @@ func NewProver(config *kzgrs.KzgConfig, loadG2Points bool) (*Prover, error) { KzgConfig: config, Srs: srs, G2Trailing: g2Trailing, - ParametrizedProvers: 
make(map[rs.EncodingParams]*ParametrizedProver), + ParametrizedProvers: make(map[encoding.EncodingParams]*ParametrizedProver), LoadG2Points: loadG2Points, } @@ -114,7 +117,7 @@ func (g *Prover) PreloadAllEncoders() error { } fmt.Printf("detect %v srs maps\n", len(paramsAll)) for i := 0; i < len(paramsAll); i++ { - fmt.Printf(" %v. NumChunks: %v ChunkLen: %v\n", i, paramsAll[i].NumChunks, paramsAll[i].ChunkLen) + fmt.Printf(" %v. NumChunks: %v ChunkLength: %v\n", i, paramsAll[i].NumChunks, paramsAll[i].ChunkLength) } if len(paramsAll) == 0 { @@ -133,7 +136,39 @@ func (g *Prover) PreloadAllEncoders() error { return nil } -func (g *Prover) GetKzgEncoder(params rs.EncodingParams) (*ParametrizedProver, error) { +func (e *Prover) EncodeAndProve(data []byte, params encoding.EncodingParams) (encoding.BlobCommitments, []*encoding.Frame, error) { + + enc, err := e.GetKzgEncoder(params) + if err != nil { + return encoding.BlobCommitments{}, nil, err + } + + commit, lowDegreeCommit, lowDegreeProof, kzgFrames, _, err := enc.EncodeBytes(data) + if err != nil { + return encoding.BlobCommitments{}, nil, err + } + + chunks := make([]*encoding.Frame, len(kzgFrames)) + for ind, frame := range kzgFrames { + + chunks[ind] = &encoding.Frame{ + Coeffs: frame.Coeffs, + Proof: frame.Proof, + } + } + + length := uint(len(rs.ToFrArray(data))) + commitments := encoding.BlobCommitments{ + Commitment: (*encoding.G1Commitment)(commit), + LengthCommitment: (*encoding.G2Commitment)(lowDegreeCommit), + LengthProof: (*encoding.G2Commitment)(lowDegreeProof), + Length: length, + } + + return commitments, chunks, nil +} + +func (g *Prover) GetKzgEncoder(params encoding.EncodingParams) (*ParametrizedProver, error) { g.mu.Lock() defer g.mu.Unlock() enc, ok := g.ParametrizedProvers[params] @@ -141,7 +176,7 @@ func (g *Prover) GetKzgEncoder(params rs.EncodingParams) (*ParametrizedProver, e return enc, nil } - enc, err := g.newKzgEncoder(params) + enc, err := g.newProver(params) if err == nil { 
g.ParametrizedProvers[params] = enc } @@ -149,18 +184,11 @@ func (g *Prover) GetKzgEncoder(params rs.EncodingParams) (*ParametrizedProver, e return enc, err } -func (g *Prover) NewKzgEncoder(params rs.EncodingParams) (*ParametrizedProver, error) { - g.mu.Lock() - defer g.mu.Unlock() - - return g.newKzgEncoder(params) -} - -func (g *Prover) newKzgEncoder(params rs.EncodingParams) (*ParametrizedProver, error) { +func (g *Prover) newProver(params encoding.EncodingParams) (*ParametrizedProver, error) { // Check that the parameters are valid with respect to the SRS. - if params.ChunkLen*params.NumChunks >= g.SRSOrder { - return nil, fmt.Errorf("the supplied encoding parameters are not valid with respect to the SRS. ChunkLength: %d, NumChunks: %d, SRSOrder: %d", params.ChunkLen, params.NumChunks, g.SRSOrder) + if params.ChunkLength*params.NumChunks >= g.SRSOrder { + return nil, fmt.Errorf("the supplied encoding parameters are not valid with respect to the SRS. ChunkLength: %d, NumChunks: %d, SRSOrder: %d", params.ChunkLength, params.NumChunks, g.SRSOrder) } encoder, err := rs.NewEncoder(params, g.Verbose) @@ -175,7 +203,7 @@ func (g *Prover) newKzgEncoder(params rs.EncodingParams) (*ParametrizedProver, e return nil, err } - fftPoints, err := subTable.GetSubTables(encoder.NumChunks, encoder.ChunkLen) + fftPoints, err := subTable.GetSubTables(encoder.NumChunks, encoder.ChunkLength) if err != nil { log.Println("could not get sub tables", err) return nil, err @@ -184,12 +212,12 @@ func (g *Prover) newKzgEncoder(params rs.EncodingParams) (*ParametrizedProver, e fftPointsT := make([][]bls.G1Point, len(fftPoints[0])) for i := range fftPointsT { fftPointsT[i] = make([]bls.G1Point, len(fftPoints)) - for j := uint64(0); j < encoder.ChunkLen; j++ { + for j := uint64(0); j < encoder.ChunkLength; j++ { fftPointsT[i][j] = fftPoints[j][i] } } n := uint8(math.Log2(float64(encoder.NumEvaluations()))) - if encoder.ChunkLen == 1 { + if encoder.ChunkLength == 1 { n = 
uint8(math.Log2(float64(2 * encoder.NumChunks))) } fs := kzg.NewFFTSettings(n) @@ -246,14 +274,14 @@ func (g *Prover) newKzgEncoder(params rs.EncodingParams) (*ParametrizedProver, e // where the first * specifies the dimension of the matrix which // equals to the number of chunks // where the second & specifies the length of each chunk -func GetAllPrecomputedSrsMap(tableDir string) ([]rs.EncodingParams, error) { +func GetAllPrecomputedSrsMap(tableDir string) ([]encoding.EncodingParams, error) { files, err := os.ReadDir(tableDir) if err != nil { log.Println("Error to list SRS Table directory", err) return nil, err } - tables := make([]rs.EncodingParams, 0) + tables := make([]encoding.EncodingParams, 0) for _, file := range files { filename := file.Name() @@ -270,11 +298,37 @@ func GetAllPrecomputedSrsMap(tableDir string) ([]rs.EncodingParams, error) { return nil, err } - params := rs.EncodingParams{ - NumChunks: uint64(cosetSizeValue), - ChunkLen: uint64(dimEValue), + params := encoding.EncodingParams{ + NumChunks: uint64(cosetSizeValue), + ChunkLength: uint64(dimEValue), } tables = append(tables, params) } return tables, nil } + +// Decode takes in the chunks, indices, and encoding parameters and returns the decoded blob +// The result is trimmed to the given maxInputSize. 
+func (p *Prover) Decode(chunks []*encoding.Frame, indices []encoding.ChunkNumber, params encoding.EncodingParams, maxInputSize uint64) ([]byte, error) { + frames := make([]encoding.Frame, len(chunks)) + for i := range chunks { + frames[i] = encoding.Frame{ + Proof: chunks[i].Proof, + Coeffs: chunks[i].Coeffs, + } + } + encoder, err := p.GetKzgEncoder(params) + if err != nil { + return nil, err + } + + return encoder.Decode(frames, toUint64Array(indices), maxInputSize) +} + +func toUint64Array(chunkIndices []encoding.ChunkNumber) []uint64 { + res := make([]uint64, len(chunkIndices)) + for i, d := range chunkIndices { + res[i] = uint64(d) + } + return res +} diff --git a/encoding/kzgrs/prover/prover_fuzz_test.go b/encoding/kzgrs/prover/prover_fuzz_test.go index 2848a5765c..51ed05bd04 100644 --- a/encoding/kzgrs/prover/prover_fuzz_test.go +++ b/encoding/kzgrs/prover/prover_fuzz_test.go @@ -3,20 +3,20 @@ package prover_test import ( "testing" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" - "github.com/Layr-Labs/eigenda/encoding/rs" "github.com/stretchr/testify/assert" ) func FuzzOnlySystematic(f *testing.F) { - f.Add(GETTYSBURG_ADDRESS_BYTES) + f.Add(gettysburgAddressBytes) f.Fuzz(func(t *testing.T, input []byte) { group, _ := prover.NewProver(kzgConfig, true) - params := rs.GetEncodingParams(10, 3, uint64(len(input))) - enc, err := group.NewKzgEncoder(params) + params := encoding.ParamsFromSysPar(10, 3, uint64(len(input))) + enc, err := group.GetKzgEncoder(params) if err != nil { t.Errorf("Error making rs: %q", err) } diff --git a/encoding/kzgrs/prover/prover_test.go b/encoding/kzgrs/prover/prover_test.go index 76935ef4dd..9ef23bf5eb 100644 --- a/encoding/kzgrs/prover/prover_test.go +++ b/encoding/kzgrs/prover/prover_test.go @@ -1,14 +1,19 @@ package prover_test import ( + cryptorand "crypto/rand" "log" "math/rand" "os" "runtime" "testing" - enc "github.com/Layr-Labs/eigenda/encoding" + 
"github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" + + "github.com/stretchr/testify/assert" ) const ( @@ -16,11 +21,11 @@ const ( ) var ( - GETTYSBURG_ADDRESS_BYTES = []byte("Fourscore and seven years ago our fathers brought forth, on this continent, a new nation, conceived in liberty, and dedicated to the proposition that all men are created equal. Now we are engaged in a great civil war, testing whether that nation, or any nation so conceived, and so dedicated, can long endure. We are met on a great battle-field of that war. We have come to dedicate a portion of that field, as a final resting-place for those who here gave their lives, that that nation might live. It is altogether fitting and proper that we should do this. But, in a larger sense, we cannot dedicate, we cannot consecrate—we cannot hallow—this ground. The brave men, living and dead, who struggled here, have consecrated it far above our poor power to add or detract. The world will little note, nor long remember what we say here, but it can never forget what they did here. It is for us the living, rather, to be dedicated here to the unfinished work which they who fought here have thus far so nobly advanced. 
It is rather for us to be here dedicated to the great task remaining before us—that from these honored dead we take increased devotion to that cause for which they here gave the last full measure of devotion—that we here highly resolve that these dead shall not have died in vain—that this nation, under God, shall have a new birth of freedom, and that government of the people, by the people, for the people, shall not perish from the earth.") - kzgConfig *kzgrs.KzgConfig - numNode uint64 - numSys uint64 - numPar uint64 + gettysburgAddressBytes = []byte("Fourscore and seven years ago our fathers brought forth, on this continent, a new nation, conceived in liberty, and dedicated to the proposition that all men are created equal. Now we are engaged in a great civil war, testing whether that nation, or any nation so conceived, and so dedicated, can long endure. We are met on a great battle-field of that war. We have come to dedicate a portion of that field, as a final resting-place for those who here gave their lives, that that nation might live. It is altogether fitting and proper that we should do this. But, in a larger sense, we cannot dedicate, we cannot consecrate—we cannot hallow—this ground. The brave men, living and dead, who struggled here, have consecrated it far above our poor power to add or detract. The world will little note, nor long remember what we say here, but it can never forget what they did here. It is for us the living, rather, to be dedicated here to the unfinished work which they who fought here have thus far so nobly advanced. 
It is rather for us to be here dedicated to the great task remaining before us—that from these honored dead we take increased devotion to that cause for which they here gave the last full measure of devotion—that we here highly resolve that these dead shall not have died in vain—that this nation, under God, shall have a new birth of freedom, and that government of the people, by the people, for the people, shall not perish from the earth.") + kzgConfig *kzgrs.KzgConfig + numNode uint64 + numSys uint64 + numPar uint64 ) func setupSuite(t *testing.T) func(t *testing.T) { @@ -48,8 +53,8 @@ func setupSuite(t *testing.T) func(t *testing.T) { } } -func sampleFrames(frames []enc.Frame, num uint64) ([]enc.Frame, []uint64) { - samples := make([]enc.Frame, num) +func sampleFrames(frames []encoding.Frame, num uint64) ([]encoding.Frame, []uint64) { + samples := make([]encoding.Frame, num) indices := rand.Perm(len(frames)) indices = indices[:num] @@ -60,3 +65,75 @@ func sampleFrames(frames []enc.Frame, num uint64) ([]enc.Frame, []uint64) { } return samples, frameIndices } + +func TestEncoder(t *testing.T) { + + p, _ := prover.NewProver(kzgConfig, true) + v, _ := verifier.NewVerifier(kzgConfig, true) + + params := encoding.ParamsFromMins(5, 5) + commitments, chunks, err := p.EncodeAndProve(gettysburgAddressBytes, params) + assert.NoError(t, err) + + indices := []encoding.ChunkNumber{ + 0, 1, 2, 3, 4, 5, 6, 7, + } + err = v.VerifyFrames(chunks, indices, commitments, params) + assert.NoError(t, err) + err = v.VerifyFrames(chunks, []encoding.ChunkNumber{ + 7, 6, 5, 4, 3, 2, 1, 0, + }, commitments, params) + assert.Error(t, err) + + maxInputSize := uint64(len(gettysburgAddressBytes)) + decoded, err := p.Decode(chunks, indices, params, maxInputSize) + assert.NoError(t, err) + assert.Equal(t, gettysburgAddressBytes, decoded) + + // shuffle chunks + tmp := chunks[2] + chunks[2] = chunks[5] + chunks[5] = tmp + indices = []encoding.ChunkNumber{ + 0, 1, 5, 3, 4, 2, 6, 7, + } + + err = 
v.VerifyFrames(chunks, indices, commitments, params) + assert.NoError(t, err) + + decoded, err = p.Decode(chunks, indices, params, maxInputSize) + assert.NoError(t, err) + assert.Equal(t, gettysburgAddressBytes, decoded) +} + +// Ballpark number for 400KiB blob encoding +// +// goos: darwin +// goarch: arm64 +// pkg: github.com/Layr-Labs/eigenda/core/encoding +// BenchmarkEncode-12 1 2421900583 ns/op +func BenchmarkEncode(b *testing.B) { + + p, _ := prover.NewProver(kzgConfig, true) + + params := encoding.EncodingParams{ + ChunkLength: 512, + NumChunks: 256, + } + blobSize := 400 * 1024 + numSamples := 30 + blobs := make([][]byte, numSamples) + for i := 0; i < numSamples; i++ { + blob := make([]byte, blobSize) + _, _ = cryptorand.Read(blob) + blobs[i] = blob + } + + // Warm up the encoder: ensures that all SRS tables are loaded so these aren't included in the benchmark. + _, _, _ = p.EncodeAndProve(blobs[0], params) + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, _, _ = p.EncodeAndProve(blobs[i%numSamples], params) + } +} diff --git a/encoding/kzgrs/verifier/batch_commit_equivalence.go b/encoding/kzgrs/verifier/batch_commit_equivalence.go index 4101d428d3..7b61b23e31 100644 --- a/encoding/kzgrs/verifier/batch_commit_equivalence.go +++ b/encoding/kzgrs/verifier/batch_commit_equivalence.go @@ -5,6 +5,7 @@ import ( "encoding/gob" "errors" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" ) @@ -66,6 +67,18 @@ func CreateRandomnessVector(g1commits []bn254.G1Point, g2commits []bn254.G2Point return randomsFr, nil } +func (v *Verifier) VerifyCommitEquivalenceBatch(commitments []encoding.BlobCommitments) error { + commitmentsPair := make([]CommitmentPair, len(commitments)) + + for i, c := range commitments { + commitmentsPair[i] = CommitmentPair{ + Commitment: (bn254.G1Point)(*c.Commitment), + LengthCommitment: (bn254.G2Point)(*c.LengthCommitment), + } + } + return v.BatchVerifyCommitEquivalence(commitmentsPair) +} + func (group 
*Verifier) BatchVerifyCommitEquivalence(commitmentsPair []CommitmentPair) error { g1commits := make([]bn254.G1Point, len(commitmentsPair)) diff --git a/encoding/kzgrs/verifier/batch_commit_equivalence_test.go b/encoding/kzgrs/verifier/batch_commit_equivalence_test.go index 58e57d90b7..0ba1433140 100644 --- a/encoding/kzgrs/verifier/batch_commit_equivalence_test.go +++ b/encoding/kzgrs/verifier/batch_commit_equivalence_test.go @@ -3,6 +3,7 @@ package verifier_test import ( "testing" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" "github.com/Layr-Labs/eigenda/encoding/rs" @@ -17,8 +18,8 @@ func TestBatchEquivalence(t *testing.T) { group, _ := prover.NewProver(kzgConfig, true) v, _ := verifier.NewVerifier(kzgConfig, true) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) - enc, err := group.NewKzgEncoder(params) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) + enc, err := group.GetKzgEncoder(params) require.Nil(t, err) inputFr := rs.ToFrArray(GETTYSBURG_ADDRESS_BYTES) diff --git a/encoding/kzgrs/verifier/degree_test.go b/encoding/kzgrs/verifier/degree_test.go index 052bb771ef..ce275eabee 100644 --- a/encoding/kzgrs/verifier/degree_test.go +++ b/encoding/kzgrs/verifier/degree_test.go @@ -3,6 +3,7 @@ package verifier_test import ( "testing" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" "github.com/Layr-Labs/eigenda/encoding/rs" @@ -16,8 +17,8 @@ func TestLengthProof(t *testing.T) { group, _ := prover.NewProver(kzgConfig, true) v, _ := verifier.NewVerifier(kzgConfig, true) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) - enc, err := group.NewKzgEncoder(params) + params := encoding.ParamsFromSysPar(numSys, numPar, 
uint64(len(GETTYSBURG_ADDRESS_BYTES))) + enc, err := group.GetKzgEncoder(params) require.Nil(t, err) numBlob := 5 diff --git a/encoding/kzgrs/verifier/frame_test.go b/encoding/kzgrs/verifier/frame_test.go index c75ff38e14..eaf6c55778 100644 --- a/encoding/kzgrs/verifier/frame_test.go +++ b/encoding/kzgrs/verifier/frame_test.go @@ -7,10 +7,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" - "github.com/Layr-Labs/eigenda/encoding/rs" kzg "github.com/Layr-Labs/eigenda/pkg/kzg" ) @@ -20,9 +20,9 @@ func TestVerify(t *testing.T) { group, _ := prover.NewProver(kzgConfig, true) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) - enc, err := group.NewKzgEncoder(params) + enc, err := group.GetKzgEncoder(params) require.Nil(t, err) require.NotNil(t, enc) diff --git a/encoding/kzgrs/verifier/multiframe.go b/encoding/kzgrs/verifier/multiframe.go index f14649198b..f81cf6e56c 100644 --- a/encoding/kzgrs/verifier/multiframe.go +++ b/encoding/kzgrs/verifier/multiframe.go @@ -7,9 +7,11 @@ import ( "fmt" "math" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/encoding/rs" kzg "github.com/Layr-Labs/eigenda/pkg/kzg" + "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" bls "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" ) @@ -69,10 +71,10 @@ func GenRandomnessVector(samples []Sample) ([]bls.Fr, error) { } // the rhsG1 comprises of three terms, see https://ethresear.ch/t/a-universal-verification-equation-for-data-availability-sampling/13240/1 -func genRhsG1(samples []Sample, randomsFr []bls.Fr, m int, params rs.EncodingParams, ks *kzg.KZGSettings, proofs []bls.G1Point) 
(*bls.G1Point, error) { +func genRhsG1(samples []Sample, randomsFr []bls.Fr, m int, params encoding.EncodingParams, ks *kzg.KZGSettings, proofs []bls.G1Point) (*bls.G1Point, error) { n := len(samples) commits := make([]bls.G1Point, m) - D := params.ChunkLen + D := params.ChunkLength var tmp bls.Fr @@ -109,7 +111,7 @@ func genRhsG1(samples []Sample, randomsFr []bls.Fr, m int, params rs.EncodingPar rk := randomsFr[k] // for each monomial in a given polynomial, multiply its coefficient with the corresponding random field, - // then sum it with others. Given ChunkLen (D) is identical for all samples in a subBatch. + // then sum it with others. Given ChunkLength (D) is identical for all samples in a subBatch. // The operation is always valid. for j := uint64(0); j < D; j++ { bls.MulModFr(&tmp, &coeffs[j], &rk) @@ -156,6 +158,33 @@ func genRhsG1(samples []Sample, randomsFr []bls.Fr, m int, params rs.EncodingPar return &rhsG1, nil } +// TODO(mooselumph): Cleanup this function +func (v *Verifier) UniversalVerifySubBatch(params encoding.EncodingParams, samplesCore []encoding.Sample, numBlobs int) error { + + samples := make([]Sample, len(samplesCore)) + + for i, sc := range samplesCore { + x, err := rs.GetLeadingCosetIndex( + uint64(sc.AssignmentIndex), + params.NumChunks, + ) + if err != nil { + return err + } + + sample := Sample{ + Commitment: (bn254.G1Point)(*sc.Commitment), + Proof: sc.Chunk.Proof, + RowIndex: sc.BlobIndex, + Coeffs: sc.Chunk.Coeffs, + X: uint(x), + } + samples[i] = sample + } + + return v.UniversalVerify(params, samples, numBlobs) +} + // UniversalVerify implements batch verification on a set of chunks given the same chunk dimension (chunkLen, numChunk). 
// The details is given in Ethereum Research post whose authors are George Kadianakis, Ansgar Dietrichs, Dankrad Feist // https://ethresear.ch/t/a-universal-verification-equation-for-data-availability-sampling/13240 @@ -164,7 +193,7 @@ func genRhsG1(samples []Sample, randomsFr []bls.Fr, m int, params rs.EncodingPar // // The order of samples do not matter. // Each sample need not have unique row, it is possible that multiple chunks of the same blob are validated altogether -func (group *Verifier) UniversalVerify(params rs.EncodingParams, samples []Sample, m int) error { +func (group *Verifier) UniversalVerify(params encoding.EncodingParams, samples []Sample, m int) error { // precheck for i, s := range samples { if s.RowIndex >= m { @@ -179,14 +208,14 @@ func (group *Verifier) UniversalVerify(params rs.EncodingParams, samples []Sampl } ks := verifier.Ks - D := params.ChunkLen + D := params.ChunkLength if D > group.SRSNumberToLoad { return fmt.Errorf("requested chunkLen %v is larger than Loaded SRS points %v.", D, group.SRSNumberToLoad) } n := len(samples) - fmt.Printf("Batch verify %v frames of %v symbols out of %v blobs \n", n, params.ChunkLen, m) + fmt.Printf("Batch verify %v frames of %v symbols out of %v blobs \n", n, params.ChunkLength, m) // generate random field elements to aggregate equality check randomsFr, err := GenRandomnessVector(samples) diff --git a/encoding/kzgrs/verifier/multiframe_test.go b/encoding/kzgrs/verifier/multiframe_test.go index ca39c241bb..2cd199158e 100644 --- a/encoding/kzgrs/verifier/multiframe_test.go +++ b/encoding/kzgrs/verifier/multiframe_test.go @@ -3,6 +3,7 @@ package verifier_test import ( "testing" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" "github.com/Layr-Labs/eigenda/encoding/rs" @@ -17,8 +18,8 @@ func TestUniversalVerify(t *testing.T) { group, _ := prover.NewProver(kzgConfig, true) v, _ := 
verifier.NewVerifier(kzgConfig, true) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) - enc, err := group.NewKzgEncoder(params) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) + enc, err := group.GetKzgEncoder(params) require.Nil(t, err) numBlob := 5 @@ -65,8 +66,8 @@ func TestUniversalVerifyWithPowerOf2G2(t *testing.T) { v, err := verifier.NewVerifier(kzgConfig, true) assert.NoError(t, err) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) - enc, err := group.NewKzgEncoder(params) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) + enc, err := group.GetKzgEncoder(params) assert.NoError(t, err) numBlob := 5 diff --git a/encoding/kzgrs/verifier/verifier.go b/encoding/kzgrs/verifier/verifier.go index 992efa2d13..f77153cd8f 100644 --- a/encoding/kzgrs/verifier/verifier.go +++ b/encoding/kzgrs/verifier/verifier.go @@ -8,12 +8,12 @@ import ( "runtime" "sync" - enc "github.com/Layr-Labs/eigenda/encoding" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/encoding/rs" kzg "github.com/Layr-Labs/eigenda/pkg/kzg" + "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" bls "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" - wbls "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" ) type Verifier struct { @@ -23,9 +23,11 @@ type Verifier struct { mu sync.Mutex LoadG2Points bool - ParametrizedVerifiers map[rs.EncodingParams]*ParametrizedVerifier + ParametrizedVerifiers map[encoding.EncodingParams]*ParametrizedVerifier } +var _ encoding.Verifier = &Verifier{} + func NewVerifier(config *kzgrs.KzgConfig, loadG2Points bool) (*Verifier, error) { if config.SRSNumberToLoad > config.SRSOrder { @@ -82,7 +84,7 @@ func NewVerifier(config *kzgrs.KzgConfig, loadG2Points bool) (*Verifier, error) KzgConfig: config, Srs: srs, G2Trailing: g2Trailing, - ParametrizedVerifiers: 
make(map[rs.EncodingParams]*ParametrizedVerifier), + ParametrizedVerifiers: make(map[encoding.EncodingParams]*ParametrizedVerifier), LoadG2Points: loadG2Points, } @@ -94,13 +96,13 @@ type ParametrizedVerifier struct { *kzgrs.KzgConfig Srs *kzg.SRS - rs.EncodingParams + *rs.Encoder Fs *kzg.FFTSettings Ks *kzg.KZGSettings } -func (g *Verifier) GetKzgVerifier(params rs.EncodingParams) (*ParametrizedVerifier, error) { +func (g *Verifier) GetKzgVerifier(params encoding.EncodingParams) (*ParametrizedVerifier, error) { g.mu.Lock() defer g.mu.Unlock() @@ -121,13 +123,13 @@ func (g *Verifier) GetKzgVerifier(params rs.EncodingParams) (*ParametrizedVerifi return ver, err } -func (g *Verifier) NewKzgVerifier(params rs.EncodingParams) (*ParametrizedVerifier, error) { +func (g *Verifier) NewKzgVerifier(params encoding.EncodingParams) (*ParametrizedVerifier, error) { g.mu.Lock() defer g.mu.Unlock() return g.newKzgVerifier(params) } -func (g *Verifier) newKzgVerifier(params rs.EncodingParams) (*ParametrizedVerifier, error) { +func (g *Verifier) newKzgVerifier(params encoding.EncodingParams) (*ParametrizedVerifier, error) { if err := params.Validate(); err != nil { return nil, err @@ -141,18 +143,29 @@ func (g *Verifier) newKzgVerifier(params rs.EncodingParams) (*ParametrizedVerifi return nil, err } + encoder, err := rs.NewEncoder(params, g.Verbose) + if err != nil { + log.Println("Could not create encoder: ", err) + return nil, err + } + return &ParametrizedVerifier{ - KzgConfig: g.KzgConfig, - Srs: g.Srs, - EncodingParams: params, - Fs: fs, - Ks: ks, + KzgConfig: g.KzgConfig, + Srs: g.Srs, + Encoder: encoder, + Fs: fs, + Ks: ks, }, nil } +func (v *Verifier) VerifyBlobLength(commitments encoding.BlobCommitments) error { + return v.VerifyCommit((*bn254.G2Point)(commitments.LengthCommitment), (*bn254.G2Point)(commitments.LengthProof), uint64(commitments.Length)) + +} + // VerifyCommit verifies the low degree proof; since it doesn't depend on the encoding parameters // we leave it as 
a method of the KzgEncoderGroup -func (v *Verifier) VerifyCommit(lengthCommit *wbls.G2Point, lowDegreeProof *wbls.G2Point, length uint64) error { +func (v *Verifier) VerifyCommit(lengthCommit *bls.G2Point, lowDegreeProof *bls.G2Point, length uint64) error { g1Challenge, err := kzgrs.ReadG1Point(v.SRSOrder-length, v.KzgConfig) if err != nil { @@ -176,7 +189,30 @@ func VerifyLowDegreeProof(lengthCommit *bls.G2Point, proof *bls.G2Point, g1Chall return bls.PairingsVerify(g1Challenge, lengthCommit, &bls.GenG1, proof) } -func (v *ParametrizedVerifier) VerifyFrame(commit *wbls.G1Point, f *enc.Frame, index uint64) error { +func (v *Verifier) VerifyFrames(frames []*encoding.Frame, indices []encoding.ChunkNumber, commitments encoding.BlobCommitments, params encoding.EncodingParams) error { + + verifier, err := v.GetKzgVerifier(params) + if err != nil { + return err + } + + for ind := range frames { + err = verifier.VerifyFrame( + (*bn254.G1Point)(commitments.Commitment), + frames[ind], + uint64(indices[ind]), + ) + + if err != nil { + return err + } + } + + return nil + +} + +func (v *ParametrizedVerifier) VerifyFrame(commit *bls.G1Point, f *encoding.Frame, index uint64) error { j, err := rs.GetLeadingCosetIndex( uint64(index), @@ -200,7 +236,7 @@ func (v *ParametrizedVerifier) VerifyFrame(commit *wbls.G1Point, f *enc.Frame, i } // Verify function assumes the Data stored is coefficients of coset's interpolating poly -func VerifyFrame(f *enc.Frame, ks *kzg.KZGSettings, commitment *bls.G1Point, x *bls.Fr, g2Atn *bls.G2Point) bool { +func VerifyFrame(f *encoding.Frame, ks *kzg.KZGSettings, commitment *bls.G1Point, x *bls.Fr, g2Atn *bls.G2Point) bool { var xPow bls.Fr bls.CopyFr(&xPow, &bls.ONE) @@ -235,3 +271,28 @@ func VerifyFrame(f *enc.Frame, ks *kzg.KZGSettings, commitment *bls.G1Point, x * return bls.PairingsVerify(&commitMinusInterpolation, &bls.GenG2, &f.Proof, &xnMinusYn) } + +// Decode takes in the chunks, indices, and encoding parameters and returns the decoded blob 
+// The result is trimmed to the given maxInputSize. +func (v *Verifier) Decode(chunks []*encoding.Frame, indices []encoding.ChunkNumber, params encoding.EncodingParams, maxInputSize uint64) ([]byte, error) { + frames := make([]rs.Frame, len(chunks)) + for i := range chunks { + frames[i] = rs.Frame{ + Coeffs: chunks[i].Coeffs, + } + } + encoder, err := v.GetKzgVerifier(params) + if err != nil { + return nil, err + } + + return encoder.Decode(frames, toUint64Array(indices), maxInputSize) +} + +func toUint64Array(chunkIndices []encoding.ChunkNumber) []uint64 { + res := make([]uint64, len(chunkIndices)) + for i, d := range chunkIndices { + res[i] = uint64(d) + } + return res +} diff --git a/encoding/kzgrs/verifier/verifier_test.go b/encoding/kzgrs/verifier/verifier_test.go index f7b3387db4..5a82cfacb5 100644 --- a/encoding/kzgrs/verifier/verifier_test.go +++ b/encoding/kzgrs/verifier/verifier_test.go @@ -1,12 +1,18 @@ package verifier_test import ( + "crypto/rand" + "fmt" "log" "os" "runtime" "testing" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" + "github.com/stretchr/testify/assert" ) const ( @@ -45,3 +51,89 @@ func setupSuite(t *testing.T) func(t *testing.T) { os.RemoveAll("./data") } } + +// var control interface{ Stop() } + +func TestBenchmarkVerifyChunks(t *testing.T) { + t.Skip("This test is meant to be run manually, not as part of the test suite") + + p, _ := prover.NewProver(kzgConfig, true) + v, _ := verifier.NewVerifier(kzgConfig, true) + + chunkLengths := []uint64{64, 128, 256, 512, 1024, 2048, 4096, 8192} + chunkCounts := []int{4, 8, 16} + + file, err := os.Create("benchmark_results.csv") + if err != nil { + t.Fatalf("Failed to open file for writing: %v", err) + } + defer file.Close() + + fmt.Fprintln(file, "numChunks,chunkLength,ns/op,allocs/op") + + for _, chunkLength := range chunkLengths { + + 
blobSize := chunkLength * 31 * 2 + params := encoding.EncodingParams{ + ChunkLength: chunkLength, + NumChunks: 16, + } + blob := make([]byte, blobSize) + _, err = rand.Read(blob) + assert.NoError(t, err) + + commitments, chunks, err := p.EncodeAndProve(blob, params) + assert.NoError(t, err) + + indices := make([]encoding.ChunkNumber, params.NumChunks) + for i := range indices { + indices[i] = encoding.ChunkNumber(i) + } + + for _, numChunks := range chunkCounts { + + result := testing.Benchmark(func(b *testing.B) { + for i := 0; i < b.N; i++ { + // control = profile.Start(profile.ProfilePath(".")) + err := v.VerifyFrames(chunks[:numChunks], indices[:numChunks], commitments, params) + assert.NoError(t, err) + // control.Stop() + } + }) + // Print results in CSV format + fmt.Fprintf(file, "%d,%d,%d,%d\n", numChunks, chunkLength, result.NsPerOp(), result.AllocsPerOp()) + + } + } + +} + +func BenchmarkVerifyBlob(b *testing.B) { + + p, _ := prover.NewProver(kzgConfig, true) + v, _ := verifier.NewVerifier(kzgConfig, true) + + params := encoding.EncodingParams{ + ChunkLength: 256, + NumChunks: 8, + } + blobSize := 8 * 256 + numSamples := 30 + blobs := make([][]byte, numSamples) + for i := 0; i < numSamples; i++ { + blob := make([]byte, blobSize) + _, _ = rand.Read(blob) + blobs[i] = blob + } + + commitments, _, err := p.EncodeAndProve(blobs[0], params) + assert.NoError(b, err) + + b.ResetTimer() + + for i := 0; i < b.N; i++ { + err = v.VerifyBlobLength(commitments) + assert.NoError(b, err) + } + +} diff --git a/encoding/mock/encoder.go b/encoding/mock/encoder.go new file mode 100644 index 0000000000..3f643fb745 --- /dev/null +++ b/encoding/mock/encoder.go @@ -0,0 +1,54 @@ +package encoding + +import ( + "time" + + "github.com/Layr-Labs/eigenda/encoding" + "github.com/stretchr/testify/mock" +) + +type MockEncoder struct { + mock.Mock + + Delay time.Duration +} + +var _ encoding.Prover = &MockEncoder{} + +var _ encoding.Verifier = &MockEncoder{} + +func (e *MockEncoder) 
EncodeAndProve(data []byte, params encoding.EncodingParams) (encoding.BlobCommitments, []*encoding.Frame, error) { + args := e.Called(data, params) + time.Sleep(e.Delay) + return args.Get(0).(encoding.BlobCommitments), args.Get(1).([]*encoding.Frame), args.Error(2) +} + +func (e *MockEncoder) VerifyFrames(chunks []*encoding.Frame, indices []encoding.ChunkNumber, commitments encoding.BlobCommitments, params encoding.EncodingParams) error { + args := e.Called(chunks, indices, commitments, params) + time.Sleep(e.Delay) + return args.Error(0) +} + +func (e *MockEncoder) UniversalVerifySubBatch(params encoding.EncodingParams, samples []encoding.Sample, numBlobs int) error { + args := e.Called(params, samples, numBlobs) + time.Sleep(e.Delay) + return args.Error(0) +} +func (e *MockEncoder) VerifyCommitEquivalenceBatch(commitments []encoding.BlobCommitments) error { + args := e.Called(commitments) + time.Sleep(e.Delay) + return args.Error(0) +} + +func (e *MockEncoder) VerifyBlobLength(commitments encoding.BlobCommitments) error { + + args := e.Called(commitments) + time.Sleep(e.Delay) + return args.Error(0) +} + +func (e *MockEncoder) Decode(chunks []*encoding.Frame, indices []encoding.ChunkNumber, params encoding.EncodingParams, maxInputSize uint64) ([]byte, error) { + args := e.Called(chunks, indices, params, maxInputSize) + time.Sleep(e.Delay) + return args.Get(0).([]byte), args.Error(1) +} diff --git a/encoding/params.go b/encoding/params.go new file mode 100644 index 0000000000..6697aed70b --- /dev/null +++ b/encoding/params.go @@ -0,0 +1,76 @@ +package encoding + +import ( + "errors" + "fmt" + + bls "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" + "golang.org/x/exp/constraints" +) + +var ( + ErrInvalidParams = errors.New("invalid encoding params") +) + +type EncodingParams struct { + ChunkLength uint64 // ChunkSize is the length of the chunk in symbols + NumChunks uint64 +} + +func (p EncodingParams) ChunkDegree() uint64 { + return p.ChunkLength - 1 +} + +func (p 
EncodingParams) NumEvaluations() uint64 { + return p.NumChunks * p.ChunkLength +} + +func (p EncodingParams) Validate() error { + + if NextPowerOf2(p.NumChunks) != p.NumChunks { + return ErrInvalidParams + } + + if NextPowerOf2(p.ChunkLength) != p.ChunkLength { + return ErrInvalidParams + } + + return nil +} + +func ParamsFromMins[T constraints.Integer](minChunkLength, minNumChunks T) EncodingParams { + return EncodingParams{ + NumChunks: NextPowerOf2(uint64(minNumChunks)), + ChunkLength: NextPowerOf2(uint64(minChunkLength)), + } +} + +func ParamsFromSysPar(numSys, numPar, dataSize uint64) EncodingParams { + + numNodes := numSys + numPar + dataLen := roundUpDivide(dataSize, bls.BYTES_PER_COEFFICIENT) + chunkLen := roundUpDivide(dataLen, numSys) + return ParamsFromMins(chunkLen, numNodes) + +} + +func GetNumSys(dataSize uint64, chunkLen uint64) uint64 { + dataLen := roundUpDivide(dataSize, bls.BYTES_PER_COEFFICIENT) + numSys := dataLen / chunkLen + return numSys +} + +// ValidateEncodingParams takes in the encoding parameters and returns an error if they are invalid. +func ValidateEncodingParams(params EncodingParams, blobLength, SRSOrder int) error { + + if int(params.ChunkLength*params.NumChunks) >= SRSOrder { + return fmt.Errorf("the supplied encoding parameters are not valid with respect to the SRS. 
ChunkLength: %d, NumChunks: %d, SRSOrder: %d", params.ChunkLength, params.NumChunks, SRSOrder) + } + + if int(params.ChunkLength*params.NumChunks) < blobLength { + return fmt.Errorf("the supplied encoding parameters are not sufficient for the size of the data input") + } + + return nil + +} diff --git a/encoding/rs/decode.go b/encoding/rs/decode.go index fef231aae6..55484277cf 100644 --- a/encoding/rs/decode.go +++ b/encoding/rs/decode.go @@ -3,6 +3,7 @@ package rs import ( "errors" + "github.com/Layr-Labs/eigenda/encoding" bls "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" ) @@ -16,7 +17,7 @@ import ( // the frames and indices don't encode the length of the original data. If maxInputSize // is smaller than the original input size, decoded data will be trimmed to fit the maxInputSize. func (g *Encoder) Decode(frames []Frame, indices []uint64, maxInputSize uint64) ([]byte, error) { - numSys := GetNumSys(maxInputSize, g.ChunkLen) + numSys := encoding.GetNumSys(maxInputSize, g.ChunkLength) if uint64(len(frames)) < numSys { return nil, errors.New("number of frame must be sufficient") @@ -37,7 +38,7 @@ func (g *Encoder) Decode(frames []Frame, indices []uint64, maxInputSize uint64) } // Some pattern i butterfly swap. 
Find the leading coset, then increment by number of coset - for j := uint64(0); j < g.ChunkLen; j++ { + for j := uint64(0); j < g.ChunkLength; j++ { p := j*g.NumChunks + uint64(e) samples[p] = new(bls.Fr) bls.CopyFr(samples[p], &evals[j]) diff --git a/encoding/rs/encode.go b/encoding/rs/encode.go index 3bc05145bd..d9d013f4af 100644 --- a/encoding/rs/encode.go +++ b/encoding/rs/encode.go @@ -84,7 +84,7 @@ func (g *Encoder) MakeFrames( frame := Frame{} indices = append(indices, j) - ys := polyEvals[g.ChunkLen*i : g.ChunkLen*(i+1)] + ys := polyEvals[g.ChunkLength*i : g.ChunkLength*(i+1)] err := rb.ReverseBitOrderFr(ys) if err != nil { return nil, nil, err diff --git a/encoding/rs/encode_test.go b/encoding/rs/encode_test.go index 78da01925c..dde2b7c333 100644 --- a/encoding/rs/encode_test.go +++ b/encoding/rs/encode_test.go @@ -7,6 +7,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/rs" ) @@ -14,7 +15,7 @@ func TestEncodeDecode_InvertsWhenSamplingAllFrames(t *testing.T) { teardownSuite := setupSuite(t) defer teardownSuite(t) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) enc, _ := rs.NewEncoder(params, true) require.NotNil(t, enc) @@ -37,7 +38,7 @@ func TestEncodeDecode_InvertsWhenSamplingMissingFrame(t *testing.T) { teardownSuite := setupSuite(t) defer teardownSuite(t) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) enc, _ := rs.NewEncoder(params, true) require.NotNil(t, enc) @@ -59,7 +60,7 @@ func TestEncodeDecode_ErrorsWhenNotEnoughSampledFrames(t *testing.T) { teardownSuite := setupSuite(t) defer teardownSuite(t) - params := rs.GetEncodingParams(numSys, numPar, 
uint64(len(GETTYSBURG_ADDRESS_BYTES))) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) enc, _ := rs.NewEncoder(params, true) require.NotNil(t, enc) diff --git a/encoding/rs/encoder.go b/encoding/rs/encoder.go index 2d5ec57ca3..dd52447f24 100644 --- a/encoding/rs/encoder.go +++ b/encoding/rs/encoder.go @@ -3,11 +3,12 @@ package rs import ( "math" + "github.com/Layr-Labs/eigenda/encoding" kzg "github.com/Layr-Labs/eigenda/pkg/kzg" ) type Encoder struct { - EncodingParams + encoding.EncodingParams Fs *kzg.FFTSettings @@ -22,7 +23,7 @@ type Encoder struct { // original data. When some systematic chunks are missing but identical parity chunk are // available, the receive can go through a Reed Solomon decoding to reconstruct the // original data. -func NewEncoder(params EncodingParams, verbose bool) (*Encoder, error) { +func NewEncoder(params encoding.EncodingParams, verbose bool) (*Encoder, error) { err := params.Validate() if err != nil { diff --git a/encoding/rs/encoder_fuzz_test.go b/encoding/rs/encoder_fuzz_test.go index fd5840893a..0c70d54644 100644 --- a/encoding/rs/encoder_fuzz_test.go +++ b/encoding/rs/encoder_fuzz_test.go @@ -3,6 +3,7 @@ package rs_test import ( "testing" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/rs" "github.com/stretchr/testify/assert" ) @@ -12,7 +13,7 @@ func FuzzOnlySystematic(f *testing.F) { f.Add(GETTYSBURG_ADDRESS_BYTES) f.Fuzz(func(t *testing.T, input []byte) { - params := rs.GetEncodingParams(10, 3, uint64(len(input))) + params := encoding.ParamsFromSysPar(10, 3, uint64(len(input))) enc, err := rs.NewEncoder(params, true) if err != nil { t.Errorf("Error making rs: %q", err) diff --git a/encoding/rs/frame_test.go b/encoding/rs/frame_test.go index 7e21232007..dc230664e5 100644 --- a/encoding/rs/frame_test.go +++ b/encoding/rs/frame_test.go @@ -3,6 +3,7 @@ package rs_test import ( "testing" + "github.com/Layr-Labs/eigenda/encoding" 
"github.com/Layr-Labs/eigenda/encoding/rs" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -12,7 +13,7 @@ func TestEncodeDecodeFrame_AreInverses(t *testing.T) { teardownSuite := setupSuite(t) defer teardownSuite(t) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) enc, _ := rs.NewEncoder(params, true) require.NotNil(t, enc) diff --git a/encoding/rs/interpolation.go b/encoding/rs/interpolation.go index c206e0efec..1dda2d632c 100644 --- a/encoding/rs/interpolation.go +++ b/encoding/rs/interpolation.go @@ -20,7 +20,7 @@ func (g *Encoder) GetInterpolationPolyEval( interpolationPoly []bls.Fr, j uint32, ) ([]bls.Fr, error) { - evals := make([]bls.Fr, g.ChunkLen) + evals := make([]bls.Fr, g.ChunkLength) w := g.Fs.ExpandedRootsOfUnity[uint64(j)] shiftedInterpolationPoly := make([]bls.Fr, len(interpolationPoly)) @@ -65,7 +65,7 @@ func (g *Encoder) GetInterpolationPolyEval( // Since both F W are invertible, c = W^-1 F^-1 d, convert it back. 
F W W^-1 F^-1 d = c func (g *Encoder) GetInterpolationPolyCoeff(chunk []bls.Fr, k uint32) ([]bls.Fr, error) { - coeffs := make([]bls.Fr, g.ChunkLen) + coeffs := make([]bls.Fr, g.ChunkLength) w := g.Fs.ExpandedRootsOfUnity[uint64(k)] shiftedInterpolationPoly := make([]bls.Fr, len(chunk)) err := g.Fs.InplaceFFT(chunk, shiftedInterpolationPoly, true) diff --git a/encoding/rs/utils_test.go b/encoding/rs/utils_test.go index 849f7f0fab..51c04f4ecc 100644 --- a/encoding/rs/utils_test.go +++ b/encoding/rs/utils_test.go @@ -6,14 +6,15 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/rs" ) func TestGetEncodingParams(t *testing.T) { - params := rs.GetEncodingParams(1, 4, 1000) + params := encoding.ParamsFromSysPar(1, 4, 1000) require.NotNil(t, params) - assert.Equal(t, params.ChunkLen, uint64(64)) + assert.Equal(t, params.ChunkLength, uint64(64)) // assert.Equal(t, params.DataLen, uint64(1000)) assert.Equal(t, params.NumChunks, uint64(8)) assert.Equal(t, params.NumEvaluations(), uint64(512)) @@ -37,7 +38,7 @@ func TestToFrArrayAndToByteArray_AreInverses(t *testing.T) { numEle := rs.GetNumElement(1000, BYTES_PER_COEFFICIENT) assert.Equal(t, numEle, uint64(33)) - params := rs.GetEncodingParams(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) + params := encoding.ParamsFromSysPar(numSys, numPar, uint64(len(GETTYSBURG_ADDRESS_BYTES))) enc, _ := rs.NewEncoder(params, true) require.NotNil(t, enc) diff --git a/encoding/serialization.go b/encoding/serialization.go new file mode 100644 index 0000000000..ecc7be5b8b --- /dev/null +++ b/encoding/serialization.go @@ -0,0 +1,99 @@ +package encoding + +import ( + "bytes" + "encoding/gob" + "encoding/json" + + "github.com/consensys/gnark-crypto/ecc/bn254" +) + +func (c *Frame) Serialize() ([]byte, error) { + return encode(c) +} + +func (c *Frame) Deserialize(data []byte) (*Frame, error) { + err := decode(data, c) + 
return c, err +} + +func (f *Frame) Encode() ([]byte, error) { + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + err := enc.Encode(f) + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +func Decode(b []byte) (Frame, error) { + var f Frame + buf := bytes.NewBuffer(b) + dec := gob.NewDecoder(buf) + err := dec.Decode(&f) + if err != nil { + return Frame{}, err + } + return f, nil +} + +func (c *G1Commitment) Serialize() ([]byte, error) { + return encode(c) +} + +func (c *G1Commitment) Deserialize(data []byte) (*G1Commitment, error) { + err := decode(data, c) + return c, err +} + +func (c *G1Commitment) UnmarshalJSON(data []byte) error { + var g1Point bn254.G1Affine + err := json.Unmarshal(data, &g1Point) + if err != nil { + return err + } + c.X = g1Point.X + c.Y = g1Point.Y + return nil +} + +func (c *G2Commitment) Serialize() ([]byte, error) { + return encode(c) +} + +func (c *G2Commitment) Deserialize(data []byte) (*G2Commitment, error) { + err := decode(data, c) + return c, err +} + +func (c *G2Commitment) UnmarshalJSON(data []byte) error { + var g2Point bn254.G2Affine + err := json.Unmarshal(data, &g2Point) + if err != nil { + return err + } + c.X = g2Point.X + c.Y = g2Point.Y + return nil +} + +func encode(obj any) ([]byte, error) { + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + err := enc.Encode(obj) + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +func decode(data []byte, obj any) error { + buf := bytes.NewBuffer(data) + dec := gob.NewDecoder(buf) + err := dec.Decode(obj) + if err != nil { + return err + } + return nil +} diff --git a/encoding/test/main.go b/encoding/test/main.go index b777623012..0a6e5c5b77 100644 --- a/encoding/test/main.go +++ b/encoding/test/main.go @@ -69,10 +69,10 @@ func TestKzgRs() { } // create encoding object - kzgGroup, _ := prover.NewProver(kzgConfig, true) + p, _ := prover.NewProver(kzgConfig, true) - params := rs.EncodingParams{NumChunks: 200, ChunkLen: 180} - enc, _ := 
kzgGroup.NewKzgEncoder(params) + params := encoding.EncodingParams{NumChunks: 200, ChunkLength: 180} + enc, _ := p.GetKzgEncoder(params) //inputFr := kzgrs.ToFrArray(inputBytes) inputSize := uint64(numSymbols) diff --git a/encoding/utils.go b/encoding/utils.go new file mode 100644 index 0000000000..d25e42bdbc --- /dev/null +++ b/encoding/utils.go @@ -0,0 +1,44 @@ +package encoding + +import ( + "golang.org/x/exp/constraints" + + "math" + + "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" +) + +// GetBlobLength converts from blob size in bytes to blob size in symbols +func GetBlobLength(blobSize uint) uint { + symSize := uint(bn254.BYTES_PER_COEFFICIENT) + return (blobSize + symSize - 1) / symSize +} + +// GetBlobSize converts from blob length in symbols to blob size in bytes. This is not an exact conversion. +func GetBlobSize(blobLength uint) uint { + return blobLength * bn254.BYTES_PER_COEFFICIENT +} + +// GetBlobLength converts from blob size in bytes to blob size in symbols +func GetEncodedBlobLength(blobLength uint, quorumThreshold, advThreshold uint8) uint { + return roundUpDivide(blobLength*100, uint(quorumThreshold-advThreshold)) +} + +func NextPowerOf2(d uint64) uint64 { + nextPower := math.Ceil(math.Log2(float64(d))) + return uint64(math.Pow(2.0, nextPower)) +} + +// func roundUpDivideBig(a, b *big.Int) *big.Int { + +// one := new(big.Int).SetUint64(1) +// num := new(big.Int).Sub(new(big.Int).Add(a, b), one) // a + b - 1 +// res := new(big.Int).Div(num, b) // (a + b - 1) / b +// return res + +// } + +func roundUpDivide[T constraints.Integer](a, b T) T { + return (a + b - 1) / b + +} diff --git a/go.mod b/go.mod index b9552cc540..a3ac4cf4bb 100644 --- a/go.mod +++ b/go.mod @@ -30,6 +30,7 @@ require ( github.com/wealdtech/go-merkletree v1.0.1-0.20230205101955-ec7a95ea11ca go.uber.org/automaxprocs v1.5.2 go.uber.org/goleak v1.2.0 + golang.org/x/exp v0.0.0-20230905200255-921286631fa9 google.golang.org/grpc v1.59.0 ) @@ -130,7 +131,6 @@ require ( 
go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.26.0 // indirect golang.org/x/arch v0.4.0 // indirect - golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect golang.org/x/mod v0.12.0 // indirect golang.org/x/oauth2 v0.11.0 // indirect golang.org/x/sync v0.3.0 // indirect diff --git a/inabox/tests/integration_suite_test.go b/inabox/tests/integration_suite_test.go index 52b9a27297..d1c9165a47 100644 --- a/inabox/tests/integration_suite_test.go +++ b/inabox/tests/integration_suite_test.go @@ -16,10 +16,10 @@ import ( "github.com/Layr-Labs/eigenda/common/logging" rollupbindings "github.com/Layr-Labs/eigenda/contracts/bindings/MockRollup" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" "github.com/Layr-Labs/eigenda/core/eth" coreindexer "github.com/Layr-Labs/eigenda/core/indexer" "github.com/Layr-Labs/eigenda/encoding/kzgrs" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" "github.com/Layr-Labs/eigenda/inabox/deploy" "github.com/Layr-Labs/eigenda/indexer" gcommon "github.com/ethereum/go-ethereum/common" @@ -156,18 +156,16 @@ func setupRetrievalClient(testConfig *deploy.Config) error { if err != nil { return err } - encoder, err := encoding.NewEncoder(encoding.EncoderConfig{ - KzgConfig: kzgrs.KzgConfig{ - G1Path: testConfig.Retriever.RETRIEVER_G1_PATH, - G2Path: testConfig.Retriever.RETRIEVER_G2_PATH, - G2PowerOf2Path: testConfig.Retriever.RETRIEVER_G2_POWER_OF_2_PATH, - CacheDir: testConfig.Retriever.RETRIEVER_CACHE_PATH, - NumWorker: 1, - SRSOrder: uint64(srsOrder), - SRSNumberToLoad: uint64(srsOrder), - Verbose: true, - PreloadEncoder: false, - }, + v, err := verifier.NewVerifier(&kzgrs.KzgConfig{ + G1Path: testConfig.Retriever.RETRIEVER_G1_PATH, + G2Path: testConfig.Retriever.RETRIEVER_G2_PATH, + G2PowerOf2Path: testConfig.Retriever.RETRIEVER_G2_POWER_OF_2_PATH, + CacheDir: testConfig.Retriever.RETRIEVER_CACHE_PATH, + NumWorker: 1, + SRSOrder: uint64(srsOrder), + SRSNumberToLoad: uint64(srsOrder), + 
Verbose: true, + PreloadEncoder: false, }, false) if err != nil { return err @@ -191,7 +189,7 @@ func setupRetrievalClient(testConfig *deploy.Config) error { return err } - retrievalClient, err = clients.NewRetrievalClient(logger, ics, agn, nodeClient, encoder, 10) + retrievalClient, err = clients.NewRetrievalClient(logger, ics, agn, nodeClient, v, 10) if err != nil { return err } diff --git a/node/config.go b/node/config.go index 2c2d7cb188..b77e676fe3 100644 --- a/node/config.go +++ b/node/config.go @@ -11,7 +11,7 @@ import ( "github.com/Layr-Labs/eigenda/common/geth" "github.com/Layr-Labs/eigenda/common/logging" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" + "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/node/flags" "github.com/Layr-Labs/eigensdk-go/crypto/bls" "github.com/ethereum/go-ethereum/accounts/keystore" @@ -70,7 +70,7 @@ type Config struct { EthClientConfig geth.EthClientConfig LoggingConfig logging.Config - EncoderConfig encoding.EncoderConfig + EncoderConfig kzgrs.KzgConfig } // NewConfig parses the Config from the provided flags or environment variables and @@ -156,7 +156,7 @@ func NewConfig(ctx *cli.Context) (*Config, error) { DbPath: ctx.GlobalString(flags.DbPathFlag.Name), PrivateBls: privateBls, EthClientConfig: ethClientConfig, - EncoderConfig: encoding.ReadCLIConfig(ctx), + EncoderConfig: kzgrs.ReadCLIConfig(ctx), LoggingConfig: logging.ReadCLIConfig(ctx, flags.FlagPrefix), BLSOperatorStateRetrieverAddr: ctx.GlobalString(flags.BlsOperatorStateRetrieverFlag.Name), EigenDAServiceManagerAddr: ctx.GlobalString(flags.EigenDAServiceManagerFlag.Name), diff --git a/node/flags/flags.go b/node/flags/flags.go index 313db1b4c8..c813053b98 100644 --- a/node/flags/flags.go +++ b/node/flags/flags.go @@ -6,7 +6,7 @@ import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/common/geth" "github.com/Layr-Labs/eigenda/common/logging" - 
"github.com/Layr-Labs/eigenda/core/encoding" + "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/urfave/cli" ) @@ -271,7 +271,7 @@ var optionalFlags = []cli.Flag{ func init() { Flags = append(requiredFlags, optionalFlags...) - Flags = append(Flags, encoding.CLIFlags(EnvVarPrefix)...) + Flags = append(Flags, kzgrs.CLIFlags(EnvVarPrefix)...) Flags = append(Flags, geth.EthClientFlags(EnvVarPrefix)...) Flags = append(Flags, logging.CLIFlags(EnvVarPrefix, FlagPrefix)...) } diff --git a/node/grpc/server.go b/node/grpc/server.go index c4b64610da..2102fd85b7 100644 --- a/node/grpc/server.go +++ b/node/grpc/server.go @@ -10,6 +10,7 @@ import ( pb "github.com/Layr-Labs/eigenda/api/grpc/node" "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/core" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/node" "github.com/prometheus/client_golang/prometheus" @@ -194,7 +195,7 @@ func (s *Server) RetrieveChunks(ctx context.Context, in *pb.RetrieveChunksReques if quorumInfo == nil { return nil, fmt.Errorf("invalid request: quorum ID %d not found in blob header", in.GetQuorumId()) } - encodedBlobSize := core.GetBlobSize(core.GetEncodedBlobLength(blobHeader.Length, quorumInfo.QuorumThreshold, quorumInfo.AdversaryThreshold)) + encodedBlobSize := encoding.GetBlobSize(encoding.GetEncodedBlobLength(blobHeader.Length, quorumInfo.QuorumThreshold, quorumInfo.AdversaryThreshold)) rate := quorumInfo.QuorumRate s.mu.Lock() diff --git a/node/grpc/server_load_test.go b/node/grpc/server_load_test.go index a5eba90b86..ccc578fc05 100644 --- a/node/grpc/server_load_test.go +++ b/node/grpc/server_load_test.go @@ -9,16 +9,17 @@ import ( "github.com/Layr-Labs/eigenda/core" "github.com/Layr-Labs/eigenda/disperser/batcher" dispatcher "github.com/Layr-Labs/eigenda/disperser/batcher/grpc" + "github.com/Layr-Labs/eigenda/encoding" "github.com/stretchr/testify/assert" ) func makeBatch(t *testing.T, blobSize int, numBlobs int, advThreshold, quorumThreshold int, 
refBlockNumber uint) (*core.BatchHeader, map[core.OperatorID][]*core.BlobMessage) { - encoder, err := makeTestEncoder() + p, _, err := makeTestComponents() assert.NoError(t, err) asn := &core.StdAssignmentCoordinator{} blobHeaders := make([]*core.BlobHeader, numBlobs) - blobChunks := make([][]*core.Chunk, numBlobs) + blobChunks := make([][]*encoding.Frame, numBlobs) blobMessagesByOp := make(map[core.OperatorID][]*core.BlobMessage) for i := 0; i < numBlobs; i++ { // create data @@ -29,7 +30,7 @@ func makeBatch(t *testing.T, blobSize int, numBlobs int, advThreshold, quorumThr operatorState, err := chainState.GetOperatorState(context.Background(), 0, []core.QuorumID{0}) assert.NoError(t, err) - chunkLength, err := asn.CalculateChunkLength(operatorState, core.GetBlobLength(uint(blobSize)), 0, &core.SecurityParam{ + chunkLength, err := asn.CalculateChunkLength(operatorState, encoding.GetBlobLength(uint(blobSize)), 0, &core.SecurityParam{ QuorumID: 0, AdversaryThreshold: uint8(advThreshold), QuorumThreshold: uint8(quorumThreshold), @@ -47,7 +48,7 @@ func makeBatch(t *testing.T, blobSize int, numBlobs int, advThreshold, quorumThr // encode data - assignments, info, err := asn.GetAssignments(operatorState, core.GetBlobLength(uint(blobSize)), blobQuorumInfo) + assignments, info, err := asn.GetAssignments(operatorState, encoding.GetBlobLength(uint(blobSize)), blobQuorumInfo) assert.NoError(t, err) quorumInfo := batcher.QuorumInfo{ Assignments: assignments, @@ -55,10 +56,9 @@ func makeBatch(t *testing.T, blobSize int, numBlobs int, advThreshold, quorumThr QuantizationFactor: batcher.QuantizationFactor, } - params, err := core.GetEncodingParams(chunkLength, quorumInfo.Info.TotalChunks) - assert.NoError(t, err) + params := encoding.ParamsFromMins(chunkLength, quorumInfo.Info.TotalChunks) t.Logf("Encoding params: ChunkLength: %d, NumChunks: %d", params.ChunkLength, params.NumChunks) - commits, chunks, err := encoder.Encode(data, params) + commits, chunks, err := 
p.EncodeAndProve(data, params) assert.NoError(t, err) blobChunks[i] = chunks diff --git a/node/grpc/server_test.go b/node/grpc/server_test.go index d584f0b996..b2b48d2da2 100644 --- a/node/grpc/server_test.go +++ b/node/grpc/server_test.go @@ -14,9 +14,11 @@ import ( "github.com/Layr-Labs/eigenda/common/logging" commonmock "github.com/Layr-Labs/eigenda/common/mock" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" core_mock "github.com/Layr-Labs/eigenda/core/mock" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" "github.com/Layr-Labs/eigenda/node" "github.com/Layr-Labs/eigenda/node/grpc" "github.com/Layr-Labs/eigensdk-go/metrics" @@ -41,9 +43,10 @@ func TestMain(m *testing.M) { os.Exit(m.Run()) } -// makeTestEncoder makes an encoder currently using the only supported backend. -func makeTestEncoder() (core.Encoder, error) { - config := kzgrs.KzgConfig{ +// makeTestComponents makes a prover and verifier currently using the only supported backend.
+func makeTestComponents() (encoding.Prover, encoding.Verifier, error) { + + config := &kzgrs.KzgConfig{ G1Path: "../../inabox/resources/kzg/g1.point.300000", G2Path: "../../inabox/resources/kzg/g2.point.300000", CacheDir: "../../inabox/resources/kzg/SRSTables", @@ -52,7 +55,17 @@ func makeTestEncoder() (core.Encoder, error) { NumWorker: uint64(runtime.GOMAXPROCS(0)), } - return encoding.NewEncoder(encoding.EncoderConfig{KzgConfig: config}, true) + p, err := prover.NewProver(config, true) + if err != nil { + return nil, nil, err + } + + v, err := verifier.NewVerifier(config, true) + if err != nil { + return nil, nil, err + } + + return p, v, nil } func newTestServer(t *testing.T, mockValidator bool) *grpc.Server { @@ -99,7 +112,7 @@ func newTestServer(t *testing.T, mockValidator bool) *grpc.Server { val = mockVal } else { - enc, err := makeTestEncoder() + _, v, err := makeTestComponents() if err != nil { panic("failed to create test encoder") } @@ -111,7 +124,7 @@ func newTestServer(t *testing.T, mockValidator bool) *grpc.Server { panic("failed to create test encoder") } - val = core.NewChunkValidator(enc, asn, cst, opID) + val = core.NewChunkValidator(v, asn, cst, opID) } node := &node.Node{ @@ -133,7 +146,7 @@ func makeStoreChunksRequest(t *testing.T, quorumThreshold, adversaryThreshold ui _, err = commitY.SetString("9207254729396071334325696286939045899948985698134704137261649190717970615186") assert.NoError(t, err) - commitment := &core.G1Commitment{ + commitment := &encoding.G1Commitment{ X: commitX, Y: commitY, } @@ -147,7 +160,7 @@ func makeStoreChunksRequest(t *testing.T, quorumThreshold, adversaryThreshold ui _, err = lengthYA1.SetString("4082367875863433681332203403145435568316851327593401208105741076214120093531") assert.NoError(t, err) - var lengthProof, lengthCommitment core.G2Commitment + var lengthProof, lengthCommitment encoding.G2Commitment lengthProof.X.A0 = lengthXA0 lengthProof.X.A1 = lengthXA1 lengthProof.Y.A0 = lengthYA0 @@ -166,7 +179,7 @@ 
func makeStoreChunksRequest(t *testing.T, quorumThreshold, adversaryThreshold ui blobHeaders := []*core.BlobHeader{ { - BlobCommitments: core.BlobCommitments{ + BlobCommitments: encoding.BlobCommitments{ Commitment: commitment, LengthCommitment: &lengthCommitment, LengthProof: &lengthProof, @@ -175,7 +188,7 @@ func makeStoreChunksRequest(t *testing.T, quorumThreshold, adversaryThreshold ui QuorumInfos: []*core.BlobQuorumInfo{quorumHeader}, }, { - BlobCommitments: core.BlobCommitments{ + BlobCommitments: encoding.BlobCommitments{ Commitment: commitment, LengthCommitment: &lengthCommitment, LengthProof: &lengthProof, @@ -257,9 +270,9 @@ func TestRetrieveChunks(t *testing.T) { QuorumId: 0, }) assert.NoError(t, err) - recovered, err := new(core.Chunk).Deserialize(retrievalReply.GetChunks()[0]) + recovered, err := new(encoding.Frame).Deserialize(retrievalReply.GetChunks()[0]) assert.NoError(t, err) - chunk, err := new(core.Chunk).Deserialize(encodedChunk) + chunk, err := new(encoding.Frame).Deserialize(encodedChunk) assert.NoError(t, err) assert.Equal(t, recovered, chunk) } diff --git a/node/grpc/utils.go b/node/grpc/utils.go index 8f6bcf7eda..5db612e29a 100644 --- a/node/grpc/utils.go +++ b/node/grpc/utils.go @@ -7,6 +7,7 @@ import ( pb "github.com/Layr-Labs/eigenda/api/grpc/node" "github.com/Layr-Labs/eigenda/core" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/node" "github.com/consensys/gnark-crypto/ecc/bn254/fp" "github.com/wealdtech/go-merkletree" @@ -38,9 +39,9 @@ func GetBlobMessages(in *pb.StoreChunksRequest) ([]*core.BlobMessage, error) { bundles := make(map[core.QuorumID]core.Bundle, len(blob.GetBundles())) for i, chunks := range blob.GetBundles() { quorumID := blob.GetHeader().GetQuorumHeaders()[i].QuorumId - bundles[uint8(quorumID)] = make([]*core.Chunk, len(chunks.GetChunks())) + bundles[uint8(quorumID)] = make([]*encoding.Frame, len(chunks.GetChunks())) for j, data := range chunks.GetChunks() { - chunk, err := 
new(core.Chunk).Deserialize(data) + chunk, err := new(encoding.Frame).Deserialize(data) if err != nil { return nil, err } @@ -60,11 +61,11 @@ func GetBlobMessages(in *pb.StoreChunksRequest) ([]*core.BlobMessage, error) { func GetBlobHeaderFromProto(h *pb.BlobHeader) (*core.BlobHeader, error) { commitX := new(fp.Element).SetBytes(h.GetCommitment().GetX()) commitY := new(fp.Element).SetBytes(h.GetCommitment().GetY()) - commitment := &core.G1Commitment{ + commitment := &encoding.G1Commitment{ X: *commitX, Y: *commitY, } - var lengthCommitment, lengthProof core.G2Commitment + var lengthCommitment, lengthProof encoding.G2Commitment if h.GetLengthCommitment() != nil { lengthCommitment.X.A0 = *new(fp.Element).SetBytes(h.GetLengthCommitment().GetXA0()) lengthCommitment.X.A1 = *new(fp.Element).SetBytes(h.GetLengthCommitment().GetXA1()) @@ -92,7 +93,7 @@ func GetBlobHeaderFromProto(h *pb.BlobHeader) (*core.BlobHeader, error) { } return &core.BlobHeader{ - BlobCommitments: core.BlobCommitments{ + BlobCommitments: encoding.BlobCommitments{ Commitment: commitment, LengthCommitment: &lengthCommitment, LengthProof: &lengthProof, diff --git a/node/node.go b/node/node.go index 3a1f63c5ee..66105f41b6 100644 --- a/node/node.go +++ b/node/node.go @@ -11,6 +11,7 @@ import ( "time" "github.com/Layr-Labs/eigenda/common/pubip" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" gethcommon "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/crypto" "github.com/prometheus/client_golang/prometheus" @@ -19,7 +20,6 @@ import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/common/geth" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" "github.com/Layr-Labs/eigenda/core/eth" "github.com/Layr-Labs/eigenda/core/indexer" "github.com/Layr-Labs/eigensdk-go/chainio/constructor" @@ -113,12 +113,12 @@ func NewNode(config *Config, pubIPProvider pubip.Provider, logger common.Logger) nodeApi := nodeapi.NewNodeApi(AppName, 
SemVer, ":"+config.NodeApiPort, logger) // Make validator - enc, err := encoding.NewEncoder(config.EncoderConfig, false) + v, err := verifier.NewVerifier(&config.EncoderConfig, false) if err != nil { return nil, err } asgn := &core.StdAssignmentCoordinator{} - validator := core.NewChunkValidator(enc, asgn, cst, config.ID) + validator := core.NewChunkValidator(v, asgn, cst, config.ID) // Create new store diff --git a/node/store_test.go b/node/store_test.go index 1802fa881a..c2e746ea43 100644 --- a/node/store_test.go +++ b/node/store_test.go @@ -10,6 +10,7 @@ import ( pb "github.com/Layr-Labs/eigenda/api/grpc/node" "github.com/Layr-Labs/eigenda/common/mock" "github.com/Layr-Labs/eigenda/core" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/node" "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" "github.com/Layr-Labs/eigensdk-go/metrics" @@ -65,40 +66,40 @@ func CreateBatch(t *testing.T) (*core.BatchHeader, []*core.BlobMessage, []*pb.Bl }, ChunkLength: 10, } - chunk1 := &core.Chunk{ + chunk1 := &encoding.Frame{ Proof: commitment, - Coeffs: []core.Symbol{bn254.ONE}, + Coeffs: []encoding.Symbol{bn254.ONE}, } blobMessage := []*core.BlobMessage{ { BlobHeader: &core.BlobHeader{ - BlobCommitments: core.BlobCommitments{ - Commitment: (*core.G1Commitment)(&commitment), - LengthCommitment: (*core.G2Commitment)(&lengthCommitment), - LengthProof: (*core.LengthProof)(&lengthProof), + BlobCommitments: encoding.BlobCommitments{ + Commitment: (*encoding.G1Commitment)(&commitment), + LengthCommitment: (*encoding.G2Commitment)(&lengthCommitment), + LengthProof: (*encoding.LengthProof)(&lengthProof), Length: 48, }, QuorumInfos: []*core.BlobQuorumInfo{quorumHeader}, }, Bundles: core.Bundles{ - core.QuorumID(0): []*core.Chunk{ + core.QuorumID(0): []*encoding.Frame{ chunk1, }, }, }, { BlobHeader: &core.BlobHeader{ - BlobCommitments: core.BlobCommitments{ - Commitment: (*core.G1Commitment)(&commitment), - LengthCommitment: (*core.G2Commitment)(&lengthCommitment), - 
LengthProof: (*core.G2Commitment)(&lengthProof), + BlobCommitments: encoding.BlobCommitments{ + Commitment: (*encoding.G1Commitment)(&commitment), + LengthCommitment: (*encoding.G2Commitment)(&lengthCommitment), + LengthProof: (*encoding.G2Commitment)(&lengthProof), Length: 50, }, QuorumInfos: []*core.BlobQuorumInfo{quorumHeader}, }, Bundles: core.Bundles{ - core.QuorumID(0): []*core.Chunk{ + core.QuorumID(0): []*encoding.Frame{ chunk1, }, }, diff --git a/retriever/cmd/main.go b/retriever/cmd/main.go index 5318816296..089dd2e99b 100644 --- a/retriever/cmd/main.go +++ b/retriever/cmd/main.go @@ -13,10 +13,10 @@ import ( "github.com/Layr-Labs/eigenda/common/healthcheck" "github.com/Layr-Labs/eigenda/common/logging" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" "github.com/Layr-Labs/eigenda/core/eth" coreindexer "github.com/Layr-Labs/eigenda/core/indexer" "github.com/Layr-Labs/eigenda/core/thegraph" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" "github.com/Layr-Labs/eigenda/retriever" retrivereth "github.com/Layr-Labs/eigenda/retriever/eth" "github.com/Layr-Labs/eigenda/retriever/flags" @@ -75,7 +75,7 @@ func RetrieverMain(ctx *cli.Context) error { } nodeClient := clients.NewNodeClient(config.Timeout) - encoder, err := encoding.NewEncoder(config.EncoderConfig, false) + v, err := verifier.NewVerifier(&config.EncoderConfig, false) if err != nil { log.Fatalln("could not start tcp listener", err) } @@ -126,13 +126,13 @@ func RetrieverMain(ctx *cli.Context) error { } agn := &core.StdAssignmentCoordinator{} - retrievalClient, err := clients.NewRetrievalClient(logger, ics, agn, nodeClient, encoder, config.NumConnections) + retrievalClient, err := clients.NewRetrievalClient(logger, ics, agn, nodeClient, v, config.NumConnections) if err != nil { log.Fatalln("could not start tcp listener", err) } chainClient := retrivereth.NewChainClient(gethClient, logger) - retrieverServiceServer := retriever.NewServer(config, logger, 
retrievalClient, encoder, ics, chainClient) + retrieverServiceServer := retriever.NewServer(config, logger, retrievalClient, v, ics, chainClient) if err = retrieverServiceServer.Start(context.Background()); err != nil { log.Fatalln("failed to start retriever service server", err) } diff --git a/retriever/config.go b/retriever/config.go index 6c3c09655c..94f0ab4998 100644 --- a/retriever/config.go +++ b/retriever/config.go @@ -5,14 +5,14 @@ import ( "github.com/Layr-Labs/eigenda/common/geth" "github.com/Layr-Labs/eigenda/common/logging" - "github.com/Layr-Labs/eigenda/core/encoding" + "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/indexer" "github.com/Layr-Labs/eigenda/retriever/flags" "github.com/urfave/cli" ) type Config struct { - EncoderConfig encoding.EncoderConfig + EncoderConfig kzgrs.KzgConfig EthClientConfig geth.EthClientConfig LoggerConfig logging.Config IndexerConfig indexer.Config @@ -29,7 +29,7 @@ type Config struct { func NewConfig(ctx *cli.Context) *Config { return &Config{ - EncoderConfig: encoding.ReadCLIConfig(ctx), + EncoderConfig: kzgrs.ReadCLIConfig(ctx), EthClientConfig: geth.ReadEthClientConfig(ctx), LoggerConfig: logging.ReadCLIConfig(ctx, flags.FlagPrefix), IndexerConfig: indexer.ReadIndexerConfig(ctx), diff --git a/retriever/flags/flags.go b/retriever/flags/flags.go index b68d16c856..fb18b9f048 100644 --- a/retriever/flags/flags.go +++ b/retriever/flags/flags.go @@ -4,7 +4,7 @@ import ( "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/common/geth" "github.com/Layr-Labs/eigenda/common/logging" - "github.com/Layr-Labs/eigenda/core/encoding" + "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/indexer" "github.com/urfave/cli" ) @@ -103,7 +103,7 @@ var Flags []cli.Flag func init() { Flags = append(requiredFlags, optionalFlags...) - Flags = append(Flags, encoding.CLIFlags(envPrefix)...) + Flags = append(Flags, kzgrs.CLIFlags(envPrefix)...) 
Flags = append(Flags, geth.EthClientFlags(envPrefix)...) Flags = append(Flags, logging.CLIFlags(envPrefix, FlagPrefix)...) Flags = append(Flags, indexer.CLIFlags(envPrefix)...) diff --git a/retriever/server.go b/retriever/server.go index 9d7529f80d..8cc4670132 100644 --- a/retriever/server.go +++ b/retriever/server.go @@ -8,6 +8,7 @@ import ( "github.com/Layr-Labs/eigenda/clients" "github.com/Layr-Labs/eigenda/common" "github.com/Layr-Labs/eigenda/core" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/retriever/eth" gcommon "github.com/ethereum/go-ethereum/common" ) @@ -27,7 +28,7 @@ func NewServer( config *Config, logger common.Logger, retrievalClient clients.RetrievalClient, - encoder core.Encoder, + verifier encoding.Verifier, indexedState core.IndexedChainState, chainClient eth.ChainClient, ) *Server { diff --git a/retriever/server_test.go b/retriever/server_test.go index 2217f4edda..e7aaac67dd 100644 --- a/retriever/server_test.go +++ b/retriever/server_test.go @@ -11,8 +11,8 @@ import ( commock "github.com/Layr-Labs/eigenda/common/mock" binding "github.com/Layr-Labs/eigenda/contracts/bindings/EigenDAServiceManager" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" coremock "github.com/Layr-Labs/eigenda/core/mock" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" @@ -32,32 +32,29 @@ var ( gettysburgAddressBytes = []byte("Fourscore and seven years ago our fathers brought forth, on this continent, a new nation, conceived in liberty, and dedicated to the proposition that all men are created equal. Now we are engaged in a great civil war, testing whether that nation, or any nation so conceived, and so dedicated, can long endure. We are met on a great battle-field of that war. 
We have come to dedicate a portion of that field, as a final resting-place for those who here gave their lives, that that nation might live. It is altogether fitting and proper that we should do this. But, in a larger sense, we cannot dedicate, we cannot consecrate—we cannot hallow—this ground. The brave men, living and dead, who struggled here, have consecrated it far above our poor power to add or detract. The world will little note, nor long remember what we say here, but it can never forget what they did here. It is for us the living, rather, to be dedicated here to the unfinished work which they who fought here have thus far so nobly advanced. It is rather for us to be here dedicated to the great task remaining before us—that from these honored dead we take increased devotion to that cause for which they here gave the last full measure of devotion—that we here highly resolve that these dead shall not have died in vain—that this nation, under God, shall have a new birth of freedom, and that government of the people, by the people, for the people, shall not perish from the earth.") ) -func makeTestEncoder() (core.Encoder, error) { +func makeTestComponents() (encoding.Prover, encoding.Verifier, error) { config := &kzgrs.KzgConfig{ G1Path: "../inabox/resources/kzg/g1.point", G2Path: "../inabox/resources/kzg/g2.point", - G2PowerOf2Path: "../inabox/resources/kzg/g2.point.powerOf2", CacheDir: "../inabox/resources/kzg/SRSTables", SRSOrder: 3000, SRSNumberToLoad: 3000, NumWorker: uint64(runtime.GOMAXPROCS(0)), } - kzgEncoderGroup, err := prover.NewProver(config, true) + p, err := prover.NewProver(config, true) if err != nil { - return nil, err + return nil, nil, err } - kzgVerifierGroup, err := verifier.NewVerifier(config, true) + v, err := verifier.NewVerifier(config, true) if err != nil { - return nil, err + return nil, nil, err } - return &encoding.Encoder{ - EncoderGroup: kzgEncoderGroup, - VerifierGroup: kzgVerifierGroup, - }, nil + return p, v, nil } + func 
newTestServer(t *testing.T) *retriever.Server { var err error config := &retriever.Config{} @@ -69,14 +66,14 @@ func newTestServer(t *testing.T) *retriever.Server { log.Fatalf("failed to create new mocked chain data: %s", err) } - encoder, err := makeTestEncoder() + _, v, err := makeTestComponents() if err != nil { log.Fatal(err) } retrievalClient = &clientsmock.MockRetrievalClient{} chainClient = mock.NewMockChainClient() - return retriever.NewServer(config, logger, retrievalClient, encoder, indexedChainState, chainClient) + return retriever.NewServer(config, logger, retrievalClient, v, indexedChainState, chainClient) } func TestRetrieveBlob(t *testing.T) { diff --git a/test/integration_test.go b/test/integration_test.go index 7f6e25b4f9..24c1b1b2a2 100644 --- a/test/integration_test.go +++ b/test/integration_test.go @@ -17,6 +17,8 @@ import ( "github.com/Layr-Labs/eigenda/common/pubip" "github.com/Layr-Labs/eigenda/encoding/kzgrs" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/prover" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" "github.com/consensys/gnark-crypto/ecc/bn254/fp" clientsmock "github.com/Layr-Labs/eigenda/clients/mock" @@ -33,12 +35,12 @@ import ( "github.com/Layr-Labs/eigenda/common/logging" commonmock "github.com/Layr-Labs/eigenda/common/mock" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" coremock "github.com/Layr-Labs/eigenda/core/mock" "github.com/Layr-Labs/eigenda/disperser" "github.com/Layr-Labs/eigenda/disperser/batcher" batchermock "github.com/Layr-Labs/eigenda/disperser/batcher/mock" "github.com/Layr-Labs/eigenda/disperser/common/inmem" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/node" nodegrpc "github.com/Layr-Labs/eigenda/node/grpc" "github.com/Layr-Labs/eigenda/pkg/kzg/bn254" @@ -54,7 +56,8 @@ import ( ) var ( - enc core.Encoder + p encoding.Prover + v encoding.Verifier asn core.AssignmentCoordinator gettysburgAddressBytes = []byte("Fourscore and seven years ago 
our fathers brought forth, on this continent, a new nation, conceived in liberty, and dedicated to the proposition that all men are created equal. Now we are engaged in a great civil war, testing whether that nation, or any nation so conceived, and so dedicated, can long endure. We are met on a great battle-field of that war. We have come to dedicate a portion of that field, as a final resting-place for those who here gave their lives, that that nation might live. It is altogether fitting and proper that we should do this. But, in a larger sense, we cannot dedicate, we cannot consecrate—we cannot hallow—this ground. The brave men, living and dead, who struggled here, have consecrated it far above our poor power to add or detract. The world will little note, nor long remember what we say here, but it can never forget what they did here. It is for us the living, rather, to be dedicated here to the unfinished work which they who fought here have thus far so nobly advanced. It is rather for us to be here dedicated to the great task remaining before us—that from these honored dead we take increased devotion to that cause for which they here gave the last full measure of devotion—that we here highly resolve that these dead shall not have died in vain—that this nation, under God, shall have a new birth of freedom, and that government of the people, by the people, for the people, shall not perish from the earth.") @@ -71,13 +74,14 @@ const ( ) func init() { - enc = mustMakeTestEncoder() + p, v = mustMakeTestComponents() asn = &core.StdAssignmentCoordinator{} } // makeTestEncoder makes an encoder currently using the only supported backend. 
-func mustMakeTestEncoder() core.Encoder { - config := kzgrs.KzgConfig{ +func mustMakeTestComponents() (encoding.Prover, encoding.Verifier) { + + config := &kzgrs.KzgConfig{ G1Path: "../inabox/resources/kzg/g1.point", G2Path: "../inabox/resources/kzg/g2.point", CacheDir: "../inabox/resources/kzg/SRSTables", @@ -86,15 +90,17 @@ func mustMakeTestEncoder() core.Encoder { NumWorker: uint64(runtime.GOMAXPROCS(0)), } - encoder, err := encoding.NewEncoder( - encoding.EncoderConfig{KzgConfig: config}, - true, - ) + p, err := prover.NewProver(config, true) if err != nil { - log.Fatalln("failed to initialize new encoder") + log.Fatal(err) } - return encoder + v, err := verifier.NewVerifier(config, true) + if err != nil { + log.Fatal(err) + } + + return p, v } func mustMakeTestBlob() core.Blob { @@ -146,13 +152,13 @@ func mustMakeDisperser(t *testing.T, cst core.IndexedChainState, store disperser ChainWriteTimeout: 10 * time.Second, } - enc0 := mustMakeTestEncoder() + p0, _ := mustMakeTestComponents() metrics := encoder.NewMetrics("9000", logger) grpcEncoder := encoder.NewServer(encoder.ServerConfig{ GrpcPort: encoderPort, MaxConcurrentRequests: 16, RequestPoolSize: 32, - }, logger, enc0, metrics) + }, logger, p0, metrics) encoderClient, err := encoder.NewEncoderClient(batcherConfig.EncoderSocket, 10*time.Second) if err != nil { @@ -256,8 +262,8 @@ func mustMakeOperators(t *testing.T, cst *coremock.ChainDataMock, logger common. 
} // creating a new instance of encoder instead of sharing enc because enc is not thread safe - encoder := mustMakeTestEncoder() - val := core.NewChunkValidator(encoder, asn, cst, id) + _, v0 := mustMakeTestComponents() + val := core.NewChunkValidator(v0, asn, cst, id) noopMetrics := metrics.NewNoopMetrics() reg := prometheus.NewRegistry() @@ -327,7 +333,7 @@ func mustMakeRetriever(cst core.IndexedChainState, logger common.Logger) (*commo gethClient := &commonmock.MockEthClient{} retrievalClient := &clientsmock.MockRetrievalClient{} chainClient := retrievermock.NewMockChainClient() - server := retriever.NewServer(config, logger, retrievalClient, enc, cst, chainClient) + server := retriever.NewServer(config, logger, retrievalClient, v, cst, chainClient) return gethClient, TestRetriever{ Server: server, @@ -475,7 +481,7 @@ func TestDispersalAndRetrieval(t *testing.T) { operatorState, err := cst.GetOperatorState(ctx, 0, []core.QuorumID{0}) assert.NoError(t, err) - blobLength := core.GetBlobLength(uint(len(blob.Data))) + blobLength := encoding.GetBlobLength(uint(len(blob.Data))) chunkLength, err := asn.CalculateChunkLength(operatorState, blobLength, 0, blob.RequestHeader.SecurityParams[0]) assert.NoError(t, err) @@ -491,11 +497,13 @@ func TestDispersalAndRetrieval(t *testing.T) { assignments, info, err := asn.GetAssignments(operatorState, blobLength, blobQuorumInfo) assert.NoError(t, err) - var indices []core.ChunkNumber - var chunks []*core.Chunk + var indices []encoding.ChunkNumber + var chunks []*encoding.Frame var blobHeader *core.BlobHeader for _, op := range ops { + fmt.Println("Processing operator: ", hexutil.Encode(op.Node.Config.ID[:])) + // check that blob headers can be retrieved from operators headerReply, err := op.Server.GetBlobHeader(ctx, &nodepb.GetBlobHeaderRequest{ BatchHeaderHash: batchHeaderHash, @@ -503,11 +511,11 @@ func TestDispersalAndRetrieval(t *testing.T) { QuorumId: uint32(0), }) assert.NoError(t, err) - actualCommitment := 
&core.G1Commitment{ + actualCommitment := &encoding.G1Commitment{ X: *new(fp.Element).SetBytes(headerReply.GetBlobHeader().GetCommitment().GetX()), Y: *new(fp.Element).SetBytes(headerReply.GetBlobHeader().GetCommitment().GetY()), } - var actualLengthCommitment, actualLengthProof core.G2Commitment + var actualLengthCommitment, actualLengthProof encoding.G2Commitment actualLengthCommitment.X.A0.SetBytes(headerReply.GetBlobHeader().GetLengthCommitment().GetXA0()) actualLengthCommitment.X.A1.SetBytes(headerReply.GetBlobHeader().GetLengthCommitment().GetXA1()) actualLengthCommitment.Y.A0.SetBytes(headerReply.GetBlobHeader().GetLengthCommitment().GetYA0()) @@ -543,7 +551,7 @@ func TestDispersalAndRetrieval(t *testing.T) { assignment, ok := assignments[op.Node.Config.ID] assert.True(t, ok) for _, data := range chunksReply.GetChunks() { - chunk, err := new(core.Chunk).Deserialize(data) + chunk, err := new(encoding.Frame).Deserialize(data) assert.NoError(t, err) chunks = append(chunks, chunk) } @@ -551,9 +559,9 @@ func TestDispersalAndRetrieval(t *testing.T) { indices = append(indices, assignment.GetIndices()...) 
} - encodingParams, err := core.GetEncodingParams(chunkLength, info.TotalChunks) + encodingParams := encoding.ParamsFromMins(chunkLength, info.TotalChunks) assert.NoError(t, err) - recovered, err := enc.Decode(chunks, indices, encodingParams, uint64(blobHeader.Length)*bn254.BYTES_PER_COEFFICIENT) + recovered, err := v.Decode(chunks, indices, encodingParams, uint64(blobHeader.Length)*bn254.BYTES_PER_COEFFICIENT) assert.NoError(t, err) recovered = bytes.TrimRight(recovered, "\x00") assert.Equal(t, gettysburgAddressBytes, recovered) diff --git a/test/synthetic-test/synthetic_client_test.go b/test/synthetic-test/synthetic_client_test.go index 10111155e7..9b6b4316ab 100644 --- a/test/synthetic-test/synthetic_client_test.go +++ b/test/synthetic-test/synthetic_client_test.go @@ -18,6 +18,7 @@ import ( "testing" "time" + "github.com/consensys/gnark-crypto/ecc/bn254/fp" "github.com/google/uuid" "github.com/shurcooL/graphql" @@ -29,12 +30,13 @@ import ( "github.com/Layr-Labs/eigenda/common/logging" rollupbindings "github.com/Layr-Labs/eigenda/contracts/bindings/MockRollup" "github.com/Layr-Labs/eigenda/core" - "github.com/Layr-Labs/eigenda/core/encoding" "github.com/Layr-Labs/eigenda/core/eth" coremock "github.com/Layr-Labs/eigenda/core/mock" "github.com/Layr-Labs/eigenda/core/thegraph" encoder_rpc "github.com/Layr-Labs/eigenda/disperser/api/grpc/encoder" + "github.com/Layr-Labs/eigenda/encoding" "github.com/Layr-Labs/eigenda/encoding/kzgrs" + "github.com/Layr-Labs/eigenda/encoding/kzgrs/verifier" gcommon "github.com/ethereum/go-ethereum/common" "github.com/stretchr/testify/assert" "google.golang.org/grpc" @@ -220,22 +222,20 @@ func setupRetrievalClient(ethClient common.EthClient, retrievalClientConfig *Ret if err != nil { return err } - encoder, err := encoding.NewEncoder(encoding.EncoderConfig{ - KzgConfig: kzgrs.KzgConfig{ - G1Path: retrievalClientConfig.RetrieverG1Path, - G2Path: retrievalClientConfig.RetrieverG2Path, - CacheDir: retrievalClientConfig.RetrieverCachePath, 
- NumWorker: 1, - SRSOrder: uint64(srsOrder), - Verbose: true, - PreloadEncoder: true, - }, - }) + v, err := verifier.NewVerifier(&kzgrs.KzgConfig{ + G1Path: retrievalClientConfig.RetrieverG1Path, + G2Path: retrievalClientConfig.RetrieverG2Path, + CacheDir: retrievalClientConfig.RetrieverCachePath, + NumWorker: 1, + SRSOrder: uint64(srsOrder), + Verbose: true, + PreloadEncoder: true, + }, true) if err != nil { return err } - retrievalClient, err = clients.NewRetrievalClient(logger, indexedChainStateClient, agn, nodeClient, encoder, 10) + retrievalClient, err = clients.NewRetrievalClient(logger, indexedChainStateClient, agn, nodeClient, v, 10) if err != nil { return err } @@ -486,13 +486,13 @@ func retrieverClientBlobRetrieve(blobStatusReply *disperser_rpc.BlobStatusReply, } func blobHeaderFromProto(blobHeader *disperser_rpc.BlobHeader) rollupbindings.IEigenDAServiceManagerBlobHeader { - logger := testSuite.Logger - commitmentBytes := blobHeader.GetCommitment() - commitment, err := new(core.Commitment).Deserialize(commitmentBytes) - if err != nil { - logger.Printf("failed to deserialize commitment: %s", err) - return rollupbindings.IEigenDAServiceManagerBlobHeader{} + commitX := new(fp.Element).SetBytes(blobHeader.GetCommitment().GetX()) + commitY := new(fp.Element).SetBytes(blobHeader.GetCommitment().GetY()) + commitment := &encoding.G1Commitment{ + X: *commitX, + Y: *commitY, } + quorums := make([]rollupbindings.IEigenDAServiceManagerQuorumBlobParam, len(blobHeader.GetBlobQuorumParams())) for i, quorum := range blobHeader.GetBlobQuorumParams() { quorums[i] = rollupbindings.IEigenDAServiceManagerQuorumBlobParam{ @@ -513,7 +513,7 @@ func blobHeaderFromProto(blobHeader *disperser_rpc.BlobHeader) rollupbindings.IE } } -func blobVerificationProofFromProto(verificationProof *disperser_rpc.BlobVerificationProof) rollupbindings.EigenDABlobUtilsBlobVerificationProof { +func blobVerificationProofFromProto(verificationProof *disperser_rpc.BlobVerificationProof) 
rollupbindings.EigenDARollupUtilsBlobVerificationProof { logger := testSuite.Logger batchMetadataProto := verificationProof.GetBatchMetadata() batchHeaderProto := verificationProof.GetBatchMetadata().GetBatchHeader() @@ -543,7 +543,7 @@ func blobVerificationProofFromProto(verificationProof *disperser_rpc.BlobVerific logger.Printf("VerificationProof:InclusionProof: %v\n", verificationProof.GetInclusionProof()) logger.Printf("VerificationProof:QuorumThresholdIndexes: %v\n", verificationProof.GetQuorumIndexes()) - return rollupbindings.EigenDABlobUtilsBlobVerificationProof{ + return rollupbindings.EigenDARollupUtilsBlobVerificationProof{ BatchId: verificationProof.GetBatchId(), BlobIndex: uint8(verificationProof.GetBlobIndex()), BatchMetadata: batchMetadata, @@ -563,17 +563,17 @@ func TestEncodeBlob(t *testing.T) { assert.NotNil(t, encoderReply.Chunks) // Decode Server Data - var chunksData []*core.Chunk + var chunksData []*encoding.Frame for i := range encoderReply.Chunks { - chunkSerialized, _ := new(core.Chunk).Deserialize(encoderReply.GetChunks()[i]) + chunkSerialized, _ := new(encoding.Frame).Deserialize(encoderReply.GetChunks()[i]) // perform an operation chunksData = append(chunksData, chunkSerialized) } assert.NotNil(t, chunksData) // Indices obtained from Encoder_Test - indices := []core.ChunkNumber{ + indices := []encoding.ChunkNumber{ 0, 1, 2, 3, 4, 5, 6, 7, } @@ -587,17 +587,15 @@ func TestEncodeBlob(t *testing.T) { NumWorker: uint64(runtime.GOMAXPROCS(0)), } - encodingConfig := encoding.EncoderConfig{KzgConfig: kzgConfig} - - encoder, _ := encoding.NewEncoder(encodingConfig) + v, _ := verifier.NewVerifier(&kzgConfig, false) maxInputSize := uint64(len(gettysburgAddressBytes)) + 10 - decoded, err := encoder.Decode(chunksData, indices, *encodingParams, maxInputSize) + decoded, err := v.Decode(chunksData, indices, *encodingParams, maxInputSize) assert.Nil(t, err) assert.Equal(t, decoded, gettysburgAddressBytes) } -func encodeBlob(data []byte) 
(*encoder_rpc.EncodeBlobReply, *core.EncodingParams, error) { +func encodeBlob(data []byte) (*encoder_rpc.EncodeBlobReply, *encoding.EncodingParams, error) { logger := testSuite.Logger var adversaryThreshold uint8 = 80 var quorumThreshold uint8 = 90 @@ -630,7 +628,7 @@ func encodeBlob(data []byte) (*encoder_rpc.EncodeBlobReply, *core.EncodingParams coordinator := &core.StdAssignmentCoordinator{} blobSize := uint(len(testBlob.Data)) - blobLength := core.GetBlobLength(uint(blobSize)) + blobLength := encoding.GetBlobLength(uint(blobSize)) chunkLength, err := coordinator.CalculateChunkLength(operatorState, blobLength, 0, param) if err != nil { @@ -646,7 +644,7 @@ func encodeBlob(data []byte) (*encoder_rpc.EncodeBlobReply, *core.EncodingParams if err != nil { logger.Printf("failed to get assignments: %s", err) } - testEncodingParams, _ := core.GetEncodingParams(chunkLength, info.TotalChunks) + testEncodingParams := encoding.ParamsFromMins(chunkLength, info.TotalChunks) testEncodingParamsProto := &encoder_rpc.EncodingParams{ ChunkLength: uint32(testEncodingParams.ChunkLength),