From 3c1888b3480bf47b98e7a478aee5c5cffe5c0edd Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 1 Mar 2024 11:11:36 +0100 Subject: [PATCH 01/66] feat: mul by linear factor ok --- ecc/bn254/shplonk/shplonk.go | 54 ++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 ecc/bn254/shplonk/shplonk.go diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go new file mode 100644 index 0000000000..963663f623 --- /dev/null +++ b/ecc/bn254/shplonk/shplonk.go @@ -0,0 +1,54 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//cf https://eprint.iacr.org/2020/081.pdf + +package shplonk + +import ( + "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" +) + +// OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // W = ∑ᵢ γⁱZ_{T\xᵢ}(f_i(X)-f(x_i)) + W bn254.G1Affine + + // (fᵢ(xᵢ))_{i} + ClaimedValues []fr.Element +} + +func BatchOpen(p [][]fr.Element, points []fr.Element, pk kzg.ProvingKey) {} + +// ------------------------------ +// utils + +// computes f <- (x-a)*f (in place if the capacity of f is correctly set) +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} From b9aeda23fc26188604e5a6e72d2471e250c7efcd Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 1 Mar 2024 15:47:06 +0100 Subject: [PATCH 02/66] feat: division ok --- ecc/bn254/shplonk/shplonk.go | 26 +++++++++ ecc/bn254/shplonk/shplonk_test.go | 91 +++++++++++++++++++++++++++++++ 2 files changed, 117 insertions(+) create mode 100644 ecc/bn254/shplonk/shplonk_test.go diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 963663f623..45c3997111 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -39,6 +39,14 @@ func BatchOpen(p [][]fr.Element, points []fr.Element, pk kzg.ProvingKey) {} // ------------------------------ // utils +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + // computes f <- (x-a)*f (in place if the capacity of f is correctly set) func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { s := len(f) @@ -52,3 +60,21 @@ func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { f[0].Mul(&f[0], &a).Neg(&f[0]) return f } + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result +func 
div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go new file mode 100644 index 0000000000..1d49374f27 --- /dev/null +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -0,0 +1,91 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package shplonk + +import ( + "fmt" + "testing" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr" +) + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("result should not be zero") + } + + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + for i := 0; i < len(g); i++ { + fmt.Printf("%s\n", g[i].String()) + } + fmt.Println("--") + + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + fmt.Println("--") + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + for i := 0; i < len(f); i++ { + fmt.Printf("%s\n", f[i].String()) + } + fmt.Println("--") + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} From c6fdec20aafc2bebd3b35b09fb7971f4c558fe91 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 1 Mar 2024 16:11:46 +0100 Subject: [PATCH 03/66] feat: build vanishing poly ok --- ecc/bn254/shplonk/shplonk.go | 9 +++++++++ ecc/bn254/shplonk/shplonk_test.go | 33 ++++++++++++++++++++----------- 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 45c3997111..14a72a3ac7 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -61,6 +61,15 @@ func multiplyLinearFactor(f []fr.Element, a fr.Element) 
[]fr.Element { return f } +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + // returns f/g (assuming g divides f) // OK to not use fft if deg(g) is small // g's leading coefficient is assumed to be 1 diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 1d49374f27..62de573db4 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -15,12 +15,32 @@ package shplonk import ( - "fmt" "testing" "github.com/consensys/gnark-crypto/ecc/bn254/fr" ) +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(x_{i})=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } +} + func TestMultiplyLinearFactor(t *testing.T) { s := 10 @@ -55,17 +75,12 @@ func TestDiv(t *testing.T) { // backup g := make([]fr.Element, s) copy(g, f) - for i := 0; i < len(g); i++ { - fmt.Printf("%s\n", g[i].String()) - } - fmt.Println("--") x := make([]fr.Element, nbPoints) for i := 0; i < nbPoints; i++ { x[i].SetRandom() f = multiplyLinearFactor(f, x[i]) } - fmt.Println("--") q := make([][2]fr.Element, nbPoints) for i := 0; i < nbPoints; i++ { q[i][1].SetOne() @@ -73,11 +88,6 @@ func TestDiv(t *testing.T) { f = div(f, q[i][:]) } - for i := 0; i < len(f); i++ { - fmt.Printf("%s\n", f[i].String()) - } - fmt.Println("--") - // g should be equal to f if len(f) != len(g) { t.Fatal("lengths don't match") @@ -87,5 +97,4 @@ func TestDiv(t *testing.T) { t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") } } - } From 34702b922bccc3e7acf671696eb6e4248fe38d8a Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 1 Mar 2024 
19:06:24 +0100 Subject: [PATCH 04/66] fix: fixed vanishing poly --- ecc/bn254/shplonk/shplonk.go | 1 + ecc/bn254/shplonk/shplonk_test.go | 28 ++++++++++++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 14a72a3ac7..b434796c9e 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -64,6 +64,7 @@ func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { // returns πᵢ(X-xᵢ) func buildVanishingPoly(x []fr.Element) []fr.Element { res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() for i := 0; i < len(x); i++ { res = multiplyLinearFactor(res, x[i]) } diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 62de573db4..e2d47e63ce 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -39,6 +39,14 @@ func TestBuildVanishingPoly(t *testing.T) { t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") } } + + // check that r(y)!=0 for a random point + var a fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } } func TestMultiplyLinearFactor(t *testing.T) { @@ -76,6 +84,7 @@ func TestDiv(t *testing.T) { g := make([]fr.Element, s) copy(g, f) + // successive divions of linear terms x := make([]fr.Element, nbPoints) for i := 0; i < nbPoints; i++ { x[i].SetRandom() @@ -97,4 +106,23 @@ func TestDiv(t *testing.T) { t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") } } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + } From 2dbe9e6efc08f7f3c42c011f7a050f8cd3ed6619 Mon Sep 17 00:00:00 2001 From: Thomas 
Piellard Date: Fri, 1 Mar 2024 19:17:45 +0100 Subject: [PATCH 05/66] fix: fixed test TestMultiplyLinearFactor --- ecc/bn254/shplonk/shplonk_test.go | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index e2d47e63ce..991e37cab1 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -59,16 +59,17 @@ func TestMultiplyLinearFactor(t *testing.T) { var a, y fr.Element a.SetRandom() - y = eval(f, a) - if y.IsZero() { - t.Fatal("result should not be zero") - } - f = multiplyLinearFactor(f, a) y = eval(f, a) if !y.IsZero() { t.Fatal("(X-a)f(X) should be zero at a") } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + } func TestDiv(t *testing.T) { From d35544df58d7ceab54534f9b9dab49948c67fd8e Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 5 Mar 2024 16:56:48 +0100 Subject: [PATCH 06/66] feat: naive mul (for combinings polynomials) ok --- ecc/bn254/shplonk/shplonk.go | 116 +++++++++++++++++++++++++++++- ecc/bn254/shplonk/shplonk_test.go | 36 ++++++++++ 2 files changed, 150 insertions(+), 2 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index b434796c9e..2f6a7b0948 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -17,9 +17,17 @@ package shplonk import ( + "errors" + "hash" + "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" +) + +var ( + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -27,14 +35,94 @@ import ( // implements io.ReaderFrom and io.WriterTo type OpeningProof struct { - // W = ∑ᵢ γⁱZ_{T\xᵢ}(f_i(X)-f(x_i)) + // W = ∑ᵢ γⁱZ_{T\xᵢ}(f_i(X)-f(x_i)) where Z_{T} is the vanishing polynomial on the (xᵢ)_{i} W bn254.G1Affine + // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X) + WPrime bn254.G1Affine + // (fᵢ(xᵢ))_{i} ClaimedValues []fr.Element } -func BatchOpen(p [][]fr.Element, points []fr.Element, pk kzg.ProvingKey) {} +// func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + +// var res OpeningProof + +// if len(polynomials) != len(points) { +// return res, ErrInvalidNumberOfPoints +// } + +// // derive γ +// gamma, err := deriveGamma(points, digests, hf, dataTranscript...) +// if err != nil { +// return res, err +// } + +// // compute the claimed evaluations +// maxSize := len(polynomials[0]) +// for i := 1; i < len(polynomials); i++ { +// if maxSize < len(polynomials[i]) { +// maxSize = len(polynomials[i]) +// } +// } + +// totalSize := maxSize + len(points) // maxSize+len(points)-1 is the max degree among the polynomials Z_{T\xᵢ}fᵢ +// buf := make([]fr.Element, totalSize) +// f := make([]fr.Element, totalSize) +// copy(buf, polynomials[0]) +// v := buildVanishingPoly(points[1:]) + +// for i := 1; i>deg(small), deg(small) =~ 10 max +// buf is used as a buffer +func mul(big, small []fr.Element, buf []fr.Element) []fr.Element { + + sizeRes := len(big) + len(small) - 1 + if len(buf) < sizeRes { + s := make([]fr.Element, sizeRes-len(buf)) + buf = append(buf, s...) 
+ } + for i := 0; i < len(buf); i++ { + buf[i].SetZero() + } + + var tmp fr.Element + for i := 0; i < len(small); i++ { + for j := 0; j < len(big); j++ { + tmp.Mul(&big[j], &small[i]) + buf[j+i].Add(&buf[j+i], &tmp) + } + } + return buf +} + // returns f/g (assuming g divides f) // OK to not use fft if deg(g) is small // g's leading coefficient is assumed to be 1 diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 991e37cab1..62bec6a864 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -72,6 +72,42 @@ func TestMultiplyLinearFactor(t *testing.T) { } +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(x_{i}) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + func TestDiv(t *testing.T) { nbPoints := 10 From c7810044bc8de2c5260012c64ba73eab7107c0f7 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 5 Mar 2024 17:20:15 +0100 Subject: [PATCH 07/66] feat: add poly --- ecc/bn254/shplonk/shplonk.go | 82 ++++++++++++++++++++++++------------ 1 file changed, 55 insertions(+), 27 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 2f6a7b0948..e7d1cf8f9b 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -45,42 +45,42 @@ type OpeningProof struct { ClaimedValues []fr.Element } -// func BatchOpen(polynomials 
[][]fr.Element, digests []kzg.Digest, points []fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { +func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { -// var res OpeningProof + var res OpeningProof -// if len(polynomials) != len(points) { -// return res, ErrInvalidNumberOfPoints -// } + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } -// // derive γ -// gamma, err := deriveGamma(points, digests, hf, dataTranscript...) -// if err != nil { -// return res, err -// } + // derive γ + gamma, err := deriveGamma(points, digests, hf, dataTranscript...) + if err != nil { + return res, err + } -// // compute the claimed evaluations -// maxSize := len(polynomials[0]) -// for i := 1; i < len(polynomials); i++ { -// if maxSize < len(polynomials[i]) { -// maxSize = len(polynomials[i]) -// } -// } + // compute the claimed evaluations + maxSize := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSize < len(polynomials[i]) { + maxSize = len(polynomials[i]) + } + } -// totalSize := maxSize + len(points) // maxSize+len(points)-1 is the max degree among the polynomials Z_{T\xᵢ}fᵢ -// buf := make([]fr.Element, totalSize) -// f := make([]fr.Element, totalSize) -// copy(buf, polynomials[0]) -// v := buildVanishingPoly(points[1:]) + totalSize := maxSize + len(points) // maxSize+len(points)-1 is the max degree among the polynomials Z_{T\xᵢ}fᵢ + buf := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation + copy(buf, polynomials[0]) + v := buildVanishingPoly(points[1:]) -// for i := 1; i Date: Thu, 7 Mar 2024 10:23:05 +0100 Subject: [PATCH 08/66] feat: wrpime ok --- ecc/bn254/shplonk/shplonk.go | 152 ++++++++++++++++++++++++----------- 1 file changed, 104 insertions(+), 48 deletions(-) diff --git 
a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index e7d1cf8f9b..f51318b09f 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -49,35 +49,106 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele var res OpeningProof + nbInstances := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + // derive γ - gamma, err := deriveGamma(points, digests, hf, dataTranscript...) + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) if err != nil { return res, err } // compute the claimed evaluations - maxSize := len(polynomials[0]) + maxSizePolys := len(polynomials[0]) for i := 1; i < len(polynomials); i++ { - if maxSize < len(polynomials[i]) { - maxSize = len(polynomials[i]) + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) } } - - totalSize := maxSize + len(points) // maxSize+len(points)-1 is the max degree among the polynomials Z_{T\xᵢ}fᵢ - buf := make([]fr.Element, totalSize) + totalSize := maxSizePolys + len(points) - 1 // maxSizePolys+len(points)-2 is the max degree among the polynomials Z_{T\xᵢ}fᵢ + bufTotalSize := make([]fr.Element, totalSize) + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - copy(buf, polynomials[0]) - v := buildVanishingPoly(points[1:]) + bufPoints := make([]fr.Element, nbInstances-1) + ztMinusXi := make([][]fr.Element, nbInstances) + res.ClaimedValues = make([]fr.Element, nbInstances) + var accGamma fr.Element + accGamma.SetOne() + for i := 0; i < nbInstances; i++ { + + res.ClaimedValues[i] = eval(polynomials[i], points[i]) + + copy(bufPoints, points[:i]) + copy(bufPoints[i:], points[i+1:]) + + ztMinusXi[i] = buildVanishingPoly(bufPoints) + copy(bufMaxSizePolynomials, polynomials[i]) + 
bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &res.ClaimedValues[i]) // f-f(xᵢ) + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusXi[i], bufTotalSize) + bufTotalSize = mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + f[j].Add(&f[j], &bufTotalSize[j]) + } - for i := 1; i < len(polynomials); i++ { + accGamma.Mul(&accGamma, &gamma) + setZero(bufMaxSizePolynomials) + } + zt := buildVanishingPoly(points) + w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + res.W, err = kzg.Commit(w, pk) + if err != nil { + return res, err } // derive z + z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) + if err != nil { + return res, err + } + + // compute L = ∑ᵢγⁱZ_{T\xᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W + accGamma.SetOne() + var gammaiZtMinusXi fr.Element + l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + for i := 0; i < len(polynomials); i++ { + + zi := eval(ztMinusXi[i], z) + gammaiZtMinusXi.Mul(&accGamma, &zi) + copy(bufMaxSizePolynomials, polynomials[i]) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &res.ClaimedValues[i]) + mulByConstant(bufMaxSizePolynomials, gammaiZtMinusXi) + for j := 0; j < len(bufMaxSizePolynomials); j++ { + l[j].Add(&l[j], &bufMaxSizePolynomials[j]) + } + + setZero(bufMaxSizePolynomials) + accGamma.Mul(&accGamma, &gamma) + } + ztz := eval(zt, z) + setZero(bufTotalSize) + copy(bufTotalSize, w) + mulByConstant(bufTotalSize, ztz) + for i := 0; i < totalSize-maxSizePolys; i++ { + bufTotalSize[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) + } + for i := 0; i < maxSizePolys; i++ { + bufTotalSize[i].Sub(&l[i], &bufTotalSize[i]) + } + + lz := eval(l, z) + l[0].Sub(&l[0], &lz) + xMinusZ := buildVanishingPoly([]fr.Element{z}) + wPrime := div(l, xMinusZ) + res.WPrime, err = kzg.Commit(wPrime, pk) + if err != nil { + return res, err + } return res, nil } @@ -92,29 +163,29 @@ func 
BatchVerify(proof OpeningProof, commitments []kzg.Digest, points []fr.Eleme return nil } -// deriveGamma derives a challenge using Fiat Shamir to polynomials. -func deriveGamma(points []fr.Element, digests []kzg.Digest, hf hash.Hash, dataTranscript ...[]byte) (fr.Element, error) { +// deriveChallenge derives a challenge using Fiat Shamir to polynomials. +// The arguments are added to the transcript in the order in which they are given. +func deriveChallenge(name string, points []fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) { // derive the challenge gamma, binded to the point and the commitments - fs := fiatshamir.NewTranscript(hf, "gamma") for i := range points { - if err := fs.Bind("gamma", points[i].Marshal()); err != nil { + if err := t.Bind(name, points[i].Marshal()); err != nil { return fr.Element{}, err } } for i := range digests { - if err := fs.Bind("gamma", digests[i].Marshal()); err != nil { + if err := t.Bind(name, digests[i].Marshal()); err != nil { return fr.Element{}, err } } for i := 0; i < len(dataTranscript); i++ { - if err := fs.Bind("gamma", dataTranscript[i]); err != nil { + if err := t.Bind(name, dataTranscript[i]); err != nil { return fr.Element{}, err } } - gammaByte, err := fs.ComputeChallenge("gamma") + gammaByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } @@ -127,6 +198,13 @@ func deriveGamma(points []fr.Element, digests []kzg.Digest, hf hash.Hash, dataTr // ------------------------------ // utils +// sets f to zero +func setZero(f []fr.Element) { + for i := 0; i < len(f); i++ { + f[i].SetZero() + } +} + func eval(f []fr.Element, x fr.Element) fr.Element { var y fr.Element for i := len(f) - 1; i >= 0; i-- { @@ -135,27 +213,7 @@ func eval(f []fr.Element, x fr.Element) fr.Element { return y } -// sets buf= f+g and returns buf (memory of f is re-used) -func add(f, g, buf []fr.Element) []fr.Element { - maxSize := len(f) - if maxSize < len(g) { - 
maxSize = len(g) - } - if len(buf) < maxSize { - s := make([]fr.Element, maxSize-len(buf)) - buf = append(buf, s...) - } - for i := 0; i < len(buf); i++ { - buf[i].SetZero() - } - copy(buf, f) - for i := 0; i < len(g); i++ { - buf[i].Add(&buf[i], &g[i]) - } - return buf -} - -// returns \gamma*f, re-using f +// returns γ*f, re-using f func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { for i := 0; i < len(f); i++ { f[i].Mul(&f[i], &gamma) @@ -188,23 +246,21 @@ func buildVanishingPoly(x []fr.Element) []fr.Element { } // returns f*g using naive multiplication -// deg(big)>>deg(small), deg(small) =~ 10 max -// buf is used as a buffer -func mul(big, small []fr.Element, buf []fr.Element) []fr.Element { +// deg(f)>>deg(g), deg(small) =~ 10 max +// buf is used as a buffer and should not be f or g +func mul(f, g []fr.Element, buf []fr.Element) []fr.Element { - sizeRes := len(big) + len(small) - 1 + sizeRes := len(f) + len(g) - 1 if len(buf) < sizeRes { s := make([]fr.Element, sizeRes-len(buf)) buf = append(buf, s...) 
} - for i := 0; i < len(buf); i++ { - buf[i].SetZero() - } + setZero(buf) var tmp fr.Element - for i := 0; i < len(small); i++ { - for j := 0; j < len(big); j++ { - tmp.Mul(&big[j], &small[i]) + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) buf[j+i].Add(&buf[j+i], &tmp) } } From bd81fc9fe7cf99d075a0e4b5309573b212f5b105 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 8 Mar 2024 14:45:22 +0100 Subject: [PATCH 09/66] feat: sage debugging, open ok --- ecc/bn254/shplonk/shplonk.go | 123 +++++++++++++++++++++++++++--- ecc/bn254/shplonk/shplonk_test.go | 64 ++++++++++++++++ 2 files changed, 178 insertions(+), 9 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index f51318b09f..5c791a75d1 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -18,8 +18,11 @@ package shplonk import ( "errors" + "fmt" "hash" + "math/big" + "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/kzg" @@ -28,6 +31,7 @@ import ( var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -45,6 +49,13 @@ type OpeningProof struct { ClaimedValues []fr.Element } +func prettyPrintPoly(f []fr.Element) { + for i := 0; i < len(f)-1; i++ { + fmt.Printf("%s*x**%d+", f[i].String(), i) + } + fmt.Printf("%s*x**%d\n", f[len(f)-1].String(), len(f)-1) +} + func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { var res OpeningProof @@ -71,6 +82,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele } } totalSize := maxSizePolys + len(points) - 1 // maxSizePolys+len(points)-2 is the max degree among the polynomials Z_{T\xᵢ}fᵢ + bufTotalSize := make([]fr.Element, totalSize) bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation @@ -79,16 +91,18 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele res.ClaimedValues = make([]fr.Element, nbInstances) var accGamma fr.Element accGamma.SetOne() + + fmt.Printf("gamma = Fr(%s)\n", gamma.String()) for i := 0; i < nbInstances; i++ { res.ClaimedValues[i] = eval(polynomials[i], points[i]) copy(bufPoints, points[:i]) copy(bufPoints[i:], points[i+1:]) - ztMinusXi[i] = buildVanishingPoly(bufPoints) + copy(bufMaxSizePolynomials, polynomials[i]) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &res.ClaimedValues[i]) // f-f(xᵢ) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &res.ClaimedValues[i]) bufTotalSize = mul(bufMaxSizePolynomials, ztMinusXi[i], bufTotalSize) bufTotalSize = mulByConstant(bufTotalSize, accGamma) for j := 0; j < len(bufTotalSize); j++ { @@ -98,6 +112,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele accGamma.Mul(&accGamma, &gamma) setZero(bufMaxSizePolynomials) } + // prettyPrintPoly(f) zt := buildVanishingPoly(points) w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 
11 for notation page 11 for notation @@ -111,6 +126,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele if err != nil { return res, err } + fmt.Printf("z = Fr(%s)\n", z.String()) // compute L = ∑ᵢγⁱZ_{T\xᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W accGamma.SetOne() @@ -135,16 +151,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele copy(bufTotalSize, w) mulByConstant(bufTotalSize, ztz) for i := 0; i < totalSize-maxSizePolys; i++ { - bufTotalSize[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) + l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) } for i := 0; i < maxSizePolys; i++ { - bufTotalSize[i].Sub(&l[i], &bufTotalSize[i]) + l[i].Sub(&l[i], &bufTotalSize[i]) } - lz := eval(l, z) - l[0].Sub(&l[0], &lz) xMinusZ := buildVanishingPoly([]fr.Element{z}) wPrime := div(l, xMinusZ) + prettyPrintPoly(wPrime) + res.WPrime, err = kzg.Commit(wPrime, pk) if err != nil { return res, err @@ -154,11 +170,100 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele } // BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues -// at points. The order mattes: the proof validates that the i-th commitment is correctly opened +// at points. The order matters: the proof validates that the i-th commitment is correctly opened // at the i-th point -func BatchVerify(proof OpeningProof, commitments []kzg.Digest, points []fr.Element) error { +// dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end +// of the original transcript. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points []fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } - // compute γ + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusXiFiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusXiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + bufPoints := make([]fr.Element, len(points)-1) + for i := 0; i < len(points); i++ { + + copy(bufPoints, points[:i]) + copy(bufPoints[i:], points[i+1:]) + + ztMinusXi := buildVanishingPoly(bufPoints) + gammaiZTminusXiz[i] = eval(ztMinusXi, z) + gammaiZTminusXiz[i].Mul(&accGamma, &gammaiZTminusXiz[i]) + + tmp.Mul(&gammaiZTminusXiz[i], &proof.ClaimedValues[i]) + sumGammaiZTminusXiFiz.Add(&sumGammaiZTminusXiFiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := ecc.MultiExpConfig{} + var sumGammaiZtMinusXiComi kzg.Digest + _, err = sumGammaiZtMinusXiComi.MultiExp(digests, gammaiZTminusXiz, config) + if err != nil { + return err + } + + var bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusXiFizCom kzg.Digest + var sumGammaiZTminusXiFizBigInt big.Int + sumGammaiZTminusXiFiz.BigInt(&sumGammaiZTminusXiFizBigInt) + sumGammaiZTminusXiFizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusXiFizBigInt) + + // Z_{T}(z)[W] + zt := 
buildVanishingPoly(points) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusXiComi, &sumGammaiZTminusXiFizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&zWPrime, &bufBigInt) + f.Add(&f, &zWPrime) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := bn254.PairingCheckFixedQ( + []bn254.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } return nil } diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 62bec6a864..a7aa8ed20c 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -15,11 +15,75 @@ package shplonk import ( + "crypto/sha256" + "fmt" + "math/big" "testing" + "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/stretchr/testify/require" ) +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 5 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + for i := 0; i < nbPolys; i++ { + prettyPrintPoly(polys[i]) + } + fmt.Println("--") + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i].SetRandom() + } + + fmt.Println("[") + for i := 0; i < len(points); i++ { + fmt.Printf("Fr(%s), ", points[i].String()) + } + fmt.Println("]") + + hf := sha256.New() + + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + +} + func TestBuildVanishingPoly(t *testing.T) { s := 10 x := make([]fr.Element, s) From 390621f28e71abe11359b38a4217297697e69e90 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 8 Mar 2024 19:10:10 +0100 Subject: [PATCH 10/66] feat: debugged neg pairing --- ecc/bn254/shplonk/shplonk.go | 23 +++++++---------------- ecc/bn254/shplonk/shplonk_test.go | 12 ------------ 2 files changed, 7 insertions(+), 28 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 5c791a75d1..c7e6a4d6eb 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -18,7 +18,6 @@ package shplonk import ( "errors" - "fmt" "hash" "math/big" @@ -49,13 +48,6 @@ type OpeningProof struct { ClaimedValues []fr.Element } -func prettyPrintPoly(f []fr.Element) { - for i := 0; i < len(f)-1; i++ { - fmt.Printf("%s*x**%d+", f[i].String(), i) - } - 
fmt.Printf("%s*x**%d\n", f[len(f)-1].String(), len(f)-1) -} - func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { var res OpeningProof @@ -92,7 +84,6 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele var accGamma fr.Element accGamma.SetOne() - fmt.Printf("gamma = Fr(%s)\n", gamma.String()) for i := 0; i < nbInstances; i++ { res.ClaimedValues[i] = eval(polynomials[i], points[i]) @@ -112,7 +103,6 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele accGamma.Mul(&accGamma, &gamma) setZero(bufMaxSizePolynomials) } - // prettyPrintPoly(f) zt := buildVanishingPoly(points) w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation @@ -126,7 +116,6 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele if err != nil { return res, err } - fmt.Printf("z = Fr(%s)\n", z.String()) // compute L = ∑ᵢγⁱZ_{T\xᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W accGamma.SetOne() @@ -159,7 +148,6 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Ele xMinusZ := buildVanishingPoly([]fr.Element{z}) wPrime := div(l, xMinusZ) - prettyPrintPoly(wPrime) res.WPrime, err = kzg.Commit(wPrime, pk) if err != nil { @@ -244,7 +232,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points []fr.Element, ztz.BigInt(&bufBigInt) ztW.ScalarMultiplication(&proof.W, &bufBigInt) - // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢ\gamma^{i}Z_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] var f kzg.Digest f.Sub(&sumGammaiZtMinusXiComi, &sumGammaiZTminusXiFizCom). 
Sub(&f, &ztW) @@ -252,8 +240,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points []fr.Element, // F+zW' var zWPrime kzg.Digest z.BigInt(&bufBigInt) - zWPrime.ScalarMultiplication(&zWPrime, &bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) f.Add(&f, &zWPrime) + f.Neg(&f) // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) check, err := bn254.PairingCheckFixedQ( @@ -326,7 +315,8 @@ func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { return f } -// computes f <- (x-a)*f (in place if the capacity of f is correctly set) +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { s := len(f) var tmp fr.Element @@ -353,6 +343,7 @@ func buildVanishingPoly(x []fr.Element) []fr.Element { // returns f*g using naive multiplication // deg(f)>>deg(g), deg(small) =~ 10 max // buf is used as a buffer and should not be f or g +// f and g are not modified func mul(f, g []fr.Element, buf []fr.Element) []fr.Element { sizeRes := len(f) + len(g) - 1 @@ -375,7 +366,7 @@ func mul(f, g []fr.Element, buf []fr.Element) []fr.Element { // returns f/g (assuming g divides f) // OK to not use fft if deg(g) is small // g's leading coefficient is assumed to be 1 -// f memory is re-used for the result +// f memory is re-used for the result, need to pass a copy to not modify it func div(f, g []fr.Element) []fr.Element { sizef := len(f) sizeg := len(g) diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index a7aa8ed20c..8945160b7b 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -16,7 +16,6 @@ package shplonk import ( "crypto/sha256" - "fmt" "math/big" "testing" @@ -53,11 +52,6 @@ func TestOpening(t *testing.T) { } } - for i := 0; i < nbPolys; i++ { - prettyPrintPoly(polys[i]) - } - fmt.Println("--") - digests := make([]kzg.Digest, nbPolys) for i := 0; i < nbPolys; i++ { digests[i], 
_ = kzg.Commit(polys[i], testSrs.Pk) @@ -68,12 +62,6 @@ func TestOpening(t *testing.T) { points[i].SetRandom() } - fmt.Println("[") - for i := 0; i < len(points); i++ { - fmt.Printf("Fr(%s), ", points[i].String()) - } - fmt.Println("]") - hf := sha256.New() openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) From 50c6a26e2c2d6602573a02740e79427c79e96de7 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Sun, 10 Mar 2024 13:50:32 +0100 Subject: [PATCH 11/66] feat: interpolator ok --- ecc/bn254/shplonk/shplonk.go | 14 +++++++++++- ecc/bn254/shplonk/shplonk_test.go | 38 +++++++++++++++++++++++++++++-- 2 files changed, 49 insertions(+), 3 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index c7e6a4d6eb..7d8b250c38 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -232,7 +232,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points []fr.Element, ztz.BigInt(&bufBigInt) ztW.ScalarMultiplication(&proof.W, &bufBigInt) - // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢ\gamma^{i}Z_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] var f kzg.Digest f.Sub(&sumGammaiZtMinusXiComi, &sumGammaiZTminusXiFizCom). 
Sub(&f, &ztW) @@ -340,6 +340,18 @@ func buildVanishingPoly(x []fr.Element) []fr.Element { return res } +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + // returns f*g using naive multiplication // deg(f)>>deg(g), deg(small) =~ 10 max // buf is used as a buffer and should not be f or g diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 8945160b7b..f70ba9bb58 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -72,6 +72,40 @@ func TestOpening(t *testing.T) { } +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + func TestBuildVanishingPoly(t *testing.T) { s := 10 x := make([]fr.Element, s) @@ -84,7 +118,7 @@ func TestBuildVanishingPoly(t *testing.T) { t.Fatal("error degree r") } - // check that r(x_{i})=0 for all i + // check that r(xᵢ)=0 for all i for i := 0; i < len(x); i++ { y := eval(r, x[i]) if !y.IsZero() { @@ -142,7 +176,7 @@ func TestNaiveMul(t *testing.T) { buf := make([]fr.Element, size+nbPoints-1) g := mul(f, v, buf) - // check that g(x_{i}) = 0 + // check that g(xᵢ) = 0 for i := 0; i < nbPoints; i++ { y := eval(g, 
points[i]) if !y.IsZero() { From 505f8fea366b6fb17b9426ca9110605d8df35be4 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Sun, 10 Mar 2024 18:46:29 +0100 Subject: [PATCH 12/66] feat: interpolate ok --- ecc/bn254/shplonk/shplonk.go | 15 +++++++++++++++ ecc/bn254/shplonk/shplonk_test.go | 19 +++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 7d8b250c38..31e44c9052 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -340,6 +340,21 @@ func buildVanishingPoly(x []fr.Element) []fr.Element { return res } +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + // returns f such that f(xⱼ)=δⁱⱼ func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { xx := make([]fr.Element, len(x)-1) diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index f70ba9bb58..6a81569bbe 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -72,6 +72,25 @@ func TestOpening(t *testing.T) { } +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + func TestBuildLagrangeFromDomain(t *testing.T) { nbPoints := 10 From 9c726c593a6b5d6e117808a1133237df926524c0 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Sun, 10 Mar 2024 19:55:18 +0100 Subject: [PATCH 13/66] feat: TestBuildZtMinusSi ok --- ecc/bn254/shplonk/shplonk.go | 
458 ++++++++++++++++-------------- ecc/bn254/shplonk/shplonk_test.go | 104 ++++--- 2 files changed, 319 insertions(+), 243 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 31e44c9052..887533f050 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -18,10 +18,7 @@ package shplonk import ( "errors" - "hash" - "math/big" - "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/kzg" @@ -44,227 +41,251 @@ type OpeningProof struct { // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X) WPrime bn254.G1Affine - // (fᵢ(xᵢ))_{i} - ClaimedValues []fr.Element + // ClaimedValues[i] are the values of fᵢ on Sᵢ + ClaimedValues [][]fr.Element } -func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { - - var res OpeningProof - - nbInstances := len(polynomials) - if len(polynomials) != len(points) { - return res, ErrInvalidNumberOfPoints - } - - // transcript - fs := fiatshamir.NewTranscript(hf, "gamma", "z") - - // derive γ - gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) 
- if err != nil { - return res, err - } - - // compute the claimed evaluations - maxSizePolys := len(polynomials[0]) - for i := 1; i < len(polynomials); i++ { - if maxSizePolys < len(polynomials[i]) { - maxSizePolys = len(polynomials[i]) - } - } - totalSize := maxSizePolys + len(points) - 1 // maxSizePolys+len(points)-2 is the max degree among the polynomials Z_{T\xᵢ}fᵢ - - bufTotalSize := make([]fr.Element, totalSize) - bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) - f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - bufPoints := make([]fr.Element, nbInstances-1) - ztMinusXi := make([][]fr.Element, nbInstances) - res.ClaimedValues = make([]fr.Element, nbInstances) - var accGamma fr.Element - accGamma.SetOne() - - for i := 0; i < nbInstances; i++ { - - res.ClaimedValues[i] = eval(polynomials[i], points[i]) - - copy(bufPoints, points[:i]) - copy(bufPoints[i:], points[i+1:]) - ztMinusXi[i] = buildVanishingPoly(bufPoints) - - copy(bufMaxSizePolynomials, polynomials[i]) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &res.ClaimedValues[i]) - bufTotalSize = mul(bufMaxSizePolynomials, ztMinusXi[i], bufTotalSize) - bufTotalSize = mulByConstant(bufTotalSize, accGamma) - for j := 0; j < len(bufTotalSize); j++ { - f[j].Add(&f[j], &bufTotalSize[j]) - } - - accGamma.Mul(&accGamma, &gamma) - setZero(bufMaxSizePolynomials) - } - - zt := buildVanishingPoly(points) - w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation - res.W, err = kzg.Commit(w, pk) - if err != nil { - return res, err - } - - // derive z - z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) - if err != nil { - return res, err - } - - // compute L = ∑ᵢγⁱZ_{T\xᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W - accGamma.SetOne() - var gammaiZtMinusXi fr.Element - l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation - for i := 0; i < 
len(polynomials); i++ { - - zi := eval(ztMinusXi[i], z) - gammaiZtMinusXi.Mul(&accGamma, &zi) - copy(bufMaxSizePolynomials, polynomials[i]) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &res.ClaimedValues[i]) - mulByConstant(bufMaxSizePolynomials, gammaiZtMinusXi) - for j := 0; j < len(bufMaxSizePolynomials); j++ { - l[j].Add(&l[j], &bufMaxSizePolynomials[j]) - } - - setZero(bufMaxSizePolynomials) - accGamma.Mul(&accGamma, &gamma) - } - ztz := eval(zt, z) - setZero(bufTotalSize) - copy(bufTotalSize, w) - mulByConstant(bufTotalSize, ztz) - for i := 0; i < totalSize-maxSizePolys; i++ { - l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) - } - for i := 0; i < maxSizePolys; i++ { - l[i].Sub(&l[i], &bufTotalSize[i]) - } - - xMinusZ := buildVanishingPoly([]fr.Element{z}) - wPrime := div(l, xMinusZ) - - res.WPrime, err = kzg.Commit(wPrime, pk) - if err != nil { - return res, err - } - - return res, nil -} +// BatchOpen opens the list of polynomials on points, where the i-th polynomials is opend at points[i]. +// func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + +// var res OpeningProof + +// nbInstances := len(polynomials) +// if len(polynomials) != len(points) { +// return res, ErrInvalidNumberOfPoints +// } + +// // transcript +// fs := fiatshamir.NewTranscript(hf, "gamma", "z") + +// // derive γ +// gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) 
+// if err != nil { +// return res, err +// } + +// // compute the size of the linear combination +// maxSizePolys := len(polynomials[0]) +// for i := 1; i < len(polynomials); i++ { +// if maxSizePolys < len(polynomials[i]) { +// maxSizePolys = len(polynomials[i]) +// } +// } +// nbPoints := 0 +// sizeSi := make([]int, len(points)) +// for i := 0; i < nbInstances; i++ { +// nbPoints += len(points[i]) +// sizeSi[i] = len(points[i]) +// } +// totalSize := 0 +// for i := 0; i < nbInstances; i++ { +// sizeIthTerm := len(polynomials[i]) + nbPoints - len(points[i]) // the degree of the i-th term is len(polynomials[i])-1+nbPoints-len(points[i]) +// if totalSize < sizeIthTerm { +// totalSize = sizeIthTerm +// } +// } + +// bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) +// bufTotalSize := make([]fr.Element, totalSize) +// f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation +// res.ClaimedValues = make([][]fr.Element, nbInstances) +// for i := 0; i < nbPoints; i++ { +// res.ClaimedValues[i] = make([]fr.Element, len(points[i])) +// } +// var accGamma fr.Element +// accGamma.SetOne() + +// ztMinusSi := make([][]fr.Element, nbInstances) +// for i := 0; i < nbInstances; i++ { + +// for j := 0; j < len(points[i]); j++ { +// res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) +// } + +// bufPoints := make([]fr.Element, 0, nbPoints-sizeSi[i]) +// for j := 0; j < i; j++ { +// bufPoints = append(bufPoints, points[j]...) +// } +// for j := i + 1; j < nbInstances; j++ { +// bufPoints = append(bufPoints, points[j]...) 
+// } +// ztMinusSi[i] = buildVanishingPoly(bufPoints) + +// copy(bufMaxSizePolynomials, polynomials[i]) +// bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &res.ClaimedValues[i]) +// bufTotalSize = mul(bufMaxSizePolynomials, ztMinusXi[i], bufTotalSize) +// bufTotalSize = mulByConstant(bufTotalSize, accGamma) +// for j := 0; j < len(bufTotalSize); j++ { +// f[j].Add(&f[j], &bufTotalSize[j]) +// } + +// accGamma.Mul(&accGamma, &gamma) +// setZero(bufMaxSizePolynomials) +// } + +// zt := buildVanishingPoly(points) +// w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation +// res.W, err = kzg.Commit(w, pk) +// if err != nil { +// return res, err +// } + +// // derive z +// z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) +// if err != nil { +// return res, err +// } + +// // compute L = ∑ᵢγⁱZ_{T\xᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W +// accGamma.SetOne() +// var gammaiZtMinusXi fr.Element +// l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation +// for i := 0; i < len(polynomials); i++ { + +// zi := eval(ztMinusXi[i], z) +// gammaiZtMinusXi.Mul(&accGamma, &zi) +// copy(bufMaxSizePolynomials, polynomials[i]) +// bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &res.ClaimedValues[i]) +// mulByConstant(bufMaxSizePolynomials, gammaiZtMinusXi) +// for j := 0; j < len(bufMaxSizePolynomials); j++ { +// l[j].Add(&l[j], &bufMaxSizePolynomials[j]) +// } + +// setZero(bufMaxSizePolynomials) +// accGamma.Mul(&accGamma, &gamma) +// } +// ztz := eval(zt, z) +// setZero(bufTotalSize) +// copy(bufTotalSize, w) +// mulByConstant(bufTotalSize, ztz) +// for i := 0; i < totalSize-maxSizePolys; i++ { +// l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) +// } +// for i := 0; i < maxSizePolys; i++ { +// l[i].Sub(&l[i], &bufTotalSize[i]) +// } + +// xMinusZ := buildVanishingPoly([]fr.Element{z}) +// wPrime := div(l, xMinusZ) + +// res.WPrime, err = 
kzg.Commit(wPrime, pk) +// if err != nil { +// return res, err +// } + +// return res, nil +// } // BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues // at points. The order matters: the proof validates that the i-th commitment is correctly opened // at the i-th point // dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end // of the original transcript. -func BatchVerify(proof OpeningProof, digests []kzg.Digest, points []fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { - - if len(digests) != len(proof.ClaimedValues) { - return ErrInvalidNumberOfPoints - } - if len(digests) != len(points) { - return ErrInvalidNumberOfPoints - } - - // transcript - fs := fiatshamir.NewTranscript(hf, "gamma", "z") - - // derive γ - gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) - if err != nil { - return err - } - - // derive z - // TODO seems ok that z depend only on W, need to check that carefully - z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) - if err != nil { - return err - } - - // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) - // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] - var sumGammaiZTminusXiFiz, tmp, accGamma fr.Element - nbInstances := len(points) - gammaiZTminusXiz := make([]fr.Element, nbInstances) - accGamma.SetOne() - bufPoints := make([]fr.Element, len(points)-1) - for i := 0; i < len(points); i++ { - - copy(bufPoints, points[:i]) - copy(bufPoints[i:], points[i+1:]) - - ztMinusXi := buildVanishingPoly(bufPoints) - gammaiZTminusXiz[i] = eval(ztMinusXi, z) - gammaiZTminusXiz[i].Mul(&accGamma, &gammaiZTminusXiz[i]) - - tmp.Mul(&gammaiZTminusXiz[i], &proof.ClaimedValues[i]) - sumGammaiZTminusXiFiz.Add(&sumGammaiZTminusXiFiz, &tmp) - - accGamma.Mul(&accGamma, &gamma) - } - - // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - config := ecc.MultiExpConfig{} - var sumGammaiZtMinusXiComi kzg.Digest - _, err 
= sumGammaiZtMinusXiComi.MultiExp(digests, gammaiZTminusXiz, config) - if err != nil { - return err - } - - var bufBigInt big.Int - - // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} - var sumGammaiZTminusXiFizCom kzg.Digest - var sumGammaiZTminusXiFizBigInt big.Int - sumGammaiZTminusXiFiz.BigInt(&sumGammaiZTminusXiFizBigInt) - sumGammaiZTminusXiFizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusXiFizBigInt) - - // Z_{T}(z)[W] - zt := buildVanishingPoly(points) - ztz := eval(zt, z) - var ztW kzg.Digest - ztz.BigInt(&bufBigInt) - ztW.ScalarMultiplication(&proof.W, &bufBigInt) - - // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] - var f kzg.Digest - f.Sub(&sumGammaiZtMinusXiComi, &sumGammaiZTminusXiFizCom). - Sub(&f, &ztW) - - // F+zW' - var zWPrime kzg.Digest - z.BigInt(&bufBigInt) - zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) - f.Add(&f, &zWPrime) - f.Neg(&f) - - // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) - check, err := bn254.PairingCheckFixedQ( - []bn254.G1Affine{f, proof.WPrime}, - vk.Lines[:], - ) - - if !check { - return ErrVerifyOpeningProof - } - - return nil -} +// func BatchVerify(proof OpeningProof, digests []kzg.Digest, points []fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + +// if len(digests) != len(proof.ClaimedValues) { +// return ErrInvalidNumberOfPoints +// } +// if len(digests) != len(points) { +// return ErrInvalidNumberOfPoints +// } + +// // transcript +// fs := fiatshamir.NewTranscript(hf, "gamma", "z") + +// // derive γ +// gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) 
+// if err != nil { +// return err +// } + +// // derive z +// // TODO seems ok that z depend only on W, need to check that carefully +// z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) +// if err != nil { +// return err +// } + +// // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) +// // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] +// var sumGammaiZTminusXiFiz, tmp, accGamma fr.Element +// nbInstances := len(points) +// gammaiZTminusXiz := make([]fr.Element, nbInstances) +// accGamma.SetOne() +// bufPoints := make([]fr.Element, len(points)-1) +// for i := 0; i < len(points); i++ { + +// copy(bufPoints, points[:i]) +// copy(bufPoints[i:], points[i+1:]) + +// ztMinusXi := buildVanishingPoly(bufPoints) +// gammaiZTminusXiz[i] = eval(ztMinusXi, z) +// gammaiZTminusXiz[i].Mul(&accGamma, &gammaiZTminusXiz[i]) + +// tmp.Mul(&gammaiZTminusXiz[i], &proof.ClaimedValues[i]) +// sumGammaiZTminusXiFiz.Add(&sumGammaiZTminusXiFiz, &tmp) + +// accGamma.Mul(&accGamma, &gamma) +// } + +// // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} +// config := ecc.MultiExpConfig{} +// var sumGammaiZtMinusXiComi kzg.Digest +// _, err = sumGammaiZtMinusXiComi.MultiExp(digests, gammaiZTminusXiz, config) +// if err != nil { +// return err +// } + +// var bufBigInt big.Int + +// // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} +// var sumGammaiZTminusXiFizCom kzg.Digest +// var sumGammaiZTminusXiFizBigInt big.Int +// sumGammaiZTminusXiFiz.BigInt(&sumGammaiZTminusXiFizBigInt) +// sumGammaiZTminusXiFizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusXiFizBigInt) + +// // Z_{T}(z)[W] +// zt := buildVanishingPoly(points) +// ztz := eval(zt, z) +// var ztW kzg.Digest +// ztz.BigInt(&bufBigInt) +// ztW.ScalarMultiplication(&proof.W, &bufBigInt) + +// // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] +// var f kzg.Digest +// f.Sub(&sumGammaiZtMinusXiComi, &sumGammaiZTminusXiFizCom). 
+// Sub(&f, &ztW) + +// // F+zW' +// var zWPrime kzg.Digest +// z.BigInt(&bufBigInt) +// zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) +// f.Add(&f, &zWPrime) +// f.Neg(&f) + +// // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) +// check, err := bn254.PairingCheckFixedQ( +// []bn254.G1Affine{f, proof.WPrime}, +// vk.Lines[:], +// ) + +// if !check { +// return ErrVerifyOpeningProof +// } + +// return nil +// } // deriveChallenge derives a challenge using Fiat Shamir to polynomials. // The arguments are added to the transcript in the order in which they are given. -func deriveChallenge(name string, points []fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) { +func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) { // derive the challenge gamma, binded to the point and the commitments for i := range points { - if err := t.Bind(name, points[i].Marshal()); err != nil { - return fr.Element{}, err + for j := range points[i] { + if err := t.Bind(name, points[i][j].Marshal()); err != nil { + return fr.Element{}, err + } } } for i := range digests { @@ -330,6 +351,23 @@ func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { return f } +// returns S_{T\S_{i}} where S_{i}=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) 
+ } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + // returns πᵢ(X-xᵢ) func buildVanishingPoly(x []fr.Element) []fr.Element { res := make([]fr.Element, 1, len(x)+1) diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 6a81569bbe..dcc4912947 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -15,14 +15,12 @@ package shplonk import ( - "crypto/sha256" "math/big" "testing" "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/kzg" - "github.com/stretchr/testify/require" ) // Test SRS re-used across tests of the KZG scheme @@ -35,41 +33,81 @@ func init() { testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } -func TestOpening(t *testing.T) { - - assert := require.New(t) - - nbPolys := 2 - sizePoly := make([]int, nbPolys) - for i := 0; i < nbPolys; i++ { - sizePoly[i] = 5 + i - } - polys := make([][]fr.Element, nbPolys) - for i := 0; i < nbPolys; i++ { - polys[i] = make([]fr.Element, sizePoly[i]) - for j := 0; j < sizePoly[i]; j++ { - polys[i][j].SetRandom() +// func TestOpening(t *testing.T) { + +// assert := require.New(t) + +// nbPolys := 2 +// sizePoly := make([]int, nbPolys) +// for i := 0; i < nbPolys; i++ { +// sizePoly[i] = 5 + i +// } +// polys := make([][]fr.Element, nbPolys) +// for i := 0; i < nbPolys; i++ { +// polys[i] = make([]fr.Element, sizePoly[i]) +// for j := 0; j < sizePoly[i]; j++ { +// polys[i][j].SetRandom() +// } +// } + +// digests := make([]kzg.Digest, nbPolys) +// for i := 0; i < nbPolys; i++ { +// digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) +// } + +// points := make([]fr.Element, nbPolys) +// for i := 0; i < nbPolys; i++ { +// points[i].SetRandom() +// } + +// hf := sha256.New() + +// openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) +// assert.NoError(err) + +// err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) +// 
assert.NoError(err) + +// } + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() } } - - digests := make([]kzg.Digest, nbPolys) - for i := 0; i < nbPolys; i++ { - digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) - } - - points := make([]fr.Element, nbPolys) - for i := 0; i < nbPolys; i++ { - points[i].SetRandom() + for i := 0; i < nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } } - hf := sha256.New() - - openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) - assert.NoError(err) - - err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) - assert.NoError(err) - } func TestInterpolate(t *testing.T) { From 05b330d65cbfd8da0fdb7da3e56ff5c7fa7b374a Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Sun, 10 Mar 2024 23:12:43 +0100 Subject: [PATCH 14/66] feat: single point to set in open --- ecc/bn254/shplonk/shplonk.go | 268 +++++++++++++++++++---------------- 1 file changed, 144 insertions(+), 124 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 887533f050..c9eb8b3bce 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -18,6 +18,7 @@ package shplonk import ( "errors" + "hash" "github.com/consensys/gnark-crypto/ecc/bn254" 
"github.com/consensys/gnark-crypto/ecc/bn254/fr" @@ -35,7 +36,8 @@ var ( // implements io.ReaderFrom and io.WriterTo type OpeningProof struct { - // W = ∑ᵢ γⁱZ_{T\xᵢ}(f_i(X)-f(x_i)) where Z_{T} is the vanishing polynomial on the (xᵢ)_{i} + // W = ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r)/Z_{T} where Z_{T} is the vanishing polynomial on the (Sᵢ)_{i} + // and r interpolates fᵢ(Sᵢ) on (Sᵢ) W bn254.G1Affine // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X) @@ -46,135 +48,133 @@ type OpeningProof struct { } // BatchOpen opens the list of polynomials on points, where the i-th polynomials is opend at points[i]. -// func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { +func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { -// var res OpeningProof + var res OpeningProof -// nbInstances := len(polynomials) -// if len(polynomials) != len(points) { -// return res, ErrInvalidNumberOfPoints -// } + nbInstances := len(polynomials) + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } -// // transcript -// fs := fiatshamir.NewTranscript(hf, "gamma", "z") + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") -// // derive γ -// gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) -// if err != nil { -// return res, err -// } + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) 
+ if err != nil { + return res, err + } -// // compute the size of the linear combination -// maxSizePolys := len(polynomials[0]) -// for i := 1; i < len(polynomials); i++ { -// if maxSizePolys < len(polynomials[i]) { -// maxSizePolys = len(polynomials[i]) -// } -// } -// nbPoints := 0 -// sizeSi := make([]int, len(points)) -// for i := 0; i < nbInstances; i++ { -// nbPoints += len(points[i]) -// sizeSi[i] = len(points[i]) -// } -// totalSize := 0 -// for i := 0; i < nbInstances; i++ { -// sizeIthTerm := len(polynomials[i]) + nbPoints - len(points[i]) // the degree of the i-th term is len(polynomials[i])-1+nbPoints-len(points[i]) -// if totalSize < sizeIthTerm { -// totalSize = sizeIthTerm -// } -// } + // compute the size of the linear combination + maxSizePolys := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) + } + } + nbPoints := 0 + sizeSi := make([]int, len(points)) + for i := 0; i < nbInstances; i++ { + nbPoints += len(points[i]) + sizeSi[i] = len(points[i]) + } + totalSize := 0 // size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) + for i := 0; i < nbInstances; i++ { + sizeIthTerm := len(polynomials[i]) + nbPoints - len(points[i]) // the degree of the i-th term is len(polynomials[i])-1+nbPoints-len(points[i]) + if totalSize < sizeIthTerm { + totalSize = sizeIthTerm + } + } -// bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) -// bufTotalSize := make([]fr.Element, totalSize) -// f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation -// res.ClaimedValues = make([][]fr.Element, nbInstances) -// for i := 0; i < nbPoints; i++ { -// res.ClaimedValues[i] = make([]fr.Element, len(points[i])) -// } -// var accGamma fr.Element -// accGamma.SetOne() + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) + bufTotalSize := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf 
page 11 for notation + res.ClaimedValues = make([][]fr.Element, nbInstances) + for i := 0; i < nbPoints; i++ { + res.ClaimedValues[i] = make([]fr.Element, len(points[i])) + } + var accGamma fr.Element + accGamma.SetOne() -// ztMinusSi := make([][]fr.Element, nbInstances) -// for i := 0; i < nbInstances; i++ { - -// for j := 0; j < len(points[i]); j++ { -// res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) -// } - -// bufPoints := make([]fr.Element, 0, nbPoints-sizeSi[i]) -// for j := 0; j < i; j++ { -// bufPoints = append(bufPoints, points[j]...) -// } -// for j := i + 1; j < nbInstances; j++ { -// bufPoints = append(bufPoints, points[j]...) -// } -// ztMinusSi[i] = buildVanishingPoly(bufPoints) - -// copy(bufMaxSizePolynomials, polynomials[i]) -// bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &res.ClaimedValues[i]) -// bufTotalSize = mul(bufMaxSizePolynomials, ztMinusXi[i], bufTotalSize) -// bufTotalSize = mulByConstant(bufTotalSize, accGamma) -// for j := 0; j < len(bufTotalSize); j++ { -// f[j].Add(&f[j], &bufTotalSize[j]) -// } + ztMinusSi := make([][]fr.Element, nbInstances) + ri := make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { -// accGamma.Mul(&accGamma, &gamma) -// setZero(bufMaxSizePolynomials) -// } + for j := 0; j < len(points[i]); j++ { + res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) + } -// zt := buildVanishingPoly(points) -// w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation -// res.W, err = kzg.Commit(w, pk) -// if err != nil { -// return res, err -// } + ztMinusSi[i] = buildZtMinusSi(points, i) -// // derive z -// z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) -// if err != nil { -// return res, err -// } + copy(bufMaxSizePolynomials, polynomials[i]) + ri[i] = interpolate(points[i], res.ClaimedValues[i]) + sub(bufMaxSizePolynomials, ri[i]) + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) + bufTotalSize = 
mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + f[j].Add(&f[j], &bufTotalSize[j]) + } -// // compute L = ∑ᵢγⁱZ_{T\xᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W -// accGamma.SetOne() -// var gammaiZtMinusXi fr.Element -// l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation -// for i := 0; i < len(polynomials); i++ { - -// zi := eval(ztMinusXi[i], z) -// gammaiZtMinusXi.Mul(&accGamma, &zi) -// copy(bufMaxSizePolynomials, polynomials[i]) -// bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &res.ClaimedValues[i]) -// mulByConstant(bufMaxSizePolynomials, gammaiZtMinusXi) -// for j := 0; j < len(bufMaxSizePolynomials); j++ { -// l[j].Add(&l[j], &bufMaxSizePolynomials[j]) -// } - -// setZero(bufMaxSizePolynomials) -// accGamma.Mul(&accGamma, &gamma) -// } -// ztz := eval(zt, z) -// setZero(bufTotalSize) -// copy(bufTotalSize, w) -// mulByConstant(bufTotalSize, ztz) -// for i := 0; i < totalSize-maxSizePolys; i++ { -// l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) -// } -// for i := 0; i < maxSizePolys; i++ { -// l[i].Sub(&l[i], &bufTotalSize[i]) -// } + accGamma.Mul(&accGamma, &gamma) + setZero(bufMaxSizePolynomials) + } -// xMinusZ := buildVanishingPoly([]fr.Element{z}) -// wPrime := div(l, xMinusZ) + zt := buildVanishingPoly(flatten(points)) + w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + res.W, err = kzg.Commit(w, pk) + if err != nil { + return res, err + } -// res.WPrime, err = kzg.Commit(wPrime, pk) -// if err != nil { -// return res, err -// } + // derive z + z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) + if err != nil { + return res, err + } -// return res, nil -// } + // compute L = ∑ᵢγⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W + accGamma.SetOne() + var gammaiZtMinusSiZ fr.Element + l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + for i 
:= 0; i < len(polynomials); i++ { + + ztMinusSiZ := eval(ztMinusSi[i], z) + + gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) + + copy(bufMaxSizePolynomials, polynomials[i]) + riz := eval(ri[i], z) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) + mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) + for j := 0; j < len(bufMaxSizePolynomials); j++ { + l[j].Add(&l[j], &bufMaxSizePolynomials[j]) + } + + setZero(bufMaxSizePolynomials) + accGamma.Mul(&accGamma, &gamma) + } + ztz := eval(zt, z) + setZero(bufTotalSize) + copy(bufTotalSize, w) + mulByConstant(bufTotalSize, ztz) + for i := 0; i < totalSize-maxSizePolys; i++ { + l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) + } + for i := 0; i < maxSizePolys; i++ { + l[i].Sub(&l[i], &bufTotalSize[i]) + } + + xMinusZ := buildVanishingPoly([]fr.Element{z}) + wPrime := div(l, xMinusZ) + + res.WPrime, err = kzg.Commit(wPrime, pk) + if err != nil { + return res, err + } + + return res, nil +} // BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues // at points. The order matters: the proof validates that the i-th commitment is correctly opened @@ -313,6 +313,18 @@ func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t // ------------------------------ // utils +func flatten(x [][]fr.Element) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + res := make([]fr.Element, 0, nbPoints) + for i := 0; i < len(x); i++ { + res = append(res, x[i]...) 
+ } + return res +} + // sets f to zero func setZero(f []fr.Element) { for i := 0; i < len(f); i++ { @@ -351,7 +363,7 @@ func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { return f } -// returns S_{T\S_{i}} where S_{i}=x[i] +// returns S_{T\Sᵢ} where Sᵢ=x[i] func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { nbPoints := 0 for i := 0; i < len(x); i++ { @@ -405,27 +417,35 @@ func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { return res } +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + // returns f*g using naive multiplication // deg(f)>>deg(g), deg(small) =~ 10 max // buf is used as a buffer and should not be f or g // f and g are not modified -func mul(f, g []fr.Element, buf []fr.Element) []fr.Element { +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { sizeRes := len(f) + len(g) - 1 - if len(buf) < sizeRes { - s := make([]fr.Element, sizeRes-len(buf)) - buf = append(buf, s...) + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) 
} - setZero(buf) + setZero(res) var tmp fr.Element for i := 0; i < len(g); i++ { for j := 0; j < len(f); j++ { tmp.Mul(&f[j], &g[i]) - buf[j+i].Add(&buf[j+i], &tmp) + res[j+i].Add(&res[j+i], &tmp) } } - return buf + return res } // returns f/g (assuming g divides f) From 23cc6fa3169c9eafaf9281c892eeeaf071c1d6ca Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Mon, 11 Mar 2024 11:38:43 +0100 Subject: [PATCH 15/66] fix: used upper bound of the total size in buf --- ecc/bn254/shplonk/shplonk.go | 11 ++---- ecc/bn254/shplonk/shplonk_test.go | 61 +++++++++++++++++-------------- 2 files changed, 36 insertions(+), 36 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index c9eb8b3bce..2ab9fc95e1 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -79,19 +79,13 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } - totalSize := 0 // size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) - for i := 0; i < nbInstances; i++ { - sizeIthTerm := len(polynomials[i]) + nbPoints - len(points[i]) // the degree of the i-th term is len(polynomials[i])-1+nbPoints-len(points[i]) - if totalSize < sizeIthTerm { - totalSize = sizeIthTerm - } - } + totalSize := maxSizePolys + nbPoints // upper bound of the size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbPoints; i++ { + for i := 0; i < nbInstances; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element @@ -110,6 +104,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) ri[i] = interpolate(points[i], res.ClaimedValues[i]) 
sub(bufMaxSizePolynomials, ri[i]) + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) bufTotalSize = mulByConstant(bufTotalSize, accGamma) for j := 0; j < len(bufTotalSize); j++ { diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index dcc4912947..4a718bea51 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -15,12 +15,14 @@ package shplonk import ( + "crypto/sha256" "math/big" "testing" "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/stretchr/testify/require" ) // Test SRS re-used across tests of the KZG scheme @@ -33,42 +35,45 @@ func init() { testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } -// func TestOpening(t *testing.T) { +func TestOpening(t *testing.T) { -// assert := require.New(t) + assert := require.New(t) -// nbPolys := 2 -// sizePoly := make([]int, nbPolys) -// for i := 0; i < nbPolys; i++ { -// sizePoly[i] = 5 + i -// } -// polys := make([][]fr.Element, nbPolys) -// for i := 0; i < nbPolys; i++ { -// polys[i] = make([]fr.Element, sizePoly[i]) -// for j := 0; j < sizePoly[i]; j++ { -// polys[i][j].SetRandom() -// } -// } + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 10 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } -// digests := make([]kzg.Digest, nbPolys) -// for i := 0; i < nbPolys; i++ { -// digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) -// } + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } -// points := make([]fr.Element, nbPolys) -// for i := 0; i < nbPolys; i++ { -// points[i].SetRandom() -// } + points := make([][]fr.Element, nbPolys) + for i := 0; i < 
nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } -// hf := sha256.New() + hf := sha256.New() -// openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) -// assert.NoError(err) + _, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) -// err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) -// assert.NoError(err) + // err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + // assert.NoError(err) -// } +} func TestBuildZtMinusSi(t *testing.T) { From aa57e6a71eac45a8d128f89e9099ee5dc03ffdf0 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 12 Mar 2024 09:39:55 +0100 Subject: [PATCH 16/66] feat: test ok --- ecc/bn254/shplonk/shplonk.go | 202 +++++++++++++++--------------- ecc/bn254/shplonk/shplonk_test.go | 11 +- 2 files changed, 109 insertions(+), 104 deletions(-) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 2ab9fc95e1..b8b3cc1a25 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -19,7 +19,9 @@ package shplonk import ( "errors" "hash" + "math/big" + "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/kzg" @@ -134,14 +136,13 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation for i := 0; i < len(polynomials); i++ { - ztMinusSiZ := eval(ztMinusSi[i], z) - - gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) + ztMinusSiZ := eval(ztMinusSi[i], z) // Z_{T\Sᵢ}(z) + gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) // γⁱZ_{T\Sᵢ}(z) copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) - mulByConstant(bufMaxSizePolynomials, 
gammaiZtMinusSiZ) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } @@ -152,13 +153,13 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E ztz := eval(zt, z) setZero(bufTotalSize) copy(bufTotalSize, w) - mulByConstant(bufTotalSize, ztz) + mulByConstant(bufTotalSize, ztz) // Z_{T}(z)W for i := 0; i < totalSize-maxSizePolys; i++ { l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) } for i := 0; i < maxSizePolys; i++ { l[i].Sub(&l[i], &bufTotalSize[i]) - } + } // L <- L-Z_{T}(z)W xMinusZ := buildVanishingPoly([]fr.Element{z}) wPrime := div(l, xMinusZ) @@ -176,100 +177,99 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E // at the i-th point // dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end // of the original transcript. -// func BatchVerify(proof OpeningProof, digests []kzg.Digest, points []fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { - -// if len(digests) != len(proof.ClaimedValues) { -// return ErrInvalidNumberOfPoints -// } -// if len(digests) != len(points) { -// return ErrInvalidNumberOfPoints -// } - -// // transcript -// fs := fiatshamir.NewTranscript(hf, "gamma", "z") - -// // derive γ -// gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) 
-// if err != nil { -// return err -// } - -// // derive z -// // TODO seems ok that z depend only on W, need to check that carefully -// z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) -// if err != nil { -// return err -// } - -// // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) -// // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] -// var sumGammaiZTminusXiFiz, tmp, accGamma fr.Element -// nbInstances := len(points) -// gammaiZTminusXiz := make([]fr.Element, nbInstances) -// accGamma.SetOne() -// bufPoints := make([]fr.Element, len(points)-1) -// for i := 0; i < len(points); i++ { - -// copy(bufPoints, points[:i]) -// copy(bufPoints[i:], points[i+1:]) - -// ztMinusXi := buildVanishingPoly(bufPoints) -// gammaiZTminusXiz[i] = eval(ztMinusXi, z) -// gammaiZTminusXiz[i].Mul(&accGamma, &gammaiZTminusXiz[i]) - -// tmp.Mul(&gammaiZTminusXiz[i], &proof.ClaimedValues[i]) -// sumGammaiZTminusXiFiz.Add(&sumGammaiZTminusXiFiz, &tmp) - -// accGamma.Mul(&accGamma, &gamma) -// } - -// // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} -// config := ecc.MultiExpConfig{} -// var sumGammaiZtMinusXiComi kzg.Digest -// _, err = sumGammaiZtMinusXiComi.MultiExp(digests, gammaiZTminusXiz, config) -// if err != nil { -// return err -// } - -// var bufBigInt big.Int - -// // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} -// var sumGammaiZTminusXiFizCom kzg.Digest -// var sumGammaiZTminusXiFizBigInt big.Int -// sumGammaiZTminusXiFiz.BigInt(&sumGammaiZTminusXiFizBigInt) -// sumGammaiZTminusXiFizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusXiFizBigInt) - -// // Z_{T}(z)[W] -// zt := buildVanishingPoly(points) -// ztz := eval(zt, z) -// var ztW kzg.Digest -// ztz.BigInt(&bufBigInt) -// ztW.ScalarMultiplication(&proof.W, &bufBigInt) - -// // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] -// var f kzg.Digest -// f.Sub(&sumGammaiZtMinusXiComi, &sumGammaiZTminusXiFizCom). 
-// Sub(&f, &ztW) - -// // F+zW' -// var zWPrime kzg.Digest -// z.BigInt(&bufBigInt) -// zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) -// f.Add(&f, &zWPrime) -// f.Neg(&f) - -// // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) -// check, err := bn254.PairingCheckFixedQ( -// []bn254.G1Affine{f, proof.WPrime}, -// vk.Lines[:], -// ) - -// if !check { -// return ErrVerifyOpeningProof -// } - -// return nil -// } +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } + + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusSiRiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusSiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + ri := make([][]fr.Element, nbInstances) + for i := 0; i < len(points); i++ { + + ztMinusSi := buildZtMinusSi(points, i) // Z_{T-S_{i}}(X) + gammaiZTminusSiz[i] = eval(ztMinusSi, z) // Z_{T-S_{i}}(z) + gammaiZTminusSiz[i].Mul(&accGamma, &gammaiZTminusSiz[i]) // \gamma^{i} Z_{T-S_{i}}(z) + + ri[i] = interpolate(points[i], proof.ClaimedValues[i]) + riz := eval(ri[i], z) // r_{i}(z) + tmp.Mul(&gammaiZTminusSiz[i], &riz) // Z_{T-S_{i}}(z)r_{i}(z) + sumGammaiZTminusSiRiz.Add(&sumGammaiZTminusSiRiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := 
ecc.MultiExpConfig{} + var sumGammaiZtMinusSiComi kzg.Digest + _, err = sumGammaiZtMinusSiComi.MultiExp(digests, gammaiZTminusSiz, config) + if err != nil { + return err + } + + var bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusSiRizCom kzg.Digest + var sumGammaiZTminusSiRizBigInt big.Int + sumGammaiZTminusSiRiz.BigInt(&sumGammaiZTminusSiRizBigInt) + sumGammaiZTminusSiRizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusSiRizBigInt) + + // Z_{T}(z)[W] + zt := buildVanishingPoly(flatten(points)) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusSiComi, &sumGammaiZTminusSiRizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) + f.Add(&f, &zWPrime) + f.Neg(&f) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := bn254.PairingCheckFixedQ( + []bn254.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } + + return nil +} // deriveChallenge derives a challenge using Fiat Shamir to polynomials. // The arguments are added to the transcript in the order in which they are given. 
diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 4a718bea51..2b082d16c8 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -67,11 +67,16 @@ func TestOpening(t *testing.T) { hf := sha256.New() - _, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) assert.NoError(err) - // err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) - // assert.NoError(err) + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) } From 3c068623b62584b2425836a52ff51a46b0acf746 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 12 Mar 2024 15:42:44 +0100 Subject: [PATCH 17/66] feat: generator ok --- internal/generator/main.go | 4 + internal/generator/shplonk/generator.go | 23 + .../generator/shplonk/template/doc.go.tmpl | 2 + .../shplonk/template/shplonk.go.tmpl | 444 ++++++++++++++++++ .../shplonk/template/shplonk.test.go.tmpl | 302 ++++++++++++ 5 files changed, 775 insertions(+) create mode 100644 internal/generator/shplonk/generator.go create mode 100644 internal/generator/shplonk/template/doc.go.tmpl create mode 100644 internal/generator/shplonk/template/shplonk.go.tmpl create mode 100644 internal/generator/shplonk/template/shplonk.test.go.tmpl diff --git a/internal/generator/main.go b/internal/generator/main.go index 389f96c2e0..32802d3fee 100644 --- a/internal/generator/main.go +++ b/internal/generator/main.go @@ -27,6 +27,7 @@ import ( "github.com/consensys/gnark-crypto/internal/generator/permutation" "github.com/consensys/gnark-crypto/internal/generator/plookup" "github.com/consensys/gnark-crypto/internal/generator/polynomial" + "github.com/consensys/gnark-crypto/internal/generator/shplonk" 
"github.com/consensys/gnark-crypto/internal/generator/sis" "github.com/consensys/gnark-crypto/internal/generator/sumcheck" "github.com/consensys/gnark-crypto/internal/generator/test_vector_utils" @@ -99,6 +100,9 @@ func main() { // generate kzg on fr assertNoError(kzg.Generate(conf, filepath.Join(curveDir, "kzg"), bgen)) + // generate shplonk on fr + assertNoError(shplonk.Generate(conf, filepath.Join(curveDir, "shplonk"), bgen)) + // generate pedersen on fr assertNoError(pedersen.Generate(conf, filepath.Join(curveDir, "fr", "pedersen"), bgen)) diff --git a/internal/generator/shplonk/generator.go b/internal/generator/shplonk/generator.go new file mode 100644 index 0000000000..789ab126b4 --- /dev/null +++ b/internal/generator/shplonk/generator.go @@ -0,0 +1,23 @@ +package shplonk + +import ( + "path/filepath" + + "github.com/consensys/bavard" + "github.com/consensys/gnark-crypto/internal/generator/config" +) + +func Generate(conf config.Curve, baseDir string, bgen *bavard.BatchGenerator) error { + + // kzg commitment scheme + conf.Package = "shplonk" + entries := []bavard.Entry{ + {File: filepath.Join(baseDir, "doc.go"), Templates: []string{"doc.go.tmpl"}}, + {File: filepath.Join(baseDir, "shplonk.go"), Templates: []string{"shplonk.go.tmpl"}}, + {File: filepath.Join(baseDir, "shplonk_test.go"), Templates: []string{"shplonk.test.go.tmpl"}}, + // {File: filepath.Join(baseDir, "marshal.go"), Templates: []string{"marshal.go.tmpl"}}, + // {File: filepath.Join(baseDir, "utils.go"), Templates: []string{"utils.go.tmpl"}}, + } + return bgen.Generate(conf, conf.Package, "./shplonk/template/", entries...) 
+ +} diff --git a/internal/generator/shplonk/template/doc.go.tmpl b/internal/generator/shplonk/template/doc.go.tmpl new file mode 100644 index 0000000000..3a63b76f90 --- /dev/null +++ b/internal/generator/shplonk/template/doc.go.tmpl @@ -0,0 +1,2 @@ +// Package {{.Package}} provides a SHPLONK commitment scheme, cf https://eprint.iacr.org/2020/081.pdf +package {{.Package}} \ No newline at end of file diff --git a/internal/generator/shplonk/template/shplonk.go.tmpl b/internal/generator/shplonk/template/shplonk.go.tmpl new file mode 100644 index 0000000000..c5f8fcbee4 --- /dev/null +++ b/internal/generator/shplonk/template/shplonk.go.tmpl @@ -0,0 +1,444 @@ +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" +) + +var ( + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") +) + +// OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. +// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // W = ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r)/Z_{T} where Z_{T} is the vanishing polynomial on the (Sᵢ)_{i} + // and r interpolates fᵢ(Sᵢ) on (Sᵢ) + W {{ .CurvePackage }}.G1Affine + + // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X) + WPrime {{ .CurvePackage }}.G1Affine + + // ClaimedValues[i] are the values of fᵢ on Sᵢ + ClaimedValues [][]fr.Element +} + +// BatchOpen opens the list of polynomials on points, where the i-th polynomials is opend at points[i]. 
+func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + + var res OpeningProof + + nbInstances := len(polynomials) + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return res, err + } + + // compute the size of the linear combination + maxSizePolys := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) + } + } + nbPoints := 0 + sizeSi := make([]int, len(points)) + for i := 0; i < nbInstances; i++ { + nbPoints += len(points[i]) + sizeSi[i] = len(points[i]) + } + totalSize := maxSizePolys + nbPoints // upper bound of the size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) + + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) + bufTotalSize := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation + res.ClaimedValues = make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + res.ClaimedValues[i] = make([]fr.Element, len(points[i])) + } + var accGamma fr.Element + accGamma.SetOne() + + ztMinusSi := make([][]fr.Element, nbInstances) + ri := make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + + for j := 0; j < len(points[i]); j++ { + res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) + } + + ztMinusSi[i] = buildZtMinusSi(points, i) + + copy(bufMaxSizePolynomials, polynomials[i]) + ri[i] = interpolate(points[i], res.ClaimedValues[i]) + sub(bufMaxSizePolynomials, ri[i]) + + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) + bufTotalSize = mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + 
f[j].Add(&f[j], &bufTotalSize[j]) + } + + accGamma.Mul(&accGamma, &gamma) + setZero(bufMaxSizePolynomials) + } + + zt := buildVanishingPoly(flatten(points)) + w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + res.W, err = kzg.Commit(w, pk) + if err != nil { + return res, err + } + + // derive z + z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) + if err != nil { + return res, err + } + + // compute L = ∑ᵢγⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W + accGamma.SetOne() + var gammaiZtMinusSiZ fr.Element + l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + for i := 0; i < len(polynomials); i++ { + + ztMinusSiZ := eval(ztMinusSi[i], z) // Z_{T\Sᵢ}(z) + gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) // γⁱZ_{T\Sᵢ}(z) + + copy(bufMaxSizePolynomials, polynomials[i]) + riz := eval(ri[i], z) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + for j := 0; j < len(bufMaxSizePolynomials); j++ { + l[j].Add(&l[j], &bufMaxSizePolynomials[j]) + } + + setZero(bufMaxSizePolynomials) + accGamma.Mul(&accGamma, &gamma) + } + ztz := eval(zt, z) + setZero(bufTotalSize) + copy(bufTotalSize, w) + mulByConstant(bufTotalSize, ztz) // Z_{T}(z)W + for i := 0; i < totalSize-maxSizePolys; i++ { + l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) + } + for i := 0; i < maxSizePolys; i++ { + l[i].Sub(&l[i], &bufTotalSize[i]) + } // L <- L-Z_{T}(z)W + + xMinusZ := buildVanishingPoly([]fr.Element{z}) + wPrime := div(l, xMinusZ) + + res.WPrime, err = kzg.Commit(wPrime, pk) + if err != nil { + return res, err + } + + return res, nil +} + +// BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues +// at points. 
The order matters: the proof validates that the i-th commitment is correctly opened +// at the i-th point +// dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end +// of the original transcript. +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } + + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusSiRiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusSiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + ri := make([][]fr.Element, nbInstances) + for i := 0; i < len(points); i++ { + + ztMinusSi := buildZtMinusSi(points, i) // Z_{T-S_{i}}(X) + gammaiZTminusSiz[i] = eval(ztMinusSi, z) // Z_{T-S_{i}}(z) + gammaiZTminusSiz[i].Mul(&accGamma, &gammaiZTminusSiz[i]) // \gamma^{i} Z_{T-S_{i}}(z) + + ri[i] = interpolate(points[i], proof.ClaimedValues[i]) + riz := eval(ri[i], z) // r_{i}(z) + tmp.Mul(&gammaiZTminusSiz[i], &riz) // Z_{T-S_{i}}(z)r_{i}(z) + sumGammaiZTminusSiRiz.Add(&sumGammaiZTminusSiRiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := ecc.MultiExpConfig{} + var sumGammaiZtMinusSiComi kzg.Digest + _, err = sumGammaiZtMinusSiComi.MultiExp(digests, gammaiZTminusSiz, config) + if err != nil { + return err + } + + var 
bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusSiRizCom kzg.Digest + var sumGammaiZTminusSiRizBigInt big.Int + sumGammaiZTminusSiRiz.BigInt(&sumGammaiZTminusSiRizBigInt) + sumGammaiZTminusSiRizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusSiRizBigInt) + + // Z_{T}(z)[W] + zt := buildVanishingPoly(flatten(points)) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusSiComi, &sumGammaiZTminusSiRizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) + f.Add(&f, &zWPrime) + f.Neg(&f) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := {{ .CurvePackage }}.PairingCheckFixedQ( + []{{ .CurvePackage }}.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } + + return nil +} + +// deriveChallenge derives a challenge using Fiat Shamir to polynomials. +// The arguments are added to the transcript in the order in which they are given. 
+func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) { + + // derive the challenge gamma, binded to the point and the commitments + for i := range points { + for j := range points[i] { + if err := t.Bind(name, points[i][j].Marshal()); err != nil { + return fr.Element{}, err + } + } + } + for i := range digests { + if err := t.Bind(name, digests[i].Marshal()); err != nil { + return fr.Element{}, err + } + } + + for i := 0; i < len(dataTranscript); i++ { + if err := t.Bind(name, dataTranscript[i]); err != nil { + return fr.Element{}, err + } + } + + gammaByte, err := t.ComputeChallenge(name) + if err != nil { + return fr.Element{}, err + } + var gamma fr.Element + gamma.SetBytes(gammaByte) + + return gamma, nil +} + +// ------------------------------ +// utils + +func flatten(x [][]fr.Element) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + res := make([]fr.Element, 0, nbPoints) + for i := 0; i < len(x); i++ { + res = append(res, x[i]...) 
+ } + return res +} + +// sets f to zero +func setZero(f []fr.Element) { + for i := 0; i < len(f); i++ { + f[i].SetZero() + } +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} + +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) 
+ } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) 
+ } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/internal/generator/shplonk/template/shplonk.test.go.tmpl b/internal/generator/shplonk/template/shplonk.test.go.tmpl new file mode 100644 index 0000000000..d3f6611b64 --- /dev/null +++ b/internal/generator/shplonk/template/shplonk.test.go.tmpl @@ -0,0 +1,302 @@ +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 10 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } 
+ } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point 
should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} From 1f69dbce9ff37429f7a0a6969cb8bec0790be728 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 
12 Mar 2024 16:05:11 +0100 Subject: [PATCH 18/66] feat: marshal ok --- ecc/bn254/shplonk/marshal.go | 60 ++++++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100644 ecc/bn254/shplonk/marshal.go diff --git a/ecc/bn254/shplonk/marshal.go b/ecc/bn254/shplonk/marshal.go new file mode 100644 index 0000000000..66eaf0302f --- /dev/null +++ b/ecc/bn254/shplonk/marshal.go @@ -0,0 +1,60 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package shplonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bn254" +) + +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bn254.NewDecoder(r) + + toDecode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of a OpeningProof +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bn254.NewEncoder(w) + + toEncode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} From 8a58a0921ac7b3dc5d93c64c13fadbd58522a50d Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 12 Mar 2024 16:10:12 +0100 Subject: [PATCH 19/66] feat: additions of template files marshal shplonk --- internal/generator/shplonk/generator.go | 2 +- .../shplonk/template/marshal.go.tmpl | 44 +++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 internal/generator/shplonk/template/marshal.go.tmpl diff --git a/internal/generator/shplonk/generator.go b/internal/generator/shplonk/generator.go index 789ab126b4..94e1118300 100644 --- a/internal/generator/shplonk/generator.go +++ b/internal/generator/shplonk/generator.go @@ -15,7 +15,7 @@ func Generate(conf config.Curve, baseDir string, bgen *bavard.BatchGenerator) er {File: filepath.Join(baseDir, "doc.go"), Templates: []string{"doc.go.tmpl"}}, {File: filepath.Join(baseDir, "shplonk.go"), Templates: []string{"shplonk.go.tmpl"}}, {File: filepath.Join(baseDir, "shplonk_test.go"), Templates: []string{"shplonk.test.go.tmpl"}}, - // {File: filepath.Join(baseDir, "marshal.go"), Templates: []string{"marshal.go.tmpl"}}, + {File: filepath.Join(baseDir, "marshal.go"), Templates: 
[]string{"marshal.go.tmpl"}}, // {File: filepath.Join(baseDir, "utils.go"), Templates: []string{"utils.go.tmpl"}}, } return bgen.Generate(conf, conf.Package, "./shplonk/template/", entries...) diff --git a/internal/generator/shplonk/template/marshal.go.tmpl b/internal/generator/shplonk/template/marshal.go.tmpl new file mode 100644 index 0000000000..316f852ef5 --- /dev/null +++ b/internal/generator/shplonk/template/marshal.go.tmpl @@ -0,0 +1,44 @@ +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}" +) + +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := {{ .CurvePackage }}.NewDecoder(r) + + toDecode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of a OpeningProof +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := {{ .CurvePackage }}.NewEncoder(w) + + toEncode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} From 39a4e67ddf1b3a782ea4a983fe90fe540c7c18cd Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 12 Mar 2024 16:11:11 +0100 Subject: [PATCH 20/66] feat: code gen ok --- ecc/bls12-377/shplonk/doc.go | 18 + ecc/bls12-377/shplonk/marshal.go | 62 ++++ ecc/bls12-377/shplonk/shplonk.go | 462 ++++++++++++++++++++++++++ ecc/bls12-377/shplonk/shplonk_test.go | 320 ++++++++++++++++++ ecc/bls12-378/shplonk/doc.go | 18 + ecc/bls12-378/shplonk/marshal.go | 62 ++++ ecc/bls12-378/shplonk/shplonk.go | 462 ++++++++++++++++++++++++++ ecc/bls12-378/shplonk/shplonk_test.go | 320 ++++++++++++++++++ ecc/bls12-381/shplonk/doc.go | 18 + ecc/bls12-381/shplonk/marshal.go | 62 ++++ ecc/bls12-381/shplonk/shplonk.go | 462 
++++++++++++++++++++++++++ ecc/bls12-381/shplonk/shplonk_test.go | 320 ++++++++++++++++++ ecc/bls24-315/shplonk/doc.go | 18 + ecc/bls24-315/shplonk/marshal.go | 62 ++++ ecc/bls24-315/shplonk/shplonk.go | 462 ++++++++++++++++++++++++++ ecc/bls24-315/shplonk/shplonk_test.go | 320 ++++++++++++++++++ ecc/bls24-317/shplonk/doc.go | 18 + ecc/bls24-317/shplonk/marshal.go | 62 ++++ ecc/bls24-317/shplonk/shplonk.go | 462 ++++++++++++++++++++++++++ ecc/bls24-317/shplonk/shplonk_test.go | 320 ++++++++++++++++++ ecc/bn254/shplonk/doc.go | 18 + ecc/bn254/shplonk/marshal.go | 2 + ecc/bn254/shplonk/shplonk.go | 2 +- ecc/bn254/shplonk/shplonk_test.go | 2 + ecc/bw6-633/shplonk/doc.go | 18 + ecc/bw6-633/shplonk/marshal.go | 62 ++++ ecc/bw6-633/shplonk/shplonk.go | 462 ++++++++++++++++++++++++++ ecc/bw6-633/shplonk/shplonk_test.go | 320 ++++++++++++++++++ ecc/bw6-756/shplonk/doc.go | 18 + ecc/bw6-756/shplonk/marshal.go | 62 ++++ ecc/bw6-756/shplonk/shplonk.go | 462 ++++++++++++++++++++++++++ ecc/bw6-756/shplonk/shplonk_test.go | 320 ++++++++++++++++++ ecc/bw6-761/shplonk/doc.go | 18 + ecc/bw6-761/shplonk/marshal.go | 62 ++++ ecc/bw6-761/shplonk/shplonk.go | 462 ++++++++++++++++++++++++++ ecc/bw6-761/shplonk/shplonk_test.go | 320 ++++++++++++++++++ 36 files changed, 6919 insertions(+), 1 deletion(-) create mode 100644 ecc/bls12-377/shplonk/doc.go create mode 100644 ecc/bls12-377/shplonk/marshal.go create mode 100644 ecc/bls12-377/shplonk/shplonk.go create mode 100644 ecc/bls12-377/shplonk/shplonk_test.go create mode 100644 ecc/bls12-378/shplonk/doc.go create mode 100644 ecc/bls12-378/shplonk/marshal.go create mode 100644 ecc/bls12-378/shplonk/shplonk.go create mode 100644 ecc/bls12-378/shplonk/shplonk_test.go create mode 100644 ecc/bls12-381/shplonk/doc.go create mode 100644 ecc/bls12-381/shplonk/marshal.go create mode 100644 ecc/bls12-381/shplonk/shplonk.go create mode 100644 ecc/bls12-381/shplonk/shplonk_test.go create mode 100644 ecc/bls24-315/shplonk/doc.go create mode 100644 
ecc/bls24-315/shplonk/marshal.go create mode 100644 ecc/bls24-315/shplonk/shplonk.go create mode 100644 ecc/bls24-315/shplonk/shplonk_test.go create mode 100644 ecc/bls24-317/shplonk/doc.go create mode 100644 ecc/bls24-317/shplonk/marshal.go create mode 100644 ecc/bls24-317/shplonk/shplonk.go create mode 100644 ecc/bls24-317/shplonk/shplonk_test.go create mode 100644 ecc/bn254/shplonk/doc.go create mode 100644 ecc/bw6-633/shplonk/doc.go create mode 100644 ecc/bw6-633/shplonk/marshal.go create mode 100644 ecc/bw6-633/shplonk/shplonk.go create mode 100644 ecc/bw6-633/shplonk/shplonk_test.go create mode 100644 ecc/bw6-756/shplonk/doc.go create mode 100644 ecc/bw6-756/shplonk/marshal.go create mode 100644 ecc/bw6-756/shplonk/shplonk.go create mode 100644 ecc/bw6-756/shplonk/shplonk_test.go create mode 100644 ecc/bw6-761/shplonk/doc.go create mode 100644 ecc/bw6-761/shplonk/marshal.go create mode 100644 ecc/bw6-761/shplonk/shplonk.go create mode 100644 ecc/bw6-761/shplonk/shplonk_test.go diff --git a/ecc/bls12-377/shplonk/doc.go b/ecc/bls12-377/shplonk/doc.go new file mode 100644 index 0000000000..e048f46009 --- /dev/null +++ b/ecc/bls12-377/shplonk/doc.go @@ -0,0 +1,18 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package shplonk provides a SHPLONK commitment scheme, cf https://eprint.iacr.org/2020/081.pdf +package shplonk diff --git a/ecc/bls12-377/shplonk/marshal.go b/ecc/bls12-377/shplonk/marshal.go new file mode 100644 index 0000000000..9e03a86d06 --- /dev/null +++ b/ecc/bls12-377/shplonk/marshal.go @@ -0,0 +1,62 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bls12-377" +) + +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bls12377.NewDecoder(r) + + toDecode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of a OpeningProof +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bls12377.NewEncoder(w) + + toEncode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bls12-377/shplonk/shplonk.go b/ecc/bls12-377/shplonk/shplonk.go new file mode 100644 index 
0000000000..13faee07d4 --- /dev/null +++ b/ecc/bls12-377/shplonk/shplonk.go @@ -0,0 +1,462 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-377" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" +) + +var ( + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") +) + +// OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. +// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // W = ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r)/Z_{T} where Z_{T} is the vanishing polynomial on the (Sᵢ)_{i} + // and r interpolates fᵢ(Sᵢ) on (Sᵢ) + W bls12377.G1Affine + + // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X) + WPrime bls12377.G1Affine + + // ClaimedValues[i] are the values of fᵢ on Sᵢ + ClaimedValues [][]fr.Element +} + +// BatchOpen opens the list of polynomials on points, where the i-th polynomials is opend at points[i]. 
+func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + + var res OpeningProof + + nbInstances := len(polynomials) + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return res, err + } + + // compute the size of the linear combination + maxSizePolys := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) + } + } + nbPoints := 0 + sizeSi := make([]int, len(points)) + for i := 0; i < nbInstances; i++ { + nbPoints += len(points[i]) + sizeSi[i] = len(points[i]) + } + totalSize := maxSizePolys + nbPoints // upper bound of the size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) + + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) + bufTotalSize := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation + res.ClaimedValues = make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + res.ClaimedValues[i] = make([]fr.Element, len(points[i])) + } + var accGamma fr.Element + accGamma.SetOne() + + ztMinusSi := make([][]fr.Element, nbInstances) + ri := make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + + for j := 0; j < len(points[i]); j++ { + res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) + } + + ztMinusSi[i] = buildZtMinusSi(points, i) + + copy(bufMaxSizePolynomials, polynomials[i]) + ri[i] = interpolate(points[i], res.ClaimedValues[i]) + sub(bufMaxSizePolynomials, ri[i]) + + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) + bufTotalSize = mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + 
f[j].Add(&f[j], &bufTotalSize[j]) + } + + accGamma.Mul(&accGamma, &gamma) + setZero(bufMaxSizePolynomials) + } + + zt := buildVanishingPoly(flatten(points)) + w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + res.W, err = kzg.Commit(w, pk) + if err != nil { + return res, err + } + + // derive z + z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) + if err != nil { + return res, err + } + + // compute L = ∑ᵢγⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W + accGamma.SetOne() + var gammaiZtMinusSiZ fr.Element + l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + for i := 0; i < len(polynomials); i++ { + + ztMinusSiZ := eval(ztMinusSi[i], z) // Z_{T\Sᵢ}(z) + gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) // γⁱZ_{T\Sᵢ}(z) + + copy(bufMaxSizePolynomials, polynomials[i]) + riz := eval(ri[i], z) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + for j := 0; j < len(bufMaxSizePolynomials); j++ { + l[j].Add(&l[j], &bufMaxSizePolynomials[j]) + } + + setZero(bufMaxSizePolynomials) + accGamma.Mul(&accGamma, &gamma) + } + ztz := eval(zt, z) + setZero(bufTotalSize) + copy(bufTotalSize, w) + mulByConstant(bufTotalSize, ztz) // Z_{T}(z)W + for i := 0; i < totalSize-maxSizePolys; i++ { + l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) + } + for i := 0; i < maxSizePolys; i++ { + l[i].Sub(&l[i], &bufTotalSize[i]) + } // L <- L-Z_{T}(z)W + + xMinusZ := buildVanishingPoly([]fr.Element{z}) + wPrime := div(l, xMinusZ) + + res.WPrime, err = kzg.Commit(wPrime, pk) + if err != nil { + return res, err + } + + return res, nil +} + +// BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues +// at points. 
The order matters: the proof validates that the i-th commitment is correctly opened +// at the i-th point +// dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end +// of the original transcript. +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } + + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusSiRiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusSiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + ri := make([][]fr.Element, nbInstances) + for i := 0; i < len(points); i++ { + + ztMinusSi := buildZtMinusSi(points, i) // Z_{T-S_{i}}(X) + gammaiZTminusSiz[i] = eval(ztMinusSi, z) // Z_{T-S_{i}}(z) + gammaiZTminusSiz[i].Mul(&accGamma, &gammaiZTminusSiz[i]) // \gamma^{i} Z_{T-S_{i}}(z) + + ri[i] = interpolate(points[i], proof.ClaimedValues[i]) + riz := eval(ri[i], z) // r_{i}(z) + tmp.Mul(&gammaiZTminusSiz[i], &riz) // Z_{T-S_{i}}(z)r_{i}(z) + sumGammaiZTminusSiRiz.Add(&sumGammaiZTminusSiRiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := ecc.MultiExpConfig{} + var sumGammaiZtMinusSiComi kzg.Digest + _, err = sumGammaiZtMinusSiComi.MultiExp(digests, gammaiZTminusSiz, config) + if err != nil { + return err + } + + var 
bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusSiRizCom kzg.Digest + var sumGammaiZTminusSiRizBigInt big.Int + sumGammaiZTminusSiRiz.BigInt(&sumGammaiZTminusSiRizBigInt) + sumGammaiZTminusSiRizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusSiRizBigInt) + + // Z_{T}(z)[W] + zt := buildVanishingPoly(flatten(points)) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusSiComi, &sumGammaiZTminusSiRizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) + f.Add(&f, &zWPrime) + f.Neg(&f) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := bls12377.PairingCheckFixedQ( + []bls12377.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } + + return nil +} + +// deriveChallenge derives a challenge using Fiat Shamir to polynomials. +// The arguments are added to the transcript in the order in which they are given. 
+func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) { + + // derive the challenge gamma, binded to the point and the commitments + for i := range points { + for j := range points[i] { + if err := t.Bind(name, points[i][j].Marshal()); err != nil { + return fr.Element{}, err + } + } + } + for i := range digests { + if err := t.Bind(name, digests[i].Marshal()); err != nil { + return fr.Element{}, err + } + } + + for i := 0; i < len(dataTranscript); i++ { + if err := t.Bind(name, dataTranscript[i]); err != nil { + return fr.Element{}, err + } + } + + gammaByte, err := t.ComputeChallenge(name) + if err != nil { + return fr.Element{}, err + } + var gamma fr.Element + gamma.SetBytes(gammaByte) + + return gamma, nil +} + +// ------------------------------ +// utils + +func flatten(x [][]fr.Element) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + res := make([]fr.Element, 0, nbPoints) + for i := 0; i < len(x); i++ { + res = append(res, x[i]...) 
+ } + return res +} + +// sets f to zero +func setZero(f []fr.Element) { + for i := 0; i < len(f); i++ { + f[i].SetZero() + } +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} + +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) 
+ } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) 
+ } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bls12-377/shplonk/shplonk_test.go b/ecc/bls12-377/shplonk/shplonk_test.go new file mode 100644 index 0000000000..69a26be1b8 --- /dev/null +++ b/ecc/bls12-377/shplonk/shplonk_test.go @@ -0,0 +1,320 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 10 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < 
nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a 
fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to 
f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bls12-378/shplonk/doc.go b/ecc/bls12-378/shplonk/doc.go new file mode 100644 index 0000000000..e048f46009 --- /dev/null +++ b/ecc/bls12-378/shplonk/doc.go @@ -0,0 +1,18 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package shplonk provides a SHPLONK commitment scheme, cf https://eprint.iacr.org/2020/081.pdf +package shplonk diff --git a/ecc/bls12-378/shplonk/marshal.go b/ecc/bls12-378/shplonk/marshal.go new file mode 100644 index 0000000000..6774448de5 --- /dev/null +++ b/ecc/bls12-378/shplonk/marshal.go @@ -0,0 +1,62 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bls12-378" +) + +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bls12378.NewDecoder(r) + + toDecode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of a OpeningProof +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bls12378.NewEncoder(w) + + toEncode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bls12-378/shplonk/shplonk.go b/ecc/bls12-378/shplonk/shplonk.go new file mode 100644 index 0000000000..1f447522ab --- /dev/null +++ b/ecc/bls12-378/shplonk/shplonk.go @@ -0,0 +1,462 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-378" + "github.com/consensys/gnark-crypto/ecc/bls12-378/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-378/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" +) + +var ( + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") +) + +// OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. +// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // W = ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r)/Z_{T} where Z_{T} is the vanishing polynomial on the (Sᵢ)_{i} + // and r interpolates fᵢ(Sᵢ) on (Sᵢ) + W bls12378.G1Affine + + // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X) + WPrime bls12378.G1Affine + + // ClaimedValues[i] are the values of fᵢ on Sᵢ + ClaimedValues [][]fr.Element +} + +// BatchOpen opens the list of polynomials on points, where the i-th polynomials is opend at points[i]. 
+func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + + var res OpeningProof + + nbInstances := len(polynomials) + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return res, err + } + + // compute the size of the linear combination + maxSizePolys := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) + } + } + nbPoints := 0 + sizeSi := make([]int, len(points)) + for i := 0; i < nbInstances; i++ { + nbPoints += len(points[i]) + sizeSi[i] = len(points[i]) + } + totalSize := maxSizePolys + nbPoints // upper bound of the size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) + + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) + bufTotalSize := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation + res.ClaimedValues = make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + res.ClaimedValues[i] = make([]fr.Element, len(points[i])) + } + var accGamma fr.Element + accGamma.SetOne() + + ztMinusSi := make([][]fr.Element, nbInstances) + ri := make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + + for j := 0; j < len(points[i]); j++ { + res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) + } + + ztMinusSi[i] = buildZtMinusSi(points, i) + + copy(bufMaxSizePolynomials, polynomials[i]) + ri[i] = interpolate(points[i], res.ClaimedValues[i]) + sub(bufMaxSizePolynomials, ri[i]) + + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) + bufTotalSize = mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + 
f[j].Add(&f[j], &bufTotalSize[j]) + } + + accGamma.Mul(&accGamma, &gamma) + setZero(bufMaxSizePolynomials) + } + + zt := buildVanishingPoly(flatten(points)) + w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + res.W, err = kzg.Commit(w, pk) + if err != nil { + return res, err + } + + // derive z + z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) + if err != nil { + return res, err + } + + // compute L = ∑ᵢγⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W + accGamma.SetOne() + var gammaiZtMinusSiZ fr.Element + l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + for i := 0; i < len(polynomials); i++ { + + ztMinusSiZ := eval(ztMinusSi[i], z) // Z_{T\Sᵢ}(z) + gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) // γⁱZ_{T\Sᵢ}(z) + + copy(bufMaxSizePolynomials, polynomials[i]) + riz := eval(ri[i], z) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + for j := 0; j < len(bufMaxSizePolynomials); j++ { + l[j].Add(&l[j], &bufMaxSizePolynomials[j]) + } + + setZero(bufMaxSizePolynomials) + accGamma.Mul(&accGamma, &gamma) + } + ztz := eval(zt, z) + setZero(bufTotalSize) + copy(bufTotalSize, w) + mulByConstant(bufTotalSize, ztz) // Z_{T}(z)W + for i := 0; i < totalSize-maxSizePolys; i++ { + l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) + } + for i := 0; i < maxSizePolys; i++ { + l[i].Sub(&l[i], &bufTotalSize[i]) + } // L <- L-Z_{T}(z)W + + xMinusZ := buildVanishingPoly([]fr.Element{z}) + wPrime := div(l, xMinusZ) + + res.WPrime, err = kzg.Commit(wPrime, pk) + if err != nil { + return res, err + } + + return res, nil +} + +// BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues +// at points. 
The order matters: the proof validates that the i-th commitment is correctly opened +// at the i-th point +// dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end +// of the original transcript. +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } + + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusSiRiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusSiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + ri := make([][]fr.Element, nbInstances) + for i := 0; i < len(points); i++ { + + ztMinusSi := buildZtMinusSi(points, i) // Z_{T-S_{i}}(X) + gammaiZTminusSiz[i] = eval(ztMinusSi, z) // Z_{T-S_{i}}(z) + gammaiZTminusSiz[i].Mul(&accGamma, &gammaiZTminusSiz[i]) // \gamma^{i} Z_{T-S_{i}}(z) + + ri[i] = interpolate(points[i], proof.ClaimedValues[i]) + riz := eval(ri[i], z) // r_{i}(z) + tmp.Mul(&gammaiZTminusSiz[i], &riz) // Z_{T-S_{i}}(z)r_{i}(z) + sumGammaiZTminusSiRiz.Add(&sumGammaiZTminusSiRiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := ecc.MultiExpConfig{} + var sumGammaiZtMinusSiComi kzg.Digest + _, err = sumGammaiZtMinusSiComi.MultiExp(digests, gammaiZTminusSiz, config) + if err != nil { + return err + } + + var 
bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusSiRizCom kzg.Digest + var sumGammaiZTminusSiRizBigInt big.Int + sumGammaiZTminusSiRiz.BigInt(&sumGammaiZTminusSiRizBigInt) + sumGammaiZTminusSiRizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusSiRizBigInt) + + // Z_{T}(z)[W] + zt := buildVanishingPoly(flatten(points)) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusSiComi, &sumGammaiZTminusSiRizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) + f.Add(&f, &zWPrime) + f.Neg(&f) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := bls12378.PairingCheckFixedQ( + []bls12378.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } + + return nil +} + +// deriveChallenge derives a challenge using Fiat Shamir to polynomials. +// The arguments are added to the transcript in the order in which they are given. 
+func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) { + + // derive the challenge gamma, binded to the point and the commitments + for i := range points { + for j := range points[i] { + if err := t.Bind(name, points[i][j].Marshal()); err != nil { + return fr.Element{}, err + } + } + } + for i := range digests { + if err := t.Bind(name, digests[i].Marshal()); err != nil { + return fr.Element{}, err + } + } + + for i := 0; i < len(dataTranscript); i++ { + if err := t.Bind(name, dataTranscript[i]); err != nil { + return fr.Element{}, err + } + } + + gammaByte, err := t.ComputeChallenge(name) + if err != nil { + return fr.Element{}, err + } + var gamma fr.Element + gamma.SetBytes(gammaByte) + + return gamma, nil +} + +// ------------------------------ +// utils + +func flatten(x [][]fr.Element) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + res := make([]fr.Element, 0, nbPoints) + for i := 0; i < len(x); i++ { + res = append(res, x[i]...) 
+ } + return res +} + +// sets f to zero +func setZero(f []fr.Element) { + for i := 0; i < len(f); i++ { + f[i].SetZero() + } +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} + +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) 
+ } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) 
+	}
+	setZero(res)
+
+	var tmp fr.Element
+	for i := 0; i < len(g); i++ {
+		for j := 0; j < len(f); j++ {
+			tmp.Mul(&f[j], &g[i])
+			res[j+i].Add(&res[j+i], &tmp)
+		}
+	}
+	return res
+}
+
+// returns f/g (assuming g divides f)
+// OK to not use fft if deg(g) is small
+// g's leading coefficient is assumed to be 1
+// f memory is re-used for the result, need to pass a copy to not modify it
+func div(f, g []fr.Element) []fr.Element {
+	sizef := len(f)
+	sizeg := len(g)
+	stop := sizeg - 1
+	var t fr.Element
+	for i := sizef - 2; i >= stop; i-- {
+		for j := 0; j < sizeg-1; j++ {
+			t.Mul(&f[i+1], &g[sizeg-2-j])
+			f[i-j].Sub(&f[i-j], &t)
+		}
+	}
+	return f[sizeg-1:]
+}
diff --git a/ecc/bls12-378/shplonk/shplonk_test.go b/ecc/bls12-378/shplonk/shplonk_test.go
new file mode 100644
index 0000000000..dcb5f69421
--- /dev/null
+++ b/ecc/bls12-378/shplonk/shplonk_test.go
@@ -0,0 +1,320 @@
+// Copyright 2020 Consensys Software Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-378/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-378/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 10 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < 
nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a 
fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to 
f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bls12-381/shplonk/doc.go b/ecc/bls12-381/shplonk/doc.go new file mode 100644 index 0000000000..e048f46009 --- /dev/null +++ b/ecc/bls12-381/shplonk/doc.go @@ -0,0 +1,18 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package shplonk provides a SHPLONK commitment scheme, cf https://eprint.iacr.org/2020/081.pdf +package shplonk diff --git a/ecc/bls12-381/shplonk/marshal.go b/ecc/bls12-381/shplonk/marshal.go new file mode 100644 index 0000000000..b609b73266 --- /dev/null +++ b/ecc/bls12-381/shplonk/marshal.go @@ -0,0 +1,62 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bls12-381" +) + +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bls12381.NewDecoder(r) + + toDecode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of a OpeningProof +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bls12381.NewEncoder(w) + + toEncode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bls12-381/shplonk/shplonk.go b/ecc/bls12-381/shplonk/shplonk.go new file mode 100644 index 0000000000..43d624ae23 --- /dev/null +++ b/ecc/bls12-381/shplonk/shplonk.go @@ -0,0 +1,462 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-381" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" +) + +var ( + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") +) + +// OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. +// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // W = ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r)/Z_{T} where Z_{T} is the vanishing polynomial on the (Sᵢ)_{i} + // and r interpolates fᵢ(Sᵢ) on (Sᵢ) + W bls12381.G1Affine + + // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X) + WPrime bls12381.G1Affine + + // ClaimedValues[i] are the values of fᵢ on Sᵢ + ClaimedValues [][]fr.Element +} + +// BatchOpen opens the list of polynomials on points, where the i-th polynomials is opend at points[i]. 
+func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + + var res OpeningProof + + nbInstances := len(polynomials) + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return res, err + } + + // compute the size of the linear combination + maxSizePolys := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) + } + } + nbPoints := 0 + sizeSi := make([]int, len(points)) + for i := 0; i < nbInstances; i++ { + nbPoints += len(points[i]) + sizeSi[i] = len(points[i]) + } + totalSize := maxSizePolys + nbPoints // upper bound of the size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) + + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) + bufTotalSize := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation + res.ClaimedValues = make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + res.ClaimedValues[i] = make([]fr.Element, len(points[i])) + } + var accGamma fr.Element + accGamma.SetOne() + + ztMinusSi := make([][]fr.Element, nbInstances) + ri := make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + + for j := 0; j < len(points[i]); j++ { + res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) + } + + ztMinusSi[i] = buildZtMinusSi(points, i) + + copy(bufMaxSizePolynomials, polynomials[i]) + ri[i] = interpolate(points[i], res.ClaimedValues[i]) + sub(bufMaxSizePolynomials, ri[i]) + + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) + bufTotalSize = mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + 
f[j].Add(&f[j], &bufTotalSize[j]) + } + + accGamma.Mul(&accGamma, &gamma) + setZero(bufMaxSizePolynomials) + } + + zt := buildVanishingPoly(flatten(points)) + w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + res.W, err = kzg.Commit(w, pk) + if err != nil { + return res, err + } + + // derive z + z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) + if err != nil { + return res, err + } + + // compute L = ∑ᵢγⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W + accGamma.SetOne() + var gammaiZtMinusSiZ fr.Element + l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + for i := 0; i < len(polynomials); i++ { + + ztMinusSiZ := eval(ztMinusSi[i], z) // Z_{T\Sᵢ}(z) + gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) // γⁱZ_{T\Sᵢ}(z) + + copy(bufMaxSizePolynomials, polynomials[i]) + riz := eval(ri[i], z) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + for j := 0; j < len(bufMaxSizePolynomials); j++ { + l[j].Add(&l[j], &bufMaxSizePolynomials[j]) + } + + setZero(bufMaxSizePolynomials) + accGamma.Mul(&accGamma, &gamma) + } + ztz := eval(zt, z) + setZero(bufTotalSize) + copy(bufTotalSize, w) + mulByConstant(bufTotalSize, ztz) // Z_{T}(z)W + for i := 0; i < totalSize-maxSizePolys; i++ { + l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) + } + for i := 0; i < maxSizePolys; i++ { + l[i].Sub(&l[i], &bufTotalSize[i]) + } // L <- L-Z_{T}(z)W + + xMinusZ := buildVanishingPoly([]fr.Element{z}) + wPrime := div(l, xMinusZ) + + res.WPrime, err = kzg.Commit(wPrime, pk) + if err != nil { + return res, err + } + + return res, nil +} + +// BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues +// at points. 
The order matters: the proof validates that the i-th commitment is correctly opened +// at the i-th point +// dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end +// of the original transcript. +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } + + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusSiRiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusSiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + ri := make([][]fr.Element, nbInstances) + for i := 0; i < len(points); i++ { + + ztMinusSi := buildZtMinusSi(points, i) // Z_{T-S_{i}}(X) + gammaiZTminusSiz[i] = eval(ztMinusSi, z) // Z_{T-S_{i}}(z) + gammaiZTminusSiz[i].Mul(&accGamma, &gammaiZTminusSiz[i]) // \gamma^{i} Z_{T-S_{i}}(z) + + ri[i] = interpolate(points[i], proof.ClaimedValues[i]) + riz := eval(ri[i], z) // r_{i}(z) + tmp.Mul(&gammaiZTminusSiz[i], &riz) // Z_{T-S_{i}}(z)r_{i}(z) + sumGammaiZTminusSiRiz.Add(&sumGammaiZTminusSiRiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := ecc.MultiExpConfig{} + var sumGammaiZtMinusSiComi kzg.Digest + _, err = sumGammaiZtMinusSiComi.MultiExp(digests, gammaiZTminusSiz, config) + if err != nil { + return err + } + + var 
bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusSiRizCom kzg.Digest + var sumGammaiZTminusSiRizBigInt big.Int + sumGammaiZTminusSiRiz.BigInt(&sumGammaiZTminusSiRizBigInt) + sumGammaiZTminusSiRizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusSiRizBigInt) + + // Z_{T}(z)[W] + zt := buildVanishingPoly(flatten(points)) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusSiComi, &sumGammaiZTminusSiRizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) + f.Add(&f, &zWPrime) + f.Neg(&f) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := bls12381.PairingCheckFixedQ( + []bls12381.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } + + return nil +} + +// deriveChallenge derives a challenge using Fiat Shamir to polynomials. +// The arguments are added to the transcript in the order in which they are given. 
+func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) { + + // derive the challenge gamma, binded to the point and the commitments + for i := range points { + for j := range points[i] { + if err := t.Bind(name, points[i][j].Marshal()); err != nil { + return fr.Element{}, err + } + } + } + for i := range digests { + if err := t.Bind(name, digests[i].Marshal()); err != nil { + return fr.Element{}, err + } + } + + for i := 0; i < len(dataTranscript); i++ { + if err := t.Bind(name, dataTranscript[i]); err != nil { + return fr.Element{}, err + } + } + + gammaByte, err := t.ComputeChallenge(name) + if err != nil { + return fr.Element{}, err + } + var gamma fr.Element + gamma.SetBytes(gammaByte) + + return gamma, nil +} + +// ------------------------------ +// utils + +func flatten(x [][]fr.Element) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + res := make([]fr.Element, 0, nbPoints) + for i := 0; i < len(x); i++ { + res = append(res, x[i]...) 
+	}
+	return res
+}
+
+// sets f to zero
+func setZero(f []fr.Element) {
+	for i := 0; i < len(f); i++ {
+		f[i].SetZero()
+	}
+}
+
+func eval(f []fr.Element, x fr.Element) fr.Element {
+	var y fr.Element
+	for i := len(f) - 1; i >= 0; i-- {
+		y.Mul(&y, &x).Add(&y, &f[i])
+	}
+	return y
+}
+
+// returns γ*f, re-using f
+func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element {
+	for i := 0; i < len(f); i++ {
+		f[i].Mul(&f[i], &gamma)
+	}
+	return f
+}
+
+// computes f <- (x-a)*f
+// memory of f is re used, need to pass a copy to not modify it
+func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element {
+	s := len(f)
+	var tmp fr.Element
+	f = append(f, fr.NewElement(0))
+	f[s] = f[s-1]
+	for i := s - 1; i >= 1; i-- {
+		tmp.Mul(&f[i], &a)
+		f[i].Sub(&f[i-1], &tmp)
+	}
+	f[0].Mul(&f[0], &a).Neg(&f[0])
+	return f
+}
+
+// returns Z_{T\Sᵢ} where Sᵢ=x[i]
+func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element {
+	nbPoints := 0
+	for i := 0; i < len(x); i++ {
+		nbPoints += len(x[i])
+	}
+	bufPoints := make([]fr.Element, 0, nbPoints-len(x[i]))
+	for j := 0; j < i; j++ {
+		bufPoints = append(bufPoints, x[j]...)
+	}
+	for j := i + 1; j < len(x); j++ {
+		bufPoints = append(bufPoints, x[j]...)
+ } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) 
+	}
+	setZero(res)
+
+	var tmp fr.Element
+	for i := 0; i < len(g); i++ {
+		for j := 0; j < len(f); j++ {
+			tmp.Mul(&f[j], &g[i])
+			res[j+i].Add(&res[j+i], &tmp)
+		}
+	}
+	return res
+}
+
+// returns f/g (assuming g divides f)
+// OK to not use fft if deg(g) is small
+// g's leading coefficient is assumed to be 1
+// f memory is re-used for the result, need to pass a copy to not modify it
+func div(f, g []fr.Element) []fr.Element {
+	sizef := len(f)
+	sizeg := len(g)
+	stop := sizeg - 1
+	var t fr.Element
+	for i := sizef - 2; i >= stop; i-- {
+		for j := 0; j < sizeg-1; j++ {
+			t.Mul(&f[i+1], &g[sizeg-2-j])
+			f[i-j].Sub(&f[i-j], &t)
+		}
+	}
+	return f[sizeg-1:]
+}
diff --git a/ecc/bls12-381/shplonk/shplonk_test.go b/ecc/bls12-381/shplonk/shplonk_test.go
new file mode 100644
index 0000000000..043238b47d
--- /dev/null
+++ b/ecc/bls12-381/shplonk/shplonk_test.go
@@ -0,0 +1,320 @@
+// Copyright 2020 Consensys Software Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 10 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < 
nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a 
fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to 
f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bls24-315/shplonk/doc.go b/ecc/bls24-315/shplonk/doc.go new file mode 100644 index 0000000000..e048f46009 --- /dev/null +++ b/ecc/bls24-315/shplonk/doc.go @@ -0,0 +1,18 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package shplonk provides a SHPLONK commitment scheme, cf https://eprint.iacr.org/2020/081.pdf +package shplonk diff --git a/ecc/bls24-315/shplonk/marshal.go b/ecc/bls24-315/shplonk/marshal.go new file mode 100644 index 0000000000..a57a90679f --- /dev/null +++ b/ecc/bls24-315/shplonk/marshal.go @@ -0,0 +1,62 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bls24-315" +) + +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bls24315.NewDecoder(r) + + toDecode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of a OpeningProof +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bls24315.NewEncoder(w) + + toEncode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bls24-315/shplonk/shplonk.go b/ecc/bls24-315/shplonk/shplonk.go new file mode 100644 index 0000000000..f3be3f5bfa --- /dev/null +++ b/ecc/bls24-315/shplonk/shplonk.go @@ -0,0 +1,462 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-315" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" +) + +var ( + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") +) + +// OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. +// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // W = ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r)/Z_{T} where Z_{T} is the vanishing polynomial on the (Sᵢ)_{i} + // and r interpolates fᵢ(Sᵢ) on (Sᵢ) + W bls24315.G1Affine + + // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X) + WPrime bls24315.G1Affine + + // ClaimedValues[i] are the values of fᵢ on Sᵢ + ClaimedValues [][]fr.Element +} + +// BatchOpen opens the list of polynomials on points, where the i-th polynomials is opend at points[i]. 
+func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + + var res OpeningProof + + nbInstances := len(polynomials) + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return res, err + } + + // compute the size of the linear combination + maxSizePolys := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) + } + } + nbPoints := 0 + sizeSi := make([]int, len(points)) + for i := 0; i < nbInstances; i++ { + nbPoints += len(points[i]) + sizeSi[i] = len(points[i]) + } + totalSize := maxSizePolys + nbPoints // upper bound of the size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) + + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) + bufTotalSize := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation + res.ClaimedValues = make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + res.ClaimedValues[i] = make([]fr.Element, len(points[i])) + } + var accGamma fr.Element + accGamma.SetOne() + + ztMinusSi := make([][]fr.Element, nbInstances) + ri := make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + + for j := 0; j < len(points[i]); j++ { + res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) + } + + ztMinusSi[i] = buildZtMinusSi(points, i) + + copy(bufMaxSizePolynomials, polynomials[i]) + ri[i] = interpolate(points[i], res.ClaimedValues[i]) + sub(bufMaxSizePolynomials, ri[i]) + + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) + bufTotalSize = mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + 
f[j].Add(&f[j], &bufTotalSize[j]) + } + + accGamma.Mul(&accGamma, &gamma) + setZero(bufMaxSizePolynomials) + } + + zt := buildVanishingPoly(flatten(points)) + w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + res.W, err = kzg.Commit(w, pk) + if err != nil { + return res, err + } + + // derive z + z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) + if err != nil { + return res, err + } + + // compute L = ∑ᵢγⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W + accGamma.SetOne() + var gammaiZtMinusSiZ fr.Element + l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + for i := 0; i < len(polynomials); i++ { + + ztMinusSiZ := eval(ztMinusSi[i], z) // Z_{T\Sᵢ}(z) + gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) // γⁱZ_{T\Sᵢ}(z) + + copy(bufMaxSizePolynomials, polynomials[i]) + riz := eval(ri[i], z) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + for j := 0; j < len(bufMaxSizePolynomials); j++ { + l[j].Add(&l[j], &bufMaxSizePolynomials[j]) + } + + setZero(bufMaxSizePolynomials) + accGamma.Mul(&accGamma, &gamma) + } + ztz := eval(zt, z) + setZero(bufTotalSize) + copy(bufTotalSize, w) + mulByConstant(bufTotalSize, ztz) // Z_{T}(z)W + for i := 0; i < totalSize-maxSizePolys; i++ { + l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) + } + for i := 0; i < maxSizePolys; i++ { + l[i].Sub(&l[i], &bufTotalSize[i]) + } // L <- L-Z_{T}(z)W + + xMinusZ := buildVanishingPoly([]fr.Element{z}) + wPrime := div(l, xMinusZ) + + res.WPrime, err = kzg.Commit(wPrime, pk) + if err != nil { + return res, err + } + + return res, nil +} + +// BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues +// at points. 
The order matters: the proof validates that the i-th commitment is correctly opened +// at the i-th point +// dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end +// of the original transcript. +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } + + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusSiRiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusSiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + ri := make([][]fr.Element, nbInstances) + for i := 0; i < len(points); i++ { + + ztMinusSi := buildZtMinusSi(points, i) // Z_{T-S_{i}}(X) + gammaiZTminusSiz[i] = eval(ztMinusSi, z) // Z_{T-S_{i}}(z) + gammaiZTminusSiz[i].Mul(&accGamma, &gammaiZTminusSiz[i]) // \gamma^{i} Z_{T-S_{i}}(z) + + ri[i] = interpolate(points[i], proof.ClaimedValues[i]) + riz := eval(ri[i], z) // r_{i}(z) + tmp.Mul(&gammaiZTminusSiz[i], &riz) // Z_{T-S_{i}}(z)r_{i}(z) + sumGammaiZTminusSiRiz.Add(&sumGammaiZTminusSiRiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := ecc.MultiExpConfig{} + var sumGammaiZtMinusSiComi kzg.Digest + _, err = sumGammaiZtMinusSiComi.MultiExp(digests, gammaiZTminusSiz, config) + if err != nil { + return err + } + + var 
bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusSiRizCom kzg.Digest + var sumGammaiZTminusSiRizBigInt big.Int + sumGammaiZTminusSiRiz.BigInt(&sumGammaiZTminusSiRizBigInt) + sumGammaiZTminusSiRizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusSiRizBigInt) + + // Z_{T}(z)[W] + zt := buildVanishingPoly(flatten(points)) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusSiComi, &sumGammaiZTminusSiRizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) + f.Add(&f, &zWPrime) + f.Neg(&f) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := bls24315.PairingCheckFixedQ( + []bls24315.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } + + return nil +} + +// deriveChallenge derives a challenge using Fiat Shamir to polynomials. +// The arguments are added to the transcript in the order in which they are given. 
+func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) { + + // derive the challenge gamma, binded to the point and the commitments + for i := range points { + for j := range points[i] { + if err := t.Bind(name, points[i][j].Marshal()); err != nil { + return fr.Element{}, err + } + } + } + for i := range digests { + if err := t.Bind(name, digests[i].Marshal()); err != nil { + return fr.Element{}, err + } + } + + for i := 0; i < len(dataTranscript); i++ { + if err := t.Bind(name, dataTranscript[i]); err != nil { + return fr.Element{}, err + } + } + + gammaByte, err := t.ComputeChallenge(name) + if err != nil { + return fr.Element{}, err + } + var gamma fr.Element + gamma.SetBytes(gammaByte) + + return gamma, nil +} + +// ------------------------------ +// utils + +func flatten(x [][]fr.Element) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + res := make([]fr.Element, 0, nbPoints) + for i := 0; i < len(x); i++ { + res = append(res, x[i]...) 
+ } + return res +} + +// sets f to zero +func setZero(f []fr.Element) { + for i := 0; i < len(f); i++ { + f[i].SetZero() + } +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} + +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) 
+ } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) 
+ } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bls24-315/shplonk/shplonk_test.go b/ecc/bls24-315/shplonk/shplonk_test.go new file mode 100644 index 0000000000..3e282d99a1 --- /dev/null +++ b/ecc/bls24-315/shplonk/shplonk_test.go @@ -0,0 +1,320 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 10 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < 
nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a 
fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to 
f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bls24-317/shplonk/doc.go b/ecc/bls24-317/shplonk/doc.go new file mode 100644 index 0000000000..e048f46009 --- /dev/null +++ b/ecc/bls24-317/shplonk/doc.go @@ -0,0 +1,18 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package shplonk provides a SHPLONK commitment scheme, cf https://eprint.iacr.org/2020/081.pdf +package shplonk diff --git a/ecc/bls24-317/shplonk/marshal.go b/ecc/bls24-317/shplonk/marshal.go new file mode 100644 index 0000000000..b69c0946ca --- /dev/null +++ b/ecc/bls24-317/shplonk/marshal.go @@ -0,0 +1,62 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bls24-317" +) + +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bls24317.NewDecoder(r) + + toDecode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of a OpeningProof +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bls24317.NewEncoder(w) + + toEncode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bls24-317/shplonk/shplonk.go b/ecc/bls24-317/shplonk/shplonk.go new file mode 100644 index 0000000000..dc8cdfa14a --- /dev/null +++ b/ecc/bls24-317/shplonk/shplonk.go @@ -0,0 +1,462 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-317" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" +) + +var ( + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") +) + +// OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. +// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // W = ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r)/Z_{T} where Z_{T} is the vanishing polynomial on the (Sᵢ)_{i} + // and r interpolates fᵢ(Sᵢ) on (Sᵢ) + W bls24317.G1Affine + + // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X) + WPrime bls24317.G1Affine + + // ClaimedValues[i] are the values of fᵢ on Sᵢ + ClaimedValues [][]fr.Element +} + +// BatchOpen opens the list of polynomials on points, where the i-th polynomials is opend at points[i]. 
+func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + + var res OpeningProof + + nbInstances := len(polynomials) + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return res, err + } + + // compute the size of the linear combination + maxSizePolys := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) + } + } + nbPoints := 0 + sizeSi := make([]int, len(points)) + for i := 0; i < nbInstances; i++ { + nbPoints += len(points[i]) + sizeSi[i] = len(points[i]) + } + totalSize := maxSizePolys + nbPoints // upper bound of the size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) + + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) + bufTotalSize := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation + res.ClaimedValues = make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + res.ClaimedValues[i] = make([]fr.Element, len(points[i])) + } + var accGamma fr.Element + accGamma.SetOne() + + ztMinusSi := make([][]fr.Element, nbInstances) + ri := make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + + for j := 0; j < len(points[i]); j++ { + res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) + } + + ztMinusSi[i] = buildZtMinusSi(points, i) + + copy(bufMaxSizePolynomials, polynomials[i]) + ri[i] = interpolate(points[i], res.ClaimedValues[i]) + sub(bufMaxSizePolynomials, ri[i]) + + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) + bufTotalSize = mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + 
f[j].Add(&f[j], &bufTotalSize[j]) + } + + accGamma.Mul(&accGamma, &gamma) + setZero(bufMaxSizePolynomials) + } + + zt := buildVanishingPoly(flatten(points)) + w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + res.W, err = kzg.Commit(w, pk) + if err != nil { + return res, err + } + + // derive z + z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) + if err != nil { + return res, err + } + + // compute L = ∑ᵢγⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W + accGamma.SetOne() + var gammaiZtMinusSiZ fr.Element + l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + for i := 0; i < len(polynomials); i++ { + + ztMinusSiZ := eval(ztMinusSi[i], z) // Z_{T\Sᵢ}(z) + gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) // γⁱZ_{T\Sᵢ}(z) + + copy(bufMaxSizePolynomials, polynomials[i]) + riz := eval(ri[i], z) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + for j := 0; j < len(bufMaxSizePolynomials); j++ { + l[j].Add(&l[j], &bufMaxSizePolynomials[j]) + } + + setZero(bufMaxSizePolynomials) + accGamma.Mul(&accGamma, &gamma) + } + ztz := eval(zt, z) + setZero(bufTotalSize) + copy(bufTotalSize, w) + mulByConstant(bufTotalSize, ztz) // Z_{T}(z)W + for i := 0; i < totalSize-maxSizePolys; i++ { + l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) + } + for i := 0; i < maxSizePolys; i++ { + l[i].Sub(&l[i], &bufTotalSize[i]) + } // L <- L-Z_{T}(z)W + + xMinusZ := buildVanishingPoly([]fr.Element{z}) + wPrime := div(l, xMinusZ) + + res.WPrime, err = kzg.Commit(wPrime, pk) + if err != nil { + return res, err + } + + return res, nil +} + +// BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues +// at points. 
The order matters: the proof validates that the i-th commitment is correctly opened +// at the i-th point +// dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end +// of the original transcript. +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } + + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusSiRiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusSiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + ri := make([][]fr.Element, nbInstances) + for i := 0; i < len(points); i++ { + + ztMinusSi := buildZtMinusSi(points, i) // Z_{T-S_{i}}(X) + gammaiZTminusSiz[i] = eval(ztMinusSi, z) // Z_{T-S_{i}}(z) + gammaiZTminusSiz[i].Mul(&accGamma, &gammaiZTminusSiz[i]) // \gamma^{i} Z_{T-S_{i}}(z) + + ri[i] = interpolate(points[i], proof.ClaimedValues[i]) + riz := eval(ri[i], z) // r_{i}(z) + tmp.Mul(&gammaiZTminusSiz[i], &riz) // Z_{T-S_{i}}(z)r_{i}(z) + sumGammaiZTminusSiRiz.Add(&sumGammaiZTminusSiRiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := ecc.MultiExpConfig{} + var sumGammaiZtMinusSiComi kzg.Digest + _, err = sumGammaiZtMinusSiComi.MultiExp(digests, gammaiZTminusSiz, config) + if err != nil { + return err + } + + var 
bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusSiRizCom kzg.Digest + var sumGammaiZTminusSiRizBigInt big.Int + sumGammaiZTminusSiRiz.BigInt(&sumGammaiZTminusSiRizBigInt) + sumGammaiZTminusSiRizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusSiRizBigInt) + + // Z_{T}(z)[W] + zt := buildVanishingPoly(flatten(points)) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusSiComi, &sumGammaiZTminusSiRizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) + f.Add(&f, &zWPrime) + f.Neg(&f) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := bls24317.PairingCheckFixedQ( + []bls24317.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } + + return nil +} + +// deriveChallenge derives a challenge using Fiat Shamir to polynomials. +// The arguments are added to the transcript in the order in which they are given. 
+func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) {
+
+ // derive the challenge gamma, bound to the points and the commitments
+ for i := range points {
+ for j := range points[i] {
+ if err := t.Bind(name, points[i][j].Marshal()); err != nil {
+ return fr.Element{}, err
+ }
+ }
+ }
+ for i := range digests {
+ if err := t.Bind(name, digests[i].Marshal()); err != nil {
+ return fr.Element{}, err
+ }
+ }
+
+ for i := 0; i < len(dataTranscript); i++ {
+ if err := t.Bind(name, dataTranscript[i]); err != nil {
+ return fr.Element{}, err
+ }
+ }
+
+ gammaByte, err := t.ComputeChallenge(name)
+ if err != nil {
+ return fr.Element{}, err
+ }
+ var gamma fr.Element
+ gamma.SetBytes(gammaByte)
+
+ return gamma, nil
+}
+
+// ------------------------------
+// utils
+
+func flatten(x [][]fr.Element) []fr.Element {
+ nbPoints := 0
+ for i := 0; i < len(x); i++ {
+ nbPoints += len(x[i])
+ }
+ res := make([]fr.Element, 0, nbPoints)
+ for i := 0; i < len(x); i++ {
+ res = append(res, x[i]...)
+ }
+ return res
+}
+
+// sets f to zero
+func setZero(f []fr.Element) {
+ for i := 0; i < len(f); i++ {
+ f[i].SetZero()
+ }
+}
+
+func eval(f []fr.Element, x fr.Element) fr.Element {
+ var y fr.Element
+ for i := len(f) - 1; i >= 0; i-- {
+ y.Mul(&y, &x).Add(&y, &f[i])
+ }
+ return y
+}
+
+// returns γ*f, re-using f
+func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element {
+ for i := 0; i < len(f); i++ {
+ f[i].Mul(&f[i], &gamma)
+ }
+ return f
+}
+
+// computes f <- (x-a)*f
+// memory of f is re used, need to pass a copy to not modify it
+func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element {
+ s := len(f)
+ var tmp fr.Element
+ f = append(f, fr.NewElement(0))
+ f[s] = f[s-1]
+ for i := s - 1; i >= 1; i-- {
+ tmp.Mul(&f[i], &a)
+ f[i].Sub(&f[i-1], &tmp)
+ }
+ f[0].Mul(&f[0], &a).Neg(&f[0])
+ return f
+}
+
+// returns Z_{T\Sᵢ} where Sᵢ=x[i]
+func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element {
+ nbPoints := 0
+ for i := 0; i < len(x); i++ {
+ nbPoints += len(x[i])
+ }
+ bufPoints := make([]fr.Element, 0, nbPoints-len(x[i]))
+ for j := 0; j < i; j++ {
+ bufPoints = append(bufPoints, x[j]...)
+ }
+ for j := i + 1; j < len(x); j++ {
+ bufPoints = append(bufPoints, x[j]...)
+ } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) 
+ } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bls24-317/shplonk/shplonk_test.go b/ecc/bls24-317/shplonk/shplonk_test.go new file mode 100644 index 0000000000..14dd5af291 --- /dev/null +++ b/ecc/bls24-317/shplonk/shplonk_test.go @@ -0,0 +1,320 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 10 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < 
nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a 
fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to 
f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bn254/shplonk/doc.go b/ecc/bn254/shplonk/doc.go new file mode 100644 index 0000000000..e048f46009 --- /dev/null +++ b/ecc/bn254/shplonk/doc.go @@ -0,0 +1,18 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package shplonk provides a SHPLONK commitment scheme, cf https://eprint.iacr.org/2020/081.pdf +package shplonk diff --git a/ecc/bn254/shplonk/marshal.go b/ecc/bn254/shplonk/marshal.go index 66eaf0302f..050e1d6fdf 100644 --- a/ecc/bn254/shplonk/marshal.go +++ b/ecc/bn254/shplonk/marshal.go @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+// Code generated by consensys/gnark-crypto DO NOT EDIT + package shplonk import ( diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index b8b3cc1a25..3ed334464e 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -//cf https://eprint.iacr.org/2020/081.pdf +// Code generated by consensys/gnark-crypto DO NOT EDIT package shplonk diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 2b082d16c8..6d2c2841b0 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +// Code generated by consensys/gnark-crypto DO NOT EDIT + package shplonk import ( diff --git a/ecc/bw6-633/shplonk/doc.go b/ecc/bw6-633/shplonk/doc.go new file mode 100644 index 0000000000..e048f46009 --- /dev/null +++ b/ecc/bw6-633/shplonk/doc.go @@ -0,0 +1,18 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package shplonk provides a SHPLONK commitment scheme, cf https://eprint.iacr.org/2020/081.pdf +package shplonk diff --git a/ecc/bw6-633/shplonk/marshal.go b/ecc/bw6-633/shplonk/marshal.go new file mode 100644 index 0000000000..2c44c28a65 --- /dev/null +++ b/ecc/bw6-633/shplonk/marshal.go @@ -0,0 +1,62 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bw6-633" +) + +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bw6633.NewDecoder(r) + + toDecode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of a OpeningProof +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bw6633.NewEncoder(w) + + toEncode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bw6-633/shplonk/shplonk.go b/ecc/bw6-633/shplonk/shplonk.go new file mode 100644 index 0000000000..a3d6bd1999 --- /dev/null 
+++ b/ecc/bw6-633/shplonk/shplonk.go
@@ -0,0 +1,462 @@
+// Copyright 2020 Consensys Software Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by consensys/gnark-crypto DO NOT EDIT
+
+package shplonk
+
+import (
+ "errors"
+ "hash"
+ "math/big"
+
+ "github.com/consensys/gnark-crypto/ecc"
+ "github.com/consensys/gnark-crypto/ecc/bw6-633"
+ "github.com/consensys/gnark-crypto/ecc/bw6-633/fr"
+ "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg"
+ fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir"
+)
+
+var (
+ ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points")
+ ErrVerifyOpeningProof = errors.New("can't verify batch opening proof")
+)
+
+// OpeningProof KZG proof for opening (fᵢ)_{i} at different points (xᵢ)_{i}.
+//
+// implements io.ReaderFrom and io.WriterTo
+type OpeningProof struct {
+
+ // W = ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r)/Z_{T} where Z_{T} is the vanishing polynomial on the (Sᵢ)_{i}
+ // and r interpolates fᵢ(Sᵢ) on (Sᵢ)
+ W bw6633.G1Affine
+
+ // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X)
+ WPrime bw6633.G1Affine
+
+ // ClaimedValues[i] are the values of fᵢ on Sᵢ
+ ClaimedValues [][]fr.Element
+}
+
+// BatchOpen opens the list of polynomials on points, where the i-th polynomial is opened at points[i].
+func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + + var res OpeningProof + + nbInstances := len(polynomials) + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return res, err + } + + // compute the size of the linear combination + maxSizePolys := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) + } + } + nbPoints := 0 + sizeSi := make([]int, len(points)) + for i := 0; i < nbInstances; i++ { + nbPoints += len(points[i]) + sizeSi[i] = len(points[i]) + } + totalSize := maxSizePolys + nbPoints // upper bound of the size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) + + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) + bufTotalSize := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation + res.ClaimedValues = make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + res.ClaimedValues[i] = make([]fr.Element, len(points[i])) + } + var accGamma fr.Element + accGamma.SetOne() + + ztMinusSi := make([][]fr.Element, nbInstances) + ri := make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + + for j := 0; j < len(points[i]); j++ { + res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) + } + + ztMinusSi[i] = buildZtMinusSi(points, i) + + copy(bufMaxSizePolynomials, polynomials[i]) + ri[i] = interpolate(points[i], res.ClaimedValues[i]) + sub(bufMaxSizePolynomials, ri[i]) + + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) + bufTotalSize = mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + 
f[j].Add(&f[j], &bufTotalSize[j])
+ }
+
+ accGamma.Mul(&accGamma, &gamma)
+ setZero(bufMaxSizePolynomials)
+ }
+
+ zt := buildVanishingPoly(flatten(points))
+ w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation
+ res.W, err = kzg.Commit(w, pk)
+ if err != nil {
+ return res, err
+ }
+
+ // derive z
+ z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs)
+ if err != nil {
+ return res, err
+ }
+
+ // compute L = ∑ᵢγⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W
+ accGamma.SetOne()
+ var gammaiZtMinusSiZ fr.Element
+ l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation
+ for i := 0; i < len(polynomials); i++ {
+
+ ztMinusSiZ := eval(ztMinusSi[i], z) // Z_{T\Sᵢ}(z)
+ gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) // γⁱZ_{T\Sᵢ}(z)
+
+ copy(bufMaxSizePolynomials, polynomials[i])
+ riz := eval(ri[i], z)
+ bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z))
+ mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))
+ for j := 0; j < len(bufMaxSizePolynomials); j++ {
+ l[j].Add(&l[j], &bufMaxSizePolynomials[j])
+ }
+
+ setZero(bufMaxSizePolynomials)
+ accGamma.Mul(&accGamma, &gamma)
+ }
+ ztz := eval(zt, z)
+ setZero(bufTotalSize)
+ copy(bufTotalSize, w)
+ mulByConstant(bufTotalSize, ztz) // Z_{T}(z)W
+ for i := 0; i < totalSize-maxSizePolys; i++ {
+ l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i])
+ }
+ for i := 0; i < maxSizePolys; i++ {
+ l[i].Sub(&l[i], &bufTotalSize[i])
+ } // L <- L-Z_{T}(z)W
+
+ xMinusZ := buildVanishingPoly([]fr.Element{z})
+ wPrime := div(l, xMinusZ)
+
+ res.WPrime, err = kzg.Commit(wPrime, pk)
+ if err != nil {
+ return res, err
+ }
+
+ return res, nil
+}
+
+// BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues
+// at points.
The order matters: the proof validates that the i-th commitment is correctly opened +// at the i-th point +// dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end +// of the original transcript. +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } + + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusSiRiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusSiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + ri := make([][]fr.Element, nbInstances) + for i := 0; i < len(points); i++ { + + ztMinusSi := buildZtMinusSi(points, i) // Z_{T-S_{i}}(X) + gammaiZTminusSiz[i] = eval(ztMinusSi, z) // Z_{T-S_{i}}(z) + gammaiZTminusSiz[i].Mul(&accGamma, &gammaiZTminusSiz[i]) // \gamma^{i} Z_{T-S_{i}}(z) + + ri[i] = interpolate(points[i], proof.ClaimedValues[i]) + riz := eval(ri[i], z) // r_{i}(z) + tmp.Mul(&gammaiZTminusSiz[i], &riz) // Z_{T-S_{i}}(z)r_{i}(z) + sumGammaiZTminusSiRiz.Add(&sumGammaiZTminusSiRiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := ecc.MultiExpConfig{} + var sumGammaiZtMinusSiComi kzg.Digest + _, err = sumGammaiZtMinusSiComi.MultiExp(digests, gammaiZTminusSiz, config) + if err != nil { + return err + } + + var 
bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusSiRizCom kzg.Digest + var sumGammaiZTminusSiRizBigInt big.Int + sumGammaiZTminusSiRiz.BigInt(&sumGammaiZTminusSiRizBigInt) + sumGammaiZTminusSiRizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusSiRizBigInt) + + // Z_{T}(z)[W] + zt := buildVanishingPoly(flatten(points)) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusSiComi, &sumGammaiZTminusSiRizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) + f.Add(&f, &zWPrime) + f.Neg(&f) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := bw6633.PairingCheckFixedQ( + []bw6633.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } + + return nil +} + +// deriveChallenge derives a challenge using Fiat Shamir to polynomials. +// The arguments are added to the transcript in the order in which they are given. 
+func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) {
+
+ // derive the challenge gamma, bound to the points and the commitments
+ for i := range points {
+ for j := range points[i] {
+ if err := t.Bind(name, points[i][j].Marshal()); err != nil {
+ return fr.Element{}, err
+ }
+ }
+ }
+ for i := range digests {
+ if err := t.Bind(name, digests[i].Marshal()); err != nil {
+ return fr.Element{}, err
+ }
+ }
+
+ for i := 0; i < len(dataTranscript); i++ {
+ if err := t.Bind(name, dataTranscript[i]); err != nil {
+ return fr.Element{}, err
+ }
+ }
+
+ gammaByte, err := t.ComputeChallenge(name)
+ if err != nil {
+ return fr.Element{}, err
+ }
+ var gamma fr.Element
+ gamma.SetBytes(gammaByte)
+
+ return gamma, nil
+}
+
+// ------------------------------
+// utils
+
+func flatten(x [][]fr.Element) []fr.Element {
+ nbPoints := 0
+ for i := 0; i < len(x); i++ {
+ nbPoints += len(x[i])
+ }
+ res := make([]fr.Element, 0, nbPoints)
+ for i := 0; i < len(x); i++ {
+ res = append(res, x[i]...)
+ }
+ return res
+}
+
+// sets f to zero
+func setZero(f []fr.Element) {
+ for i := 0; i < len(f); i++ {
+ f[i].SetZero()
+ }
+}
+
+func eval(f []fr.Element, x fr.Element) fr.Element {
+ var y fr.Element
+ for i := len(f) - 1; i >= 0; i-- {
+ y.Mul(&y, &x).Add(&y, &f[i])
+ }
+ return y
+}
+
+// returns γ*f, re-using f
+func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element {
+ for i := 0; i < len(f); i++ {
+ f[i].Mul(&f[i], &gamma)
+ }
+ return f
+}
+
+// computes f <- (x-a)*f
+// memory of f is re used, need to pass a copy to not modify it
+func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element {
+ s := len(f)
+ var tmp fr.Element
+ f = append(f, fr.NewElement(0))
+ f[s] = f[s-1]
+ for i := s - 1; i >= 1; i-- {
+ tmp.Mul(&f[i], &a)
+ f[i].Sub(&f[i-1], &tmp)
+ }
+ f[0].Mul(&f[0], &a).Neg(&f[0])
+ return f
+}
+
+// returns Z_{T\Sᵢ} where Sᵢ=x[i]
+func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element {
+ nbPoints := 0
+ for i := 0; i < len(x); i++ {
+ nbPoints += len(x[i])
+ }
+ bufPoints := make([]fr.Element, 0, nbPoints-len(x[i]))
+ for j := 0; j < i; j++ {
+ bufPoints = append(bufPoints, x[j]...)
+ }
+ for j := i + 1; j < len(x); j++ {
+ bufPoints = append(bufPoints, x[j]...)
+ } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) 
+ } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bw6-633/shplonk/shplonk_test.go b/ecc/bw6-633/shplonk/shplonk_test.go new file mode 100644 index 0000000000..807b263e08 --- /dev/null +++ b/ecc/bw6-633/shplonk/shplonk_test.go @@ -0,0 +1,320 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 10 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < 
nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a 
fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to 
f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bw6-756/shplonk/doc.go b/ecc/bw6-756/shplonk/doc.go new file mode 100644 index 0000000000..e048f46009 --- /dev/null +++ b/ecc/bw6-756/shplonk/doc.go @@ -0,0 +1,18 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package shplonk provides a SHPLONK commitment scheme, cf https://eprint.iacr.org/2020/081.pdf +package shplonk diff --git a/ecc/bw6-756/shplonk/marshal.go b/ecc/bw6-756/shplonk/marshal.go new file mode 100644 index 0000000000..e7b0bf4ed8 --- /dev/null +++ b/ecc/bw6-756/shplonk/marshal.go @@ -0,0 +1,62 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bw6-756" +) + +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bw6756.NewDecoder(r) + + toDecode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of a OpeningProof +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bw6756.NewEncoder(w) + + toEncode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bw6-756/shplonk/shplonk.go b/ecc/bw6-756/shplonk/shplonk.go new file mode 100644 index 0000000000..d8353e303b --- /dev/null +++ b/ecc/bw6-756/shplonk/shplonk.go @@ -0,0 +1,462 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by consensys/gnark-crypto DO NOT EDIT
+
+package shplonk
+
+import (
+ "errors"
+ "hash"
+ "math/big"
+
+ "github.com/consensys/gnark-crypto/ecc"
+ "github.com/consensys/gnark-crypto/ecc/bw6-756"
+ "github.com/consensys/gnark-crypto/ecc/bw6-756/fr"
+ "github.com/consensys/gnark-crypto/ecc/bw6-756/kzg"
+ fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir"
+)
+
+var (
+ ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points")
+ ErrVerifyOpeningProof = errors.New("can't verify batch opening proof")
+)
+
+// OpeningProof KZG proof for opening (fᵢ)_{i} at different points (xᵢ)_{i}.
+//
+// implements io.ReaderFrom and io.WriterTo
+type OpeningProof struct {
+
+ // W = ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r)/Z_{T} where Z_{T} is the vanishing polynomial on the (Sᵢ)_{i}
+ // and r interpolates fᵢ(Sᵢ) on (Sᵢ)
+ W bw6756.G1Affine
+
+ // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X)
+ WPrime bw6756.G1Affine
+
+ // ClaimedValues[i] are the values of fᵢ on Sᵢ
+ ClaimedValues [][]fr.Element
+}
+
+// BatchOpen opens the list of polynomials on points, where the i-th polynomial is opened at points[i].
+func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + + var res OpeningProof + + nbInstances := len(polynomials) + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return res, err + } + + // compute the size of the linear combination + maxSizePolys := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) + } + } + nbPoints := 0 + sizeSi := make([]int, len(points)) + for i := 0; i < nbInstances; i++ { + nbPoints += len(points[i]) + sizeSi[i] = len(points[i]) + } + totalSize := maxSizePolys + nbPoints // upper bound of the size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) + + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) + bufTotalSize := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation + res.ClaimedValues = make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + res.ClaimedValues[i] = make([]fr.Element, len(points[i])) + } + var accGamma fr.Element + accGamma.SetOne() + + ztMinusSi := make([][]fr.Element, nbInstances) + ri := make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + + for j := 0; j < len(points[i]); j++ { + res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) + } + + ztMinusSi[i] = buildZtMinusSi(points, i) + + copy(bufMaxSizePolynomials, polynomials[i]) + ri[i] = interpolate(points[i], res.ClaimedValues[i]) + sub(bufMaxSizePolynomials, ri[i]) + + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) + bufTotalSize = mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + 
f[j].Add(&f[j], &bufTotalSize[j])
+ }
+
+ accGamma.Mul(&accGamma, &gamma)
+ setZero(bufMaxSizePolynomials)
+ }
+
+ zt := buildVanishingPoly(flatten(points))
+ w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation
+ res.W, err = kzg.Commit(w, pk)
+ if err != nil {
+ return res, err
+ }
+
+ // derive z
+ z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs)
+ if err != nil {
+ return res, err
+ }
+
+ // compute L = ∑ᵢγⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W
+ accGamma.SetOne()
+ var gammaiZtMinusSiZ fr.Element
+ l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation
+ for i := 0; i < len(polynomials); i++ {
+
+ ztMinusSiZ := eval(ztMinusSi[i], z) // Z_{T\Sᵢ}(z)
+ gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) // γⁱZ_{T\Sᵢ}(z)
+
+ copy(bufMaxSizePolynomials, polynomials[i])
+ riz := eval(ri[i], z)
+ bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z))
+ mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))
+ for j := 0; j < len(bufMaxSizePolynomials); j++ {
+ l[j].Add(&l[j], &bufMaxSizePolynomials[j])
+ }
+
+ setZero(bufMaxSizePolynomials)
+ accGamma.Mul(&accGamma, &gamma)
+ }
+ ztz := eval(zt, z)
+ setZero(bufTotalSize)
+ copy(bufTotalSize, w)
+ mulByConstant(bufTotalSize, ztz) // Z_{T}(z)W
+ for i := 0; i < totalSize-maxSizePolys; i++ {
+ l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i])
+ }
+ for i := 0; i < maxSizePolys; i++ {
+ l[i].Sub(&l[i], &bufTotalSize[i])
+ } // L <- L-Z_{T}(z)W
+
+ xMinusZ := buildVanishingPoly([]fr.Element{z})
+ wPrime := div(l, xMinusZ)
+
+ res.WPrime, err = kzg.Commit(wPrime, pk)
+ if err != nil {
+ return res, err
+ }
+
+ return res, nil
+}
+
+// BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues
+// at points.
The order matters: the proof validates that the i-th commitment is correctly opened +// at the i-th point +// dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end +// of the original transcript. +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } + + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusSiRiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusSiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + ri := make([][]fr.Element, nbInstances) + for i := 0; i < len(points); i++ { + + ztMinusSi := buildZtMinusSi(points, i) // Z_{T-S_{i}}(X) + gammaiZTminusSiz[i] = eval(ztMinusSi, z) // Z_{T-S_{i}}(z) + gammaiZTminusSiz[i].Mul(&accGamma, &gammaiZTminusSiz[i]) // \gamma^{i} Z_{T-S_{i}}(z) + + ri[i] = interpolate(points[i], proof.ClaimedValues[i]) + riz := eval(ri[i], z) // r_{i}(z) + tmp.Mul(&gammaiZTminusSiz[i], &riz) // Z_{T-S_{i}}(z)r_{i}(z) + sumGammaiZTminusSiRiz.Add(&sumGammaiZTminusSiRiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := ecc.MultiExpConfig{} + var sumGammaiZtMinusSiComi kzg.Digest + _, err = sumGammaiZtMinusSiComi.MultiExp(digests, gammaiZTminusSiz, config) + if err != nil { + return err + } + + var 
bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusSiRizCom kzg.Digest + var sumGammaiZTminusSiRizBigInt big.Int + sumGammaiZTminusSiRiz.BigInt(&sumGammaiZTminusSiRizBigInt) + sumGammaiZTminusSiRizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusSiRizBigInt) + + // Z_{T}(z)[W] + zt := buildVanishingPoly(flatten(points)) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusSiComi, &sumGammaiZTminusSiRizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) + f.Add(&f, &zWPrime) + f.Neg(&f) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := bw6756.PairingCheckFixedQ( + []bw6756.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } + + return nil +} + +// deriveChallenge derives a challenge using Fiat Shamir to polynomials. +// The arguments are added to the transcript in the order in which they are given. 
+func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) {
+
+ // derive the challenge gamma, bound to the points and the commitments
+ for i := range points {
+ for j := range points[i] {
+ if err := t.Bind(name, points[i][j].Marshal()); err != nil {
+ return fr.Element{}, err
+ }
+ }
+ }
+ for i := range digests {
+ if err := t.Bind(name, digests[i].Marshal()); err != nil {
+ return fr.Element{}, err
+ }
+ }
+
+ for i := 0; i < len(dataTranscript); i++ {
+ if err := t.Bind(name, dataTranscript[i]); err != nil {
+ return fr.Element{}, err
+ }
+ }
+
+ gammaByte, err := t.ComputeChallenge(name)
+ if err != nil {
+ return fr.Element{}, err
+ }
+ var gamma fr.Element
+ gamma.SetBytes(gammaByte)
+
+ return gamma, nil
+}
+
+// ------------------------------
+// utils
+
+func flatten(x [][]fr.Element) []fr.Element {
+ nbPoints := 0
+ for i := 0; i < len(x); i++ {
+ nbPoints += len(x[i])
+ }
+ res := make([]fr.Element, 0, nbPoints)
+ for i := 0; i < len(x); i++ {
+ res = append(res, x[i]...)
+ }
+ return res
+}
+
+// sets f to zero
+func setZero(f []fr.Element) {
+ for i := 0; i < len(f); i++ {
+ f[i].SetZero()
+ }
+}
+
+func eval(f []fr.Element, x fr.Element) fr.Element {
+ var y fr.Element
+ for i := len(f) - 1; i >= 0; i-- {
+ y.Mul(&y, &x).Add(&y, &f[i])
+ }
+ return y
+}
+
+// returns γ*f, re-using f
+func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element {
+ for i := 0; i < len(f); i++ {
+ f[i].Mul(&f[i], &gamma)
+ }
+ return f
+}
+
+// computes f <- (x-a)*f
+// memory of f is re-used, need to pass a copy to not modify it
+func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element {
+ s := len(f)
+ var tmp fr.Element
+ f = append(f, fr.NewElement(0))
+ f[s] = f[s-1]
+ for i := s - 1; i >= 1; i-- {
+ tmp.Mul(&f[i], &a)
+ f[i].Sub(&f[i-1], &tmp)
+ }
+ f[0].Mul(&f[0], &a).Neg(&f[0])
+ return f
+}
+
+// returns Z_{T\Sᵢ} where Sᵢ=x[i]
+func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element {
+ nbPoints := 0
+ for i := 0; i < len(x); i++ {
+ nbPoints += len(x[i])
+ }
+ bufPoints := make([]fr.Element, 0, nbPoints-len(x[i]))
+ for j := 0; j < i; j++ {
+ bufPoints = append(bufPoints, x[j]...)
+ }
+ for j := i + 1; j < len(x); j++ {
+ bufPoints = append(bufPoints, x[j]...)
+ } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) 
+ } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bw6-756/shplonk/shplonk_test.go b/ecc/bw6-756/shplonk/shplonk_test.go new file mode 100644 index 0000000000..c729294363 --- /dev/null +++ b/ecc/bw6-756/shplonk/shplonk_test.go @@ -0,0 +1,320 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-756/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-756/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 10 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < 
nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a 
fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to 
f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bw6-761/shplonk/doc.go b/ecc/bw6-761/shplonk/doc.go new file mode 100644 index 0000000000..e048f46009 --- /dev/null +++ b/ecc/bw6-761/shplonk/doc.go @@ -0,0 +1,18 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package shplonk provides a SHPLONK commitment scheme, cf https://eprint.iacr.org/2020/081.pdf +package shplonk diff --git a/ecc/bw6-761/shplonk/marshal.go b/ecc/bw6-761/shplonk/marshal.go new file mode 100644 index 0000000000..f931b1dd51 --- /dev/null +++ b/ecc/bw6-761/shplonk/marshal.go @@ -0,0 +1,62 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bw6-761" +) + +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bw6761.NewDecoder(r) + + toDecode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of a OpeningProof +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bw6761.NewEncoder(w) + + toEncode := []interface{}{ + &proof.W, + &proof.WPrime, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bw6-761/shplonk/shplonk.go b/ecc/bw6-761/shplonk/shplonk.go new file mode 100644 index 0000000000..a4d81d6def --- /dev/null +++ b/ecc/bw6-761/shplonk/shplonk.go @@ -0,0 +1,462 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + fiatshamir "github.com/consensys/gnark-crypto/fiat-shamir" +) + +var ( + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") +) + +// OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. +// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // W = ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r)/Z_{T} where Z_{T} is the vanishing polynomial on the (Sᵢ)_{i} + // and r interpolates fᵢ(Sᵢ) on (Sᵢ) + W bw6761.G1Affine + + // L(X)/(X-z) where L(X)=∑ᵢγⁱZ_{T\xᵢ}(f_i(X)-rᵢ) - Z_{T}W(X) + WPrime bw6761.G1Affine + + // ClaimedValues[i] are the values of fᵢ on Sᵢ + ClaimedValues [][]fr.Element +} + +// BatchOpen opens the list of polynomials on points, where the i-th polynomials is opend at points[i]. 
+func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (OpeningProof, error) { + + var res OpeningProof + + nbInstances := len(polynomials) + if len(polynomials) != len(points) { + return res, ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return res, err + } + + // compute the size of the linear combination + maxSizePolys := len(polynomials[0]) + for i := 1; i < len(polynomials); i++ { + if maxSizePolys < len(polynomials[i]) { + maxSizePolys = len(polynomials[i]) + } + } + nbPoints := 0 + sizeSi := make([]int, len(points)) + for i := 0; i < nbInstances; i++ { + nbPoints += len(points[i]) + sizeSi[i] = len(points[i]) + } + totalSize := maxSizePolys + nbPoints // upper bound of the size of f := ∑ᵢ γⁱZ_{T\Sᵢ}(f_i(X)-r) + + bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) + bufTotalSize := make([]fr.Element, totalSize) + f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation + res.ClaimedValues = make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + res.ClaimedValues[i] = make([]fr.Element, len(points[i])) + } + var accGamma fr.Element + accGamma.SetOne() + + ztMinusSi := make([][]fr.Element, nbInstances) + ri := make([][]fr.Element, nbInstances) + for i := 0; i < nbInstances; i++ { + + for j := 0; j < len(points[i]); j++ { + res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) + } + + ztMinusSi[i] = buildZtMinusSi(points, i) + + copy(bufMaxSizePolynomials, polynomials[i]) + ri[i] = interpolate(points[i], res.ClaimedValues[i]) + sub(bufMaxSizePolynomials, ri[i]) + + bufTotalSize = mul(bufMaxSizePolynomials, ztMinusSi[i], bufTotalSize) + bufTotalSize = mulByConstant(bufTotalSize, accGamma) + for j := 0; j < len(bufTotalSize); j++ { + 
f[j].Add(&f[j], &bufTotalSize[j]) + } + + accGamma.Mul(&accGamma, &gamma) + setZero(bufMaxSizePolynomials) + } + + zt := buildVanishingPoly(flatten(points)) + w := div(f, zt) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + res.W, err = kzg.Commit(w, pk) + if err != nil { + return res, err + } + + // derive z + z, err := deriveChallenge("z", nil, []kzg.Digest{res.W}, fs) + if err != nil { + return res, err + } + + // compute L = ∑ᵢγⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z))-Z_{T}(z)W + accGamma.SetOne() + var gammaiZtMinusSiZ fr.Element + l := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation page 11 for notation + for i := 0; i < len(polynomials); i++ { + + ztMinusSiZ := eval(ztMinusSi[i], z) // Z_{T\Sᵢ}(z) + gammaiZtMinusSiZ.Mul(&accGamma, &ztMinusSiZ) // γⁱZ_{T\Sᵢ}(z) + + copy(bufMaxSizePolynomials, polynomials[i]) + riz := eval(ri[i], z) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + for j := 0; j < len(bufMaxSizePolynomials); j++ { + l[j].Add(&l[j], &bufMaxSizePolynomials[j]) + } + + setZero(bufMaxSizePolynomials) + accGamma.Mul(&accGamma, &gamma) + } + ztz := eval(zt, z) + setZero(bufTotalSize) + copy(bufTotalSize, w) + mulByConstant(bufTotalSize, ztz) // Z_{T}(z)W + for i := 0; i < totalSize-maxSizePolys; i++ { + l[totalSize-1-i].Neg(&bufTotalSize[totalSize-1-i]) + } + for i := 0; i < maxSizePolys; i++ { + l[i].Sub(&l[i], &bufTotalSize[i]) + } // L <- L-Z_{T}(z)W + + xMinusZ := buildVanishingPoly([]fr.Element{z}) + wPrime := div(l, xMinusZ) + + res.WPrime, err = kzg.Commit(wPrime, pk) + if err != nil { + return res, err + } + + return res, nil +} + +// BatchVerify uses proof to check that the commitments correctly open to proof.ClaimedValues +// at points. 
The order matters: the proof validates that the i-th commitment is correctly opened +// at the i-th point +// dataTranscript is some extra data that might be needed for Fiat Shamir, and is appended at the end +// of the original transcript. +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + if len(digests) != len(proof.ClaimedValues) { + return ErrInvalidNumberOfPoints + } + if len(digests) != len(points) { + return ErrInvalidNumberOfPoints + } + + // transcript + fs := fiatshamir.NewTranscript(hf, "gamma", "z") + + // derive γ + gamma, err := deriveChallenge("gamma", points, digests, fs, dataTranscript...) + if err != nil { + return err + } + + // derive z + // TODO seems ok that z depend only on W, need to check that carefully + z, err := deriveChallenge("z", nil, []kzg.Digest{proof.W}, fs) + if err != nil { + return err + } + + // check that e(F + zW', [1]_{2})=e(W',[x]_{2}) + // where F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i}-[∑ᵢγⁱZ_{T\xᵢ}(z)fᵢ(z)]_{1}-Z_{T}(z)[W] + var sumGammaiZTminusSiRiz, tmp, accGamma fr.Element + nbInstances := len(points) + gammaiZTminusSiz := make([]fr.Element, nbInstances) + accGamma.SetOne() + ri := make([][]fr.Element, nbInstances) + for i := 0; i < len(points); i++ { + + ztMinusSi := buildZtMinusSi(points, i) // Z_{T-S_{i}}(X) + gammaiZTminusSiz[i] = eval(ztMinusSi, z) // Z_{T-S_{i}}(z) + gammaiZTminusSiz[i].Mul(&accGamma, &gammaiZTminusSiz[i]) // \gamma^{i} Z_{T-S_{i}}(z) + + ri[i] = interpolate(points[i], proof.ClaimedValues[i]) + riz := eval(ri[i], z) // r_{i}(z) + tmp.Mul(&gammaiZTminusSiz[i], &riz) // Z_{T-S_{i}}(z)r_{i}(z) + sumGammaiZTminusSiRiz.Add(&sumGammaiZTminusSiRiz, &tmp) + + accGamma.Mul(&accGamma, &gamma) + } + + // ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} + config := ecc.MultiExpConfig{} + var sumGammaiZtMinusSiComi kzg.Digest + _, err = sumGammaiZtMinusSiComi.MultiExp(digests, gammaiZTminusSiz, config) + if err != nil { + return err + } + + var 
bufBigInt big.Int + + // [∑ᵢZ_{T\xᵢ}fᵢ(z)]_{1} + var sumGammaiZTminusSiRizCom kzg.Digest + var sumGammaiZTminusSiRizBigInt big.Int + sumGammaiZTminusSiRiz.BigInt(&sumGammaiZTminusSiRizBigInt) + sumGammaiZTminusSiRizCom.ScalarMultiplication(&vk.G1, &sumGammaiZTminusSiRizBigInt) + + // Z_{T}(z)[W] + zt := buildVanishingPoly(flatten(points)) + ztz := eval(zt, z) + var ztW kzg.Digest + ztz.BigInt(&bufBigInt) + ztW.ScalarMultiplication(&proof.W, &bufBigInt) + + // F = ∑ᵢγⁱZ_{T\xᵢ}[Com]_{i} - [∑ᵢγⁱZ_{T\xᵢ}fᵢ(z)]_{1} - Z_{T}(z)[W] + var f kzg.Digest + f.Sub(&sumGammaiZtMinusSiComi, &sumGammaiZTminusSiRizCom). + Sub(&f, &ztW) + + // F+zW' + var zWPrime kzg.Digest + z.BigInt(&bufBigInt) + zWPrime.ScalarMultiplication(&proof.WPrime, &bufBigInt) + f.Add(&f, &zWPrime) + f.Neg(&f) + + // check that e(F+zW',[1]_{2})=e(W',[x]_{2}) + check, err := bw6761.PairingCheckFixedQ( + []bw6761.G1Affine{f, proof.WPrime}, + vk.Lines[:], + ) + + if !check { + return ErrVerifyOpeningProof + } + + return nil +} + +// deriveChallenge derives a challenge using Fiat Shamir to polynomials. +// The arguments are added to the transcript in the order in which they are given. 
+func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t *fiatshamir.Transcript, dataTranscript ...[]byte) (fr.Element, error) { + + // derive the challenge gamma, binded to the point and the commitments + for i := range points { + for j := range points[i] { + if err := t.Bind(name, points[i][j].Marshal()); err != nil { + return fr.Element{}, err + } + } + } + for i := range digests { + if err := t.Bind(name, digests[i].Marshal()); err != nil { + return fr.Element{}, err + } + } + + for i := 0; i < len(dataTranscript); i++ { + if err := t.Bind(name, dataTranscript[i]); err != nil { + return fr.Element{}, err + } + } + + gammaByte, err := t.ComputeChallenge(name) + if err != nil { + return fr.Element{}, err + } + var gamma fr.Element + gamma.SetBytes(gammaByte) + + return gamma, nil +} + +// ------------------------------ +// utils + +func flatten(x [][]fr.Element) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + res := make([]fr.Element, 0, nbPoints) + for i := 0; i < len(x); i++ { + res = append(res, x[i]...) 
+ } + return res +} + +// sets f to zero +func setZero(f []fr.Element) { + for i := 0; i < len(f); i++ { + f[i].SetZero() + } +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} + +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) 
+ } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) 
+ } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bw6-761/shplonk/shplonk_test.go b/ecc/bw6-761/shplonk/shplonk_test.go new file mode 100644 index 0000000000..e962fe6dff --- /dev/null +++ b/ecc/bw6-761/shplonk/shplonk_test.go @@ -0,0 +1,320 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = 10 + i + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < 
nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a 
fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to 
f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} From 18554885c18beba07c660d23447dae2bc9b1ba19 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Thu, 14 Mar 2024 11:54:58 +0100 Subject: [PATCH 21/66] fix: added condition for existence of t-th roots --- ecc/bn254/fflonk/fflonk.go | 71 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 ecc/bn254/fflonk/fflonk.go diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go new file mode 100644 index 0000000000..ca44cb8b41 --- /dev/null +++ b/ecc/bn254/fflonk/fflonk.go @@ -0,0 +1,71 @@ +package fflonk + +import ( + "errors" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr" +) + +var ( + ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsX = errors.New("Fr does not contain all the t-th roots of the input") +) + +// utils + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + res.SetUint64(5) + return res +} + +// returns the t t-th roots of x, return an error if they do not exist in Fr +func extractRoots(x fr.Element, t int) ([]fr.Element, error) { + + // for the t-th roots of x to exist we need + // * t | r-1 + // * t² | p - (t-1) + r := fr.Modulus() + tBigInt := big.NewInt(int64(t)) + oneBigInt := big.NewInt(1) + var a, b big.Int + a.Sub(r, oneBigInt) + a.Mod(&a, tBigInt) + zeroBigInt := big.NewInt(0) + if a.Cmp(zeroBigInt) != 0 { + return nil, ErrRootsOne + } + a.SetUint64(uint64(t)).Mul(tBigInt, tBigInt) + b.Sub(r, tBigInt).Add(&b, oneBigInt) + a.Mod(&b, &a) + if b.Cmp(zeroBigInt) != 0 { + return nil, ErrRootsX + } 
+ + // ᵗ√(x) = x^{(p-1)/t + 1} + var expo big.Int + var tthRoot fr.Element + r = fr.Modulus() + tBigInt = big.NewInt(int64(t)) + expo.Sub(r, oneBigInt). + Div(&expo, tBigInt). + Add(&expo, oneBigInt) + tthRoot.Exp(x, &expo) + + // compute the t-th roots of 1 + r.Sub(r, oneBigInt) + tBigInt.Div(r, tBigInt) + gen := getGenFrStar() + gen.Exp(gen, tBigInt) + + res := make([]fr.Element, t) + res[0].Set(&tthRoot) + for i := 1; i < t; i++ { + res[i].Mul(&res[i-1], &gen) + } + + return res, nil + +} From 77f4a854078396b438f8c17295fc3472ef11f2a2 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Thu, 14 Mar 2024 11:55:19 +0100 Subject: [PATCH 22/66] feat: test roots extraction --- ecc/bn254/fflonk/fflonk_test.go | 36 +++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 ecc/bn254/fflonk/fflonk_test.go diff --git a/ecc/bn254/fflonk/fflonk_test.go b/ecc/bn254/fflonk/fflonk_test.go new file mode 100644 index 0000000000..f345c2a04b --- /dev/null +++ b/ecc/bn254/fflonk/fflonk_test.go @@ -0,0 +1,36 @@ +package fflonk + +import ( + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/stretchr/testify/require" +) + +func TestExtractRoots(t *testing.T) { + + assert := require.New(t) + + m := 9 + var x fr.Element + x.SetRandom() + roots, err := extractRoots(x, m) + assert.NoError(err) + + // check that (yᵐ-x)=Πᵢ(y-ωⁱᵗ√(x)) for a random y + var y fr.Element + y.SetRandom() + expo := big.NewInt(int64(m)) + y.Exp(x, expo).Sub(&y, &x) + var rhs, tmp fr.Element + rhs.SetOne() + for i := 0; i < m; i++ { + tmp.Sub(&y, &roots[i]) + rhs.Mul(&rhs, &tmp) + } + if !rhs.Equal(&y) { + assert.Fail("(yᵐ-x) != Πᵢ(y-ωⁱᵗ√(x)))") + } + +} From 70781a5668524eb004282e70a308333a7debbe35 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Thu, 14 Mar 2024 18:48:22 +0100 Subject: [PATCH 23/66] feat: folding ok --- ecc/bn254/fflonk/fflonk.go | 117 +++++++++++++++++++++----------- ecc/bn254/fflonk/fflonk_test.go | 52 +++++++++----- 2 files 
changed, 111 insertions(+), 58 deletions(-) diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index ca44cb8b41..b3aadcb1b8 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -2,9 +2,9 @@ package fflonk import ( "errors" - "math/big" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" ) var ( @@ -21,51 +21,86 @@ func getGenFrStar() fr.Element { return res } -// returns the t t-th roots of x, return an error if they do not exist in Fr -func extractRoots(x fr.Element, t int) ([]fr.Element, error) { +// Commit commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} - // compute the t-th roots of 1 - r.Sub(r, oneBigInt) - tBigInt.Div(r, tBigInt) - gen := getGenFrStar() - gen.Exp(gen, tBigInt) +// returns the t t-th roots of x, return an error if they do not exist in Fr +// func extractRoots(x fr.Element, t int) ([]fr.Element, error) { - res := make([]fr.Element, t) - res[0].Set(&tthRoot) - for i := 1; i < t; i++ { - res[i].Mul(&res[i-1], &gen) - } +// // for the t-th roots of x to exist we need +// // * t | r-1 +// // * t² | p - (t-1) +// r := fr.Modulus() +// tBigInt := big.NewInt(int64(t)) +// oneBigInt := big.NewInt(1) +// var a, b big.Int +// a.Sub(r, oneBigInt) +// a.Mod(&a, tBigInt) +// zeroBigInt := big.NewInt(0) +// if a.Cmp(zeroBigInt) != 0 { +// return nil, ErrRootsOne +// } +// a.SetUint64(uint64(t)).Mul(tBigInt, tBigInt) +// b.Sub(r, tBigInt).Add(&b, oneBigInt) +// a.Mod(&b, &a) +// if b.Cmp(zeroBigInt) != 0 { +// return nil, ErrRootsX +// } - return res, nil +// // ᵗ√(x) = x^{(p-1)/t + 1} +// var expo big.Int +// var tthRoot fr.Element +// r = fr.Modulus() +// tBigInt = big.NewInt(int64(t)) +// expo.Sub(r, oneBigInt). +// Div(&expo, tBigInt). 
+// Add(&expo, oneBigInt) +// tthRoot.Exp(x, &expo) -} +// // compute the t-th roots of 1 +// r.Sub(r, oneBigInt) +// tBigInt.Div(r, tBigInt) +// gen := getGenFrStar() +// gen.Exp(gen, tBigInt) + +// res := make([]fr.Element, t) +// res[0].Set(&tthRoot) +// for i := 1; i < t; i++ { +// res[i].Mul(&res[i-1], &gen) +// } + +// return res, nil + +// } diff --git a/ecc/bn254/fflonk/fflonk_test.go b/ecc/bn254/fflonk/fflonk_test.go index f345c2a04b..75370984f1 100644 --- a/ecc/bn254/fflonk/fflonk_test.go +++ b/ecc/bn254/fflonk/fflonk_test.go @@ -4,33 +4,51 @@ import ( "math/big" "testing" + "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" "github.com/stretchr/testify/require" ) -func TestExtractRoots(t *testing.T) { +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestCommit(t *testing.T) { assert := require.New(t) - m := 9 + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them var x fr.Element x.SetRandom() - roots, err := extractRoots(x, m) + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) assert.NoError(err) - // check that (yᵐ-x)=Πᵢ(y-ωⁱᵗ√(x)) for a random y - var y fr.Element - y.SetRandom() - expo := big.NewInt(int64(m)) - y.Exp(x, expo).Sub(&y, &x) - var rhs, tmp fr.Element - rhs.SetOne() - for i := 0; i < m; i++ { - tmp.Sub(&y, &roots[i]) - rhs.Mul(&rhs, &tmp) - } - if !rhs.Equal(&y) { - assert.Fail("(yᵐ-x) != Πᵢ(y-ωⁱᵗ√(x)))") + // check that Open(C, x) = ∑_{i Date: Fri, 15 Mar 2024 16:36:25 +0100 Subject: [PATCH 24/66] feat: fflonk fold and open --- ecc/bn254/fflonk/fflonk.go | 126 
++++++++++++++++++++++++++++++++++--- 1 file changed, 116 insertions(+), 10 deletions(-) diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index b3aadcb1b8..3878fc51d0 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -2,28 +2,37 @@ package fflonk import ( "errors" + "hash" + "math/big" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/consensys/gnark-crypto/ecc/bn254/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") - ErrRootsX = errors.New("Fr does not contain all the t-th roots of the input") + ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") ) -// utils +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set (Sʲᵢ)ᵢ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - res.SetUint64(5) - return res + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element } -// Commit commits to a list of polynomial by intertwinning them like in the FFT, that is +// CommitAndFold commits to a list of polynomial by intertwinning them like in the FFT, that is // returns ∑_{i if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + var omega fr.Element + zeroBigInt := big.NewInt(0) + genFrStar := getGenFrStar() + rMinusOneBigInt := fr.Modulus() + oneBigInt := big.NewInt(1) + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + newPoints := make([][]fr.Element, len(points)) + for i := 0; i < len(p); i++ { + tmpBigInt.SetUint64(uint64(len(p[i]))) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(zeroBigInt) != 0 { + return res, ErrRootsOne + } + tmpBigInt.SetUint64(uint64(len(p[i]))) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + t := len(p[i]) + newPoints[i] = make([]fr.Element, t*len(points[i])) + for j := 0; j < len(points[i]); j++ { + newPoints[i][j*t].Set(&points[i][j]) + for k := 1; k < t; k++ { + newPoints[i][j*t+k].Mul(&newPoints[i][j*t+k-1], &omega) + } + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + var err error + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, points, hf, pk, dataTranscript...) 
+ + return res, err + +} + +// utils + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + res.SetUint64(5) + return res +} + func eval(f []fr.Element, x fr.Element) fr.Element { var y fr.Element for i := len(f) - 1; i >= 0; i-- { @@ -56,6 +157,11 @@ func eval(f []fr.Element, x fr.Element) fr.Element { return y } +// Open +// func Open(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (shplonk.OpeningProof, error) { + +// } + // returns the t t-th roots of x, return an error if they do not exist in Fr // func extractRoots(x fr.Element, t int) ([]fr.Element, error) { From 5bf6194cf9b4e025f89b5c7534772b6f77758a27 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 15 Mar 2024 17:44:03 +0100 Subject: [PATCH 25/66] feat: getIthRootOne ok --- ecc/bn254/fflonk/fflonk.go | 28 ++++++++++++++++++++++++++++ ecc/bn254/fflonk/fflonk_test.go | 17 +++++++++++++++++ 2 files changed, 45 insertions(+) diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index 3878fc51d0..aa4789a425 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -140,6 +140,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, } +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. 
The shplonk proof is +// verified directly using the embedded shplonk proof, and the claimed values consistency between the underlying +// shplonk proof +func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + return nil +} + // utils // getGenFrStar returns a generator of Fr^{*} @@ -149,6 +157,26 @@ func getGenFrStar() fr.Element { return res } +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := getGenFrStar() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + func eval(f []fr.Element, x fr.Element) fr.Element { var y fr.Element for i := len(f) - 1; i >= 0; i-- { diff --git a/ecc/bn254/fflonk/fflonk_test.go b/ecc/bn254/fflonk/fflonk_test.go index 75370984f1..29a2b28587 100644 --- a/ecc/bn254/fflonk/fflonk_test.go +++ b/ecc/bn254/fflonk/fflonk_test.go @@ -52,3 +52,20 @@ func TestCommit(t *testing.T) { y := eval(px, x) assert.True(y.Equal(&proof.ClaimedValue)) } + +func TestGetIthRootOne(t *testing.T) { + + assert := require.New(t) + + order := 9 + omega, err := getIthRootOne(order) + assert.NoError(err) + var orderBigInt big.Int + orderBigInt.SetUint64(uint64(order)) + omega.Exp(omega, &orderBigInt) + assert.True(omega.IsOne()) + + order = 7 + _, err = getIthRootOne(order) + assert.Error(err) +} From 69b626124528cc498572525188c3f5f98637de90 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Mon, 18 Mar 2024 13:12:13 +0100 Subject: [PATCH 26/66] feat: factored extended set --- 
ecc/bn254/fflonk/fflonk.go | 171 +++++++++++++++++++------------------ 1 file changed, 90 insertions(+), 81 deletions(-) diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index aa4789a425..34f6b300a9 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -11,13 +11,15 @@ import ( ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") - ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") ) // Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each // pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in -// the set (Sʲᵢ)ᵢ (indexed by j), where the power is |(fʲᵢ)ᵢ|. +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
// // implements io.ReaderFrom and io.WriterTo type OpeningProof struct { @@ -89,7 +91,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { res.ClaimedValues[i] = make([][]fr.Element, len(p[i])) - for j := 0; j < len(points[i]); j++ { + for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) @@ -106,34 +108,17 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 4: compute the associated roots, that is for each point p corresponding // to a pack i of polynomials, we extend to if // the i-th pack contains t polynomials where ω is a t-th root of 1 - var omega fr.Element - zeroBigInt := big.NewInt(0) - genFrStar := getGenFrStar() - rMinusOneBigInt := fr.Modulus() - oneBigInt := big.NewInt(1) - rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) newPoints := make([][]fr.Element, len(points)) + var err error for i := 0; i < len(p); i++ { - tmpBigInt.SetUint64(uint64(len(p[i]))) - tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) - if tmpBigInt.Cmp(zeroBigInt) != 0 { - return res, ErrRootsOne - } - tmpBigInt.SetUint64(uint64(len(p[i]))) - tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) - omega.Exp(genFrStar, &tmpBigInt) t := len(p[i]) - newPoints[i] = make([]fr.Element, t*len(points[i])) - for j := 0; j < len(points[i]); j++ { - newPoints[i][j*t].Set(&points[i][j]) - for k := 1; k < t; k++ { - newPoints[i][j*t+k].Mul(&newPoints[i][j*t+k-1], &omega) - } + newPoints[i], err = extendSet(points[i], t) + if err != nil { + return res, err } } // step 5: shplonk open the list of single polynomials on the new sets - var err error res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, points, hf, pk, dataTranscript...) 
return res, err @@ -142,9 +127,68 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. // The digests are the commitments to the folded underlying polynomials. The shplonk proof is -// verified directly using the embedded shplonk proof, and the claimed values consistency between the underlying -// shplonk proof +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, accOmega fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err 
+ } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + accOmega.SetOne() + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, accOmega) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + accOmega.Mul(&accOmega, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + return nil } @@ -177,6 +221,25 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + func eval(f []fr.Element, x fr.Element) fr.Element { var y fr.Element for i := len(f) - 1; i >= 0; i-- { @@ -184,57 +247,3 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } - -// Open -// func Open(polynomials [][]fr.Element, digests []kzg.Digest, points []fr.Element, hf hash.Hash, pk kzg.ProvingKey, dataTranscript ...[]byte) (shplonk.OpeningProof, error) { - -// } - -// returns the t t-th roots of x, return an error if they do not exist in Fr -// func extractRoots(x fr.Element, t int) ([]fr.Element, error) { - -// // for the t-th roots of x to exist we need -// // * t | r-1 -// // * t² | p - (t-1) -// r := fr.Modulus() -// tBigInt := big.NewInt(int64(t)) -// oneBigInt := big.NewInt(1) -// var a, b big.Int -// a.Sub(r, oneBigInt) -// a.Mod(&a, tBigInt) -// zeroBigInt := big.NewInt(0) -// if a.Cmp(zeroBigInt) != 0 { -// return nil, ErrRootsOne -// } -// a.SetUint64(uint64(t)).Mul(tBigInt, tBigInt) -// b.Sub(r, tBigInt).Add(&b, oneBigInt) -// a.Mod(&b, &a) -// if b.Cmp(zeroBigInt) != 0 { -// return nil, ErrRootsX -// } - -// // ᵗ√(x) = x^{(p-1)/t + 1} -// var expo big.Int -// var tthRoot fr.Element -// r = fr.Modulus() -// tBigInt = big.NewInt(int64(t)) -// expo.Sub(r, oneBigInt). -// Div(&expo, tBigInt). 
-// Add(&expo, oneBigInt) -// tthRoot.Exp(x, &expo) - -// // compute the t-th roots of 1 -// r.Sub(r, oneBigInt) -// tBigInt.Div(r, tBigInt) -// gen := getGenFrStar() -// gen.Exp(gen, tBigInt) - -// res := make([]fr.Element, t) -// res[0].Set(&tthRoot) -// for i := 1; i < t; i++ { -// res[i].Mul(&res[i-1], &gen) -// } - -// return res, nil - -// } From 0b73a022467034e7eb8e6418ecf17270edb13284 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Mon, 18 Mar 2024 18:25:11 +0100 Subject: [PATCH 27/66] feat: fixed fflonk folding --- ecc/bn254/fflonk/fflonk.go | 10 +++--- ecc/bn254/fflonk/fflonk_test.go | 56 ++++++++++++++++++++++++++++++++- 2 files changed, 60 insertions(+), 6 deletions(-) diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index 34f6b300a9..3f8533e51b 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -119,7 +119,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, } // step 5: shplonk open the list of single polynomials on the new sets - res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, points, hf, pk, dataTranscript...) + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) 
return res, err @@ -153,7 +153,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // step 1: fold the outer claimed values and check that they correspond to the // shplonk claimed values - var curFoldedClaimedValue, accOmega fr.Element + var curFoldedClaimedValue, omgeaiPoint fr.Element for i := 0; i < len(proof.ClaimedValues); i++ { t := len(proof.ClaimedValues[i]) omega, err := getIthRootOne(t) @@ -162,17 +162,17 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } sizeSi := len(proof.ClaimedValues[i][0]) polyClaimedValues := make([]fr.Element, t) - accOmega.SetOne() for j := 0; j < sizeSi; j++ { for k := 0; k < t; k++ { polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) } + omgeaiPoint.Set(&points[i][j]) for l := 0; l < t; l++ { - curFoldedClaimedValue = eval(polyClaimedValues, accOmega) + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { return ErrInonsistentFolding } - accOmega.Mul(&accOmega, &omega) + omgeaiPoint.Mul(&omgeaiPoint, &omega) } } } diff --git a/ecc/bn254/fflonk/fflonk_test.go b/ecc/bn254/fflonk/fflonk_test.go index 29a2b28587..e5409a602a 100644 --- a/ecc/bn254/fflonk/fflonk_test.go +++ b/ecc/bn254/fflonk/fflonk_test.go @@ -1,6 +1,7 @@ package fflonk import ( + "crypto/sha256" "math/big" "testing" @@ -15,11 +16,64 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 230 + const srsSize = 400 bAlpha = new(big.Int).SetInt64(42) // randomise ? 
testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = CommitAndFold(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + func TestCommit(t *testing.T) { assert := require.New(t) From 24ac555295c429f2c9b98111db6593371ae56e20 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Mon, 18 Mar 2024 18:35:36 +0100 Subject: [PATCH 28/66] feat: addition of templates for fflonk --- internal/generator/fflonk/generator.go | 23 ++ .../generator/fflonk/template/fflonk.go.tmpl | 265 ++++++++++++++++++ .../fflonk/template/fflonk.test.go.tmpl | 123 ++++++++ internal/generator/main.go | 3 + 4 files changed, 414 insertions(+) create mode 100644 internal/generator/fflonk/generator.go create mode 100644 internal/generator/fflonk/template/fflonk.go.tmpl create mode 100644 
internal/generator/fflonk/template/fflonk.test.go.tmpl diff --git a/internal/generator/fflonk/generator.go b/internal/generator/fflonk/generator.go new file mode 100644 index 0000000000..2bc2897afb --- /dev/null +++ b/internal/generator/fflonk/generator.go @@ -0,0 +1,23 @@ +package fflonk + +import ( + "path/filepath" + + "github.com/consensys/bavard" + "github.com/consensys/gnark-crypto/internal/generator/config" +) + +func Generate(conf config.Curve, baseDir string, bgen *bavard.BatchGenerator) error { + + // kzg commitment scheme + conf.Package = "fflonk" + entries := []bavard.Entry{ + // {File: filepath.Join(baseDir, "doc.go"), Templates: []string{"doc.go.tmpl"}}, + {File: filepath.Join(baseDir, "fflonk.go"), Templates: []string{"fflonk.go.tmpl"}}, + {File: filepath.Join(baseDir, "fflonk_test.go"), Templates: []string{"fflonk.test.go.tmpl"}}, + // {File: filepath.Join(baseDir, "marshal.go"), Templates: []string{"marshal.go.tmpl"}}, + // {File: filepath.Join(baseDir, "utils.go"), Templates: []string{"utils.go.tmpl"}}, + } + return bgen.Generate(conf, conf.Package, "./fflonk/template/", entries...) 
+ +} diff --git a/internal/generator/fflonk/template/fflonk.go.tmpl b/internal/generator/fflonk/template/fflonk.go.tmpl new file mode 100644 index 0000000000..f9ee38a06b --- /dev/null +++ b/internal/generator/fflonk/template/fflonk.go.tmpl @@ -0,0 +1,265 @@ +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/shplonk" +) + +var ( + ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") +) + +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element +} + +// CommitAndFold commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + t := len(p[i]) + newPoints[i], err = extendSet(points[i], t) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return nil +} + +// utils + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + {{if eq .Name "bls12-378"}} + res.SetUint64(22) + {{else if eq .Name "bls12-377"}} + res.SetUint64(22) + {{else if eq .Name "bls12-381"}} + res.SetUint64(7) + {{else if eq .Name "bn254"}} + res.SetUint64(5) + {{else if eq .Name "bw6-761"}} + res.SetUint64(15) + {{else if eq .Name "bw6-756"}} + res.SetUint64(5) + {{else if eq .Name "bw6-633"}} + res.SetUint64(13) + {{else if eq .Name "bls24-315"}} + res.SetUint64(7) + {{else if eq .Name "bls24-317"}} + res.SetUint64(7) + {{end}} + return res +} + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := getGenFrStar() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/internal/generator/fflonk/template/fflonk.test.go.tmpl b/internal/generator/fflonk/template/fflonk.test.go.tmpl new file mode 100644 index 0000000000..c65b2afa17 --- /dev/null +++ b/internal/generator/fflonk/template/fflonk.test.go.tmpl @@ -0,0 +1,123 @@ +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 400 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = CommitAndFold(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i Date: Mon, 18 Mar 2024 18:37:43 +0100 Subject: [PATCH 29/66] feat: add doc for fflonk --- internal/generator/fflonk/generator.go | 3 +-- internal/generator/fflonk/template/doc.go.tmpl | 5 +++++ 2 files changed, 6 insertions(+), 2 
deletions(-) create mode 100644 internal/generator/fflonk/template/doc.go.tmpl diff --git a/internal/generator/fflonk/generator.go b/internal/generator/fflonk/generator.go index 2bc2897afb..40ee17daf3 100644 --- a/internal/generator/fflonk/generator.go +++ b/internal/generator/fflonk/generator.go @@ -12,11 +12,10 @@ func Generate(conf config.Curve, baseDir string, bgen *bavard.BatchGenerator) er // kzg commitment scheme conf.Package = "fflonk" entries := []bavard.Entry{ - // {File: filepath.Join(baseDir, "doc.go"), Templates: []string{"doc.go.tmpl"}}, + {File: filepath.Join(baseDir, "doc.go"), Templates: []string{"doc.go.tmpl"}}, {File: filepath.Join(baseDir, "fflonk.go"), Templates: []string{"fflonk.go.tmpl"}}, {File: filepath.Join(baseDir, "fflonk_test.go"), Templates: []string{"fflonk.test.go.tmpl"}}, // {File: filepath.Join(baseDir, "marshal.go"), Templates: []string{"marshal.go.tmpl"}}, - // {File: filepath.Join(baseDir, "utils.go"), Templates: []string{"utils.go.tmpl"}}, } return bgen.Generate(conf, conf.Package, "./fflonk/template/", entries...) diff --git a/internal/generator/fflonk/template/doc.go.tmpl b/internal/generator/fflonk/template/doc.go.tmpl new file mode 100644 index 0000000000..382fe8636e --- /dev/null +++ b/internal/generator/fflonk/template/doc.go.tmpl @@ -0,0 +1,5 @@ +// Package {{.Package}} provides fflonk commitment, based on shplonk. +// +// See https://eprint.iacr.org/2020/081.pdf for shplonk +// See https://eprint.iacr.org/2021/1167.pdf for fflonk. 
+package {{.Package}} \ No newline at end of file From 68c09521781df17adcdd8f936346bd3408e47512 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Mon, 18 Mar 2024 18:46:26 +0100 Subject: [PATCH 30/66] feat: added fflonk in code gen main --- internal/generator/main.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/internal/generator/main.go b/internal/generator/main.go index 7522a0c7bc..ee5e072a5f 100644 --- a/internal/generator/main.go +++ b/internal/generator/main.go @@ -16,6 +16,7 @@ import ( "github.com/consensys/gnark-crypto/internal/generator/ecdsa" "github.com/consensys/gnark-crypto/internal/generator/edwards" "github.com/consensys/gnark-crypto/internal/generator/edwards/eddsa" + "github.com/consensys/gnark-crypto/internal/generator/fflonk" "github.com/consensys/gnark-crypto/internal/generator/fft" fri "github.com/consensys/gnark-crypto/internal/generator/fri/template" "github.com/consensys/gnark-crypto/internal/generator/gkr" @@ -104,7 +105,7 @@ func main() { assertNoError(shplonk.Generate(conf, filepath.Join(curveDir, "shplonk"), bgen)) // generate fflonk on fr - assertNoError(shplonk.Generate(conf, filepath.Join(curveDir, "fflonk"), bgen)) + assertNoError(fflonk.Generate(conf, filepath.Join(curveDir, "fflonk"), bgen)) // generate pedersen on fr assertNoError(pedersen.Generate(conf, filepath.Join(curveDir, "fr", "pedersen"), bgen)) From e4e4a39c17b7b2da22a844a4da3398603d2e9406 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Mon, 18 Mar 2024 18:48:07 +0100 Subject: [PATCH 31/66] feat: code gen, tests for existence of roots of 1 need to be tuned --- ecc/bls12-377/fflonk/doc.go | 21 +++ ecc/bls12-377/fflonk/fflonk.go | 267 ++++++++++++++++++++++++++++ ecc/bls12-377/fflonk/fflonk_test.go | 141 +++++++++++++++ ecc/bls12-378/fflonk/doc.go | 21 +++ ecc/bls12-378/fflonk/fflonk.go | 267 ++++++++++++++++++++++++++++ ecc/bls12-378/fflonk/fflonk_test.go | 141 +++++++++++++++ ecc/bls12-381/fflonk/doc.go | 21 +++ 
ecc/bls12-381/fflonk/fflonk.go | 267 ++++++++++++++++++++++++++++ ecc/bls12-381/fflonk/fflonk_test.go | 141 +++++++++++++++ ecc/bls24-315/fflonk/doc.go | 21 +++ ecc/bls24-315/fflonk/fflonk.go | 267 ++++++++++++++++++++++++++++ ecc/bls24-315/fflonk/fflonk_test.go | 141 +++++++++++++++ ecc/bls24-317/fflonk/doc.go | 21 +++ ecc/bls24-317/fflonk/fflonk.go | 267 ++++++++++++++++++++++++++++ ecc/bls24-317/fflonk/fflonk_test.go | 141 +++++++++++++++ ecc/bn254/fflonk/doc.go | 21 +++ ecc/bn254/fflonk/fflonk.go | 18 ++ ecc/bn254/fflonk/fflonk_test.go | 16 ++ ecc/bw6-633/fflonk/doc.go | 21 +++ ecc/bw6-633/fflonk/fflonk.go | 267 ++++++++++++++++++++++++++++ ecc/bw6-633/fflonk/fflonk_test.go | 141 +++++++++++++++ ecc/bw6-756/fflonk/doc.go | 21 +++ ecc/bw6-756/fflonk/fflonk.go | 267 ++++++++++++++++++++++++++++ ecc/bw6-756/fflonk/fflonk_test.go | 141 +++++++++++++++ ecc/bw6-761/fflonk/doc.go | 21 +++ ecc/bw6-761/fflonk/fflonk.go | 267 ++++++++++++++++++++++++++++ ecc/bw6-761/fflonk/fflonk_test.go | 141 +++++++++++++++ 27 files changed, 3487 insertions(+) create mode 100644 ecc/bls12-377/fflonk/doc.go create mode 100644 ecc/bls12-377/fflonk/fflonk.go create mode 100644 ecc/bls12-377/fflonk/fflonk_test.go create mode 100644 ecc/bls12-378/fflonk/doc.go create mode 100644 ecc/bls12-378/fflonk/fflonk.go create mode 100644 ecc/bls12-378/fflonk/fflonk_test.go create mode 100644 ecc/bls12-381/fflonk/doc.go create mode 100644 ecc/bls12-381/fflonk/fflonk.go create mode 100644 ecc/bls12-381/fflonk/fflonk_test.go create mode 100644 ecc/bls24-315/fflonk/doc.go create mode 100644 ecc/bls24-315/fflonk/fflonk.go create mode 100644 ecc/bls24-315/fflonk/fflonk_test.go create mode 100644 ecc/bls24-317/fflonk/doc.go create mode 100644 ecc/bls24-317/fflonk/fflonk.go create mode 100644 ecc/bls24-317/fflonk/fflonk_test.go create mode 100644 ecc/bn254/fflonk/doc.go create mode 100644 ecc/bw6-633/fflonk/doc.go create mode 100644 ecc/bw6-633/fflonk/fflonk.go create mode 100644 
ecc/bw6-633/fflonk/fflonk_test.go create mode 100644 ecc/bw6-756/fflonk/doc.go create mode 100644 ecc/bw6-756/fflonk/fflonk.go create mode 100644 ecc/bw6-756/fflonk/fflonk_test.go create mode 100644 ecc/bw6-761/fflonk/doc.go create mode 100644 ecc/bw6-761/fflonk/fflonk.go create mode 100644 ecc/bw6-761/fflonk/fflonk_test.go diff --git a/ecc/bls12-377/fflonk/doc.go b/ecc/bls12-377/fflonk/doc.go new file mode 100644 index 0000000000..694eaa9734 --- /dev/null +++ b/ecc/bls12-377/fflonk/doc.go @@ -0,0 +1,21 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package fflonk provides fflonk commitment, based on shplonk. +// +// See https://eprint.iacr.org/2020/081.pdf for shplonk +// See https://eprint.iacr.org/2021/1167.pdf for fflonk. +package fflonk diff --git a/ecc/bls12-377/fflonk/fflonk.go b/ecc/bls12-377/fflonk/fflonk.go new file mode 100644 index 0000000000..31176422e0 --- /dev/null +++ b/ecc/bls12-377/fflonk/fflonk.go @@ -0,0 +1,267 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + "github.com/consensys/gnark-crypto/ecc/bls12-377/shplonk" +) + +var ( + ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") +) + +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element +} + +// CommitAndFold commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + t := len(p[i]) + newPoints[i], err = extendSet(points[i], t) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error {
+
+	// step 0: consistency checks between the folded claimed values of shplonk and the claimed
+	// values at the powers of the Sᵢ
+	for i := 0; i < len(proof.ClaimedValues); i++ {
+		sizeSi := len(proof.ClaimedValues[i][0])
+		for j := 1; j < len(proof.ClaimedValues[i]); j++ {
+			// each set of opening must be of the same size (openings on powers of Si)
+			if sizeSi != len(proof.ClaimedValues[i][j]) {
+				return ErrNbPolynomialsNbPoints
+			}
+		}
+		currNbPolynomials := len(proof.ClaimedValues[i])
+		sizeSi = sizeSi * currNbPolynomials
+		// |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ|
+		if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) {
+			return ErrInconsistentNumberFoldedPoints
+		}
+	}
+
+	// step 1: fold the outer claimed values and check that they correspond to the
+	// shplonk claimed values
+	var curFoldedClaimedValue, omgeaiPoint fr.Element
+	for i := 0; i < len(proof.ClaimedValues); i++ {
+		t := len(proof.ClaimedValues[i])
+		omega, err := getIthRootOne(t)
+		if err != nil {
+			return err
+		}
+		sizeSi := len(proof.ClaimedValues[i][0])
+		polyClaimedValues := make([]fr.Element, t)
+		for j := 0; j < sizeSi; j++ {
+			for k := 0; k < t; k++ {
+				polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j])
+			}
+			omgeaiPoint.Set(&points[i][j])
+			for l := 0; l < t; l++ {
+				curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint)
+				if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) {
+					return ErrInonsistentFolding
+				}
+				omgeaiPoint.Mul(&omgeaiPoint, &omega)
+			}
+		}
+	}
+
+	// step 2: verify the embedded shplonk proof
+	extendedPoints := make([][]fr.Element, len(points))
+	var err error
+	for i := 0; i < len(points); i++ {
+		t := len(proof.ClaimedValues[i])
+		extendedPoints[i], err = extendSet(points[i], t)
+		if err != nil {
+			return err
+		}
+	}
+	err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...)
+
+	return err
+}
+
+// utils
+
+// getGenFrStar returns a generator of Fr^{*}
+func getGenFrStar() fr.Element {
+	var res fr.Element
+
+	res.SetUint64(22)
+
+	return res
+}
+
+// getIthRootOne returns a generator of Z/iZ
+func getIthRootOne(i int) (fr.Element, error) {
+	var omega fr.Element
+	var tmpBigInt, zeroBigInt big.Int
+	oneBigInt := big.NewInt(1)
+	zeroBigInt.SetUint64(0)
+	rMinusOneBigInt := fr.Modulus()
+	rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt)
+	tmpBigInt.SetUint64(uint64(i))
+	tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt)
+	if tmpBigInt.Cmp(&zeroBigInt) != 0 {
+		return omega, ErrRootsOne
+	}
+	genFrStar := getGenFrStar()
+	tmpBigInt.SetUint64(uint64(i))
+	tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt)
+	omega.Exp(genFrStar, &tmpBigInt)
+	return omega, nil
+}
+
+// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..]
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) {
+
+	omega, err := getIthRootOne(t)
+	if err != nil {
+		return nil, err
+	}
+	nbPoints := len(p)
+	newPoints := make([]fr.Element, t*nbPoints)
+	for i := 0; i < nbPoints; i++ {
+		newPoints[i*t].Set(&p[i])
+		for k := 1; k < t; k++ {
+			newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega)
+		}
+	}
+
+	return newPoints, nil
+}
+
+func eval(f []fr.Element, x fr.Element) fr.Element {
+	var y fr.Element
+	for i := len(f) - 1; i >= 0; i-- {
+		y.Mul(&y, &x).Add(&y, &f[i])
+	}
+	return y
+}
diff --git a/ecc/bls12-377/fflonk/fflonk_test.go b/ecc/bls12-377/fflonk/fflonk_test.go
new file mode 100644
index 0000000000..4caddc248d
--- /dev/null
+++ b/ecc/bls12-377/fflonk/fflonk_test.go
@@ -0,0 +1,141 @@
+// Copyright 2020 Consensys Software Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 400 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = CommitAndFold(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + 
assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + t := len(p[i]) + newPoints[i], err = extendSet(points[i], t) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error {
+
+	// step 0: consistency checks between the folded claimed values of shplonk and the claimed
+	// values at the powers of the Sᵢ
+	for i := 0; i < len(proof.ClaimedValues); i++ {
+		sizeSi := len(proof.ClaimedValues[i][0])
+		for j := 1; j < len(proof.ClaimedValues[i]); j++ {
+			// each set of opening must be of the same size (openings on powers of Si)
+			if sizeSi != len(proof.ClaimedValues[i][j]) {
+				return ErrNbPolynomialsNbPoints
+			}
+		}
+		currNbPolynomials := len(proof.ClaimedValues[i])
+		sizeSi = sizeSi * currNbPolynomials
+		// |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ|
+		if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) {
+			return ErrInconsistentNumberFoldedPoints
+		}
+	}
+
+	// step 1: fold the outer claimed values and check that they correspond to the
+	// shplonk claimed values
+	var curFoldedClaimedValue, omgeaiPoint fr.Element
+	for i := 0; i < len(proof.ClaimedValues); i++ {
+		t := len(proof.ClaimedValues[i])
+		omega, err := getIthRootOne(t)
+		if err != nil {
+			return err
+		}
+		sizeSi := len(proof.ClaimedValues[i][0])
+		polyClaimedValues := make([]fr.Element, t)
+		for j := 0; j < sizeSi; j++ {
+			for k := 0; k < t; k++ {
+				polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j])
+			}
+			omgeaiPoint.Set(&points[i][j])
+			for l := 0; l < t; l++ {
+				curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint)
+				if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) {
+					return ErrInonsistentFolding
+				}
+				omgeaiPoint.Mul(&omgeaiPoint, &omega)
+			}
+		}
+	}
+
+	// step 2: verify the embedded shplonk proof
+	extendedPoints := make([][]fr.Element, len(points))
+	var err error
+	for i := 0; i < len(points); i++ {
+		t := len(proof.ClaimedValues[i])
+		extendedPoints[i], err = extendSet(points[i], t)
+		if err != nil {
+			return err
+		}
+	}
+	err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...)
+
+	return err
+}
+
+// utils
+
+// getGenFrStar returns a generator of Fr^{*}
+func getGenFrStar() fr.Element {
+	var res fr.Element
+
+	res.SetUint64(7)
+
+	return res
+}
+
+// getIthRootOne returns a generator of Z/iZ
+func getIthRootOne(i int) (fr.Element, error) {
+	var omega fr.Element
+	var tmpBigInt, zeroBigInt big.Int
+	oneBigInt := big.NewInt(1)
+	zeroBigInt.SetUint64(0)
+	rMinusOneBigInt := fr.Modulus()
+	rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt)
+	tmpBigInt.SetUint64(uint64(i))
+	tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt)
+	if tmpBigInt.Cmp(&zeroBigInt) != 0 {
+		return omega, ErrRootsOne
+	}
+	genFrStar := getGenFrStar()
+	tmpBigInt.SetUint64(uint64(i))
+	tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt)
+	omega.Exp(genFrStar, &tmpBigInt)
+	return omega, nil
+}
+
+// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..]
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) {
+
+	omega, err := getIthRootOne(t)
+	if err != nil {
+		return nil, err
+	}
+	nbPoints := len(p)
+	newPoints := make([]fr.Element, t*nbPoints)
+	for i := 0; i < nbPoints; i++ {
+		newPoints[i*t].Set(&p[i])
+		for k := 1; k < t; k++ {
+			newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega)
+		}
+	}
+
+	return newPoints, nil
+}
+
+func eval(f []fr.Element, x fr.Element) fr.Element {
+	var y fr.Element
+	for i := len(f) - 1; i >= 0; i-- {
+		y.Mul(&y, &x).Add(&y, &f[i])
+	}
+	return y
+}
diff --git a/ecc/bls12-378/fflonk/fflonk_test.go b/ecc/bls12-378/fflonk/fflonk_test.go
new file mode 100644
index 0000000000..e26d4f7380
--- /dev/null
+++ b/ecc/bls12-378/fflonk/fflonk_test.go
@@ -0,0 +1,141 @@
+// Copyright 2020 Consensys Software Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-378/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-378/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 400 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = CommitAndFold(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + 
assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + t := len(p[i]) + newPoints[i], err = extendSet(points[i], t) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error {
+
+	// step 0: consistency checks between the folded claimed values of shplonk and the claimed
+	// values at the powers of the Sᵢ
+	for i := 0; i < len(proof.ClaimedValues); i++ {
+		sizeSi := len(proof.ClaimedValues[i][0])
+		for j := 1; j < len(proof.ClaimedValues[i]); j++ {
+			// each set of opening must be of the same size (openings on powers of Si)
+			if sizeSi != len(proof.ClaimedValues[i][j]) {
+				return ErrNbPolynomialsNbPoints
+			}
+		}
+		currNbPolynomials := len(proof.ClaimedValues[i])
+		sizeSi = sizeSi * currNbPolynomials
+		// |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ|
+		if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) {
+			return ErrInconsistentNumberFoldedPoints
+		}
+	}
+
+	// step 1: fold the outer claimed values and check that they correspond to the
+	// shplonk claimed values
+	var curFoldedClaimedValue, omgeaiPoint fr.Element
+	for i := 0; i < len(proof.ClaimedValues); i++ {
+		t := len(proof.ClaimedValues[i])
+		omega, err := getIthRootOne(t)
+		if err != nil {
+			return err
+		}
+		sizeSi := len(proof.ClaimedValues[i][0])
+		polyClaimedValues := make([]fr.Element, t)
+		for j := 0; j < sizeSi; j++ {
+			for k := 0; k < t; k++ {
+				polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j])
+			}
+			omgeaiPoint.Set(&points[i][j])
+			for l := 0; l < t; l++ {
+				curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint)
+				if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) {
+					return ErrInonsistentFolding
+				}
+				omgeaiPoint.Mul(&omgeaiPoint, &omega)
+			}
+		}
+	}
+
+	// step 2: verify the embedded shplonk proof
+	extendedPoints := make([][]fr.Element, len(points))
+	var err error
+	for i := 0; i < len(points); i++ {
+		t := len(proof.ClaimedValues[i])
+		extendedPoints[i], err = extendSet(points[i], t)
+		if err != nil {
+			return err
+		}
+	}
+	err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...)
+
+	return err
+}
+
+// utils
+
+// getGenFrStar returns a generator of Fr^{*}
+func getGenFrStar() fr.Element {
+	var res fr.Element
+
+	res.SetUint64(7)
+
+	return res
+}
+
+// getIthRootOne returns a generator of Z/iZ
+func getIthRootOne(i int) (fr.Element, error) {
+	var omega fr.Element
+	var tmpBigInt, zeroBigInt big.Int
+	oneBigInt := big.NewInt(1)
+	zeroBigInt.SetUint64(0)
+	rMinusOneBigInt := fr.Modulus()
+	rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt)
+	tmpBigInt.SetUint64(uint64(i))
+	tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt)
+	if tmpBigInt.Cmp(&zeroBigInt) != 0 {
+		return omega, ErrRootsOne
+	}
+	genFrStar := getGenFrStar()
+	tmpBigInt.SetUint64(uint64(i))
+	tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt)
+	omega.Exp(genFrStar, &tmpBigInt)
+	return omega, nil
+}
+
+// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..]
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) {
+
+	omega, err := getIthRootOne(t)
+	if err != nil {
+		return nil, err
+	}
+	nbPoints := len(p)
+	newPoints := make([]fr.Element, t*nbPoints)
+	for i := 0; i < nbPoints; i++ {
+		newPoints[i*t].Set(&p[i])
+		for k := 1; k < t; k++ {
+			newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega)
+		}
+	}
+
+	return newPoints, nil
+}
+
+func eval(f []fr.Element, x fr.Element) fr.Element {
+	var y fr.Element
+	for i := len(f) - 1; i >= 0; i-- {
+		y.Mul(&y, &x).Add(&y, &f[i])
+	}
+	return y
+}
diff --git a/ecc/bls12-381/fflonk/fflonk_test.go b/ecc/bls12-381/fflonk/fflonk_test.go
new file mode 100644
index 0000000000..417e22139d
--- /dev/null
+++ b/ecc/bls12-381/fflonk/fflonk_test.go
@@ -0,0 +1,141 @@
+// Copyright 2020 Consensys Software Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 400 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = CommitAndFold(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + 
assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + t := len(p[i]) + newPoints[i], err = extendSet(points[i], t) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error {
+
+	// step 0: consistency checks between the folded claimed values of shplonk and the claimed
+	// values at the powers of the Sᵢ
+	for i := 0; i < len(proof.ClaimedValues); i++ {
+		sizeSi := len(proof.ClaimedValues[i][0])
+		for j := 1; j < len(proof.ClaimedValues[i]); j++ {
+			// each set of opening must be of the same size (openings on powers of Si)
+			if sizeSi != len(proof.ClaimedValues[i][j]) {
+				return ErrNbPolynomialsNbPoints
+			}
+		}
+		currNbPolynomials := len(proof.ClaimedValues[i])
+		sizeSi = sizeSi * currNbPolynomials
+		// |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ|
+		if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) {
+			return ErrInconsistentNumberFoldedPoints
+		}
+	}
+
+	// step 1: fold the outer claimed values and check that they correspond to the
+	// shplonk claimed values
+	var curFoldedClaimedValue, omgeaiPoint fr.Element
+	for i := 0; i < len(proof.ClaimedValues); i++ {
+		t := len(proof.ClaimedValues[i])
+		omega, err := getIthRootOne(t)
+		if err != nil {
+			return err
+		}
+		sizeSi := len(proof.ClaimedValues[i][0])
+		polyClaimedValues := make([]fr.Element, t)
+		for j := 0; j < sizeSi; j++ {
+			for k := 0; k < t; k++ {
+				polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j])
+			}
+			omgeaiPoint.Set(&points[i][j])
+			for l := 0; l < t; l++ {
+				curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint)
+				if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) {
+					return ErrInonsistentFolding
+				}
+				omgeaiPoint.Mul(&omgeaiPoint, &omega)
+			}
+		}
+	}
+
+	// step 2: verify the embedded shplonk proof
+	extendedPoints := make([][]fr.Element, len(points))
+	var err error
+	for i := 0; i < len(points); i++ {
+		t := len(proof.ClaimedValues[i])
+		extendedPoints[i], err = extendSet(points[i], t)
+		if err != nil {
+			return err
+		}
+	}
+	err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...)
+
+	return err
+}
+
+// utils
+
+// getGenFrStar returns a generator of Fr^{*}
+func getGenFrStar() fr.Element {
+	var res fr.Element
+
+	res.SetUint64(7)
+
+	return res
+}
+
+// getIthRootOne returns a generator of Z/iZ
+func getIthRootOne(i int) (fr.Element, error) {
+	var omega fr.Element
+	var tmpBigInt, zeroBigInt big.Int
+	oneBigInt := big.NewInt(1)
+	zeroBigInt.SetUint64(0)
+	rMinusOneBigInt := fr.Modulus()
+	rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt)
+	tmpBigInt.SetUint64(uint64(i))
+	tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt)
+	if tmpBigInt.Cmp(&zeroBigInt) != 0 {
+		return omega, ErrRootsOne
+	}
+	genFrStar := getGenFrStar()
+	tmpBigInt.SetUint64(uint64(i))
+	tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt)
+	omega.Exp(genFrStar, &tmpBigInt)
+	return omega, nil
+}
+
+// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..]
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) {
+
+	omega, err := getIthRootOne(t)
+	if err != nil {
+		return nil, err
+	}
+	nbPoints := len(p)
+	newPoints := make([]fr.Element, t*nbPoints)
+	for i := 0; i < nbPoints; i++ {
+		newPoints[i*t].Set(&p[i])
+		for k := 1; k < t; k++ {
+			newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega)
+		}
+	}
+
+	return newPoints, nil
+}
+
+func eval(f []fr.Element, x fr.Element) fr.Element {
+	var y fr.Element
+	for i := len(f) - 1; i >= 0; i-- {
+		y.Mul(&y, &x).Add(&y, &f[i])
+	}
+	return y
+}
diff --git a/ecc/bls24-315/fflonk/fflonk_test.go b/ecc/bls24-315/fflonk/fflonk_test.go
new file mode 100644
index 0000000000..af7cc9faf7
--- /dev/null
+++ b/ecc/bls24-315/fflonk/fflonk_test.go
@@ -0,0 +1,141 @@
+// Copyright 2020 Consensys Software Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 400 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = CommitAndFold(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + 
assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + t := len(p[i]) + newPoints[i], err = extendSet(points[i], t) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error {
+
+	// step 0: consistency checks between the folded claimed values of shplonk and the claimed
+	// values at the powers of the Sᵢ
+	for i := 0; i < len(proof.ClaimedValues); i++ {
+		sizeSi := len(proof.ClaimedValues[i][0])
+		for j := 1; j < len(proof.ClaimedValues[i]); j++ {
+			// each set of opening must be of the same size (openings on powers of Si)
+			if sizeSi != len(proof.ClaimedValues[i][j]) {
+				return ErrNbPolynomialsNbPoints
+			}
+		}
+		currNbPolynomials := len(proof.ClaimedValues[i])
+		sizeSi = sizeSi * currNbPolynomials
+		// |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ|
+		if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) {
+			return ErrInconsistentNumberFoldedPoints
+		}
+	}
+
+	// step 1: fold the outer claimed values and check that they correspond to the
+	// shplonk claimed values
+	var curFoldedClaimedValue, omgeaiPoint fr.Element
+	for i := 0; i < len(proof.ClaimedValues); i++ {
+		t := len(proof.ClaimedValues[i])
+		omega, err := getIthRootOne(t)
+		if err != nil {
+			return err
+		}
+		sizeSi := len(proof.ClaimedValues[i][0])
+		polyClaimedValues := make([]fr.Element, t)
+		for j := 0; j < sizeSi; j++ {
+			for k := 0; k < t; k++ {
+				polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j])
+			}
+			omgeaiPoint.Set(&points[i][j])
+			for l := 0; l < t; l++ {
+				curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint)
+				if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) {
+					return ErrInonsistentFolding
+				}
+				omgeaiPoint.Mul(&omgeaiPoint, &omega)
+			}
+		}
+	}
+
+	// step 2: verify the embedded shplonk proof
+	extendedPoints := make([][]fr.Element, len(points))
+	var err error
+	for i := 0; i < len(points); i++ {
+		t := len(proof.ClaimedValues[i])
+		extendedPoints[i], err = extendSet(points[i], t)
+		if err != nil {
+			return err
+		}
+	}
+	err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...)
+
+	return err
+}
+
+// utils
+
+// getGenFrStar returns a generator of Fr^{*}
+func getGenFrStar() fr.Element {
+	var res fr.Element
+
+	res.SetUint64(7)
+
+	return res
+}
+
+// getIthRootOne returns a generator of Z/iZ
+func getIthRootOne(i int) (fr.Element, error) {
+	var omega fr.Element
+	var tmpBigInt, zeroBigInt big.Int
+	oneBigInt := big.NewInt(1)
+	zeroBigInt.SetUint64(0)
+	rMinusOneBigInt := fr.Modulus()
+	rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt)
+	tmpBigInt.SetUint64(uint64(i))
+	tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt)
+	if tmpBigInt.Cmp(&zeroBigInt) != 0 {
+		return omega, ErrRootsOne
+	}
+	genFrStar := getGenFrStar()
+	tmpBigInt.SetUint64(uint64(i))
+	tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt)
+	omega.Exp(genFrStar, &tmpBigInt)
+	return omega, nil
+}
+
+// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..]
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) {
+
+	omega, err := getIthRootOne(t)
+	if err != nil {
+		return nil, err
+	}
+	nbPoints := len(p)
+	newPoints := make([]fr.Element, t*nbPoints)
+	for i := 0; i < nbPoints; i++ {
+		newPoints[i*t].Set(&p[i])
+		for k := 1; k < t; k++ {
+			newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega)
+		}
+	}
+
+	return newPoints, nil
+}
+
+func eval(f []fr.Element, x fr.Element) fr.Element {
+	var y fr.Element
+	for i := len(f) - 1; i >= 0; i-- {
+		y.Mul(&y, &x).Add(&y, &f[i])
+	}
+	return y
+}
diff --git a/ecc/bls24-317/fflonk/fflonk_test.go b/ecc/bls24-317/fflonk/fflonk_test.go
new file mode 100644
index 0000000000..5d964e4fda
--- /dev/null
+++ b/ecc/bls24-317/fflonk/fflonk_test.go
@@ -0,0 +1,141 @@
+// Copyright 2020 Consensys Software Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 400 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = CommitAndFold(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + 
assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + t := len(p[i]) + newPoints[i], err = extendSet(points[i], t) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return nil +} + +// utils + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(13) + + return res +} + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := getGenFrStar() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] +func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/ecc/bw6-633/fflonk/fflonk_test.go b/ecc/bw6-633/fflonk/fflonk_test.go new file mode 100644 index 0000000000..1d031a6929 --- /dev/null +++ b/ecc/bw6-633/fflonk/fflonk_test.go @@ -0,0 +1,141 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 400 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = CommitAndFold(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + 
assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + t := len(p[i]) + newPoints[i], err = extendSet(points[i], t) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return nil +} + +// utils + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(5) + + return res +} + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := getGenFrStar() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] +func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/ecc/bw6-756/fflonk/fflonk_test.go b/ecc/bw6-756/fflonk/fflonk_test.go new file mode 100644 index 0000000000..d17a5372d2 --- /dev/null +++ b/ecc/bw6-756/fflonk/fflonk_test.go @@ -0,0 +1,141 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-756/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-756/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 400 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = CommitAndFold(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + 
assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + t := len(p[i]) + newPoints[i], err = extendSet(points[i], t) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return nil +} + +// utils + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(15) + + return res +} + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := getGenFrStar() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] +func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/ecc/bw6-761/fflonk/fflonk_test.go b/ecc/bw6-761/fflonk/fflonk_test.go new file mode 100644 index 0000000000..030b6e8db2 --- /dev/null +++ b/ecc/bw6-761/fflonk/fflonk_test.go @@ -0,0 +1,141 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 400 + bAlpha = new(big.Int).SetInt64(42) // randomise ? + testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = CommitAndFold(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + 
assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i Date: Tue, 19 Mar 2024 16:28:47 +0100 Subject: [PATCH 32/66] feat: marshal fflonk bn254 --- ecc/bn254/fflonk/marshal.go | 49 +++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 ecc/bn254/fflonk/marshal.go diff --git a/ecc/bn254/fflonk/marshal.go b/ecc/bn254/fflonk/marshal.go new file mode 100644 index 0000000000..034e00c6ff --- /dev/null +++ b/ecc/bn254/fflonk/marshal.go @@ -0,0 +1,49 @@ +package fflonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bn254" +) + +// ReadFrom decodes OpeningProof data from reader. +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bn254.NewDecoder(r) + + toDecode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of OpeningProof. 
+func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bn254.NewEncoder(w) + + toEncode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} From 6f3074bcfa6b7703b71b3324df567f6483c67ec5 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 19 Mar 2024 18:48:13 +0100 Subject: [PATCH 33/66] feat: marshal --- ecc/bls12-377/fflonk/marshal.go | 65 ++++++++++++++++++++++++++ ecc/bls12-378/fflonk/marshal.go | 65 ++++++++++++++++++++++++++ ecc/bls12-381/fflonk/marshal.go | 65 ++++++++++++++++++++++++++ ecc/bls24-315/fflonk/marshal.go | 65 ++++++++++++++++++++++++++ ecc/bls24-317/fflonk/marshal.go | 65 ++++++++++++++++++++++++++ ecc/bn254/fflonk/marshal.go | 16 +++++++ ecc/bw6-633/fflonk/marshal.go | 65 ++++++++++++++++++++++++++ ecc/bw6-756/fflonk/marshal.go | 65 ++++++++++++++++++++++++++ ecc/bw6-761/fflonk/marshal.go | 65 ++++++++++++++++++++++++++ internal/generator/fflonk/generator.go | 2 +- 10 files changed, 537 insertions(+), 1 deletion(-) create mode 100644 ecc/bls12-377/fflonk/marshal.go create mode 100644 ecc/bls12-378/fflonk/marshal.go create mode 100644 ecc/bls12-381/fflonk/marshal.go create mode 100644 ecc/bls24-315/fflonk/marshal.go create mode 100644 ecc/bls24-317/fflonk/marshal.go create mode 100644 ecc/bw6-633/fflonk/marshal.go create mode 100644 ecc/bw6-756/fflonk/marshal.go create mode 100644 ecc/bw6-761/fflonk/marshal.go diff --git a/ecc/bls12-377/fflonk/marshal.go b/ecc/bls12-377/fflonk/marshal.go new file mode 100644 index 0000000000..b5dd8da92e --- /dev/null +++ b/ecc/bls12-377/fflonk/marshal.go @@ -0,0 +1,65 @@ +// Copyright 2020 Consensys Software Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bls12-377" +) + +// ReadFrom decodes OpeningProof data from reader. +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bls12377.NewDecoder(r) + + toDecode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of OpeningProof. +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bls12377.NewEncoder(w) + + toEncode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bls12-378/fflonk/marshal.go b/ecc/bls12-378/fflonk/marshal.go new file mode 100644 index 0000000000..bd46e96297 --- /dev/null +++ b/ecc/bls12-378/fflonk/marshal.go @@ -0,0 +1,65 @@ +// Copyright 2020 Consensys Software Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bls12-378" +) + +// ReadFrom decodes OpeningProof data from reader. +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bls12378.NewDecoder(r) + + toDecode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of OpeningProof. +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bls12378.NewEncoder(w) + + toEncode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bls12-381/fflonk/marshal.go b/ecc/bls12-381/fflonk/marshal.go new file mode 100644 index 0000000000..cdf278f6f5 --- /dev/null +++ b/ecc/bls12-381/fflonk/marshal.go @@ -0,0 +1,65 @@ +// Copyright 2020 Consensys Software Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bls12-381" +) + +// ReadFrom decodes OpeningProof data from reader. +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bls12381.NewDecoder(r) + + toDecode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of OpeningProof. +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bls12381.NewEncoder(w) + + toEncode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bls24-315/fflonk/marshal.go b/ecc/bls24-315/fflonk/marshal.go new file mode 100644 index 0000000000..f706558a7b --- /dev/null +++ b/ecc/bls24-315/fflonk/marshal.go @@ -0,0 +1,65 @@ +// Copyright 2020 Consensys Software Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bls24-315" +) + +// ReadFrom decodes OpeningProof data from reader. +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bls24315.NewDecoder(r) + + toDecode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of OpeningProof. +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bls24315.NewEncoder(w) + + toEncode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bls24-317/fflonk/marshal.go b/ecc/bls24-317/fflonk/marshal.go new file mode 100644 index 0000000000..1129b6a503 --- /dev/null +++ b/ecc/bls24-317/fflonk/marshal.go @@ -0,0 +1,65 @@ +// Copyright 2020 Consensys Software Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bls24-317" +) + +// ReadFrom decodes OpeningProof data from reader. +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bls24317.NewDecoder(r) + + toDecode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of OpeningProof. +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bls24317.NewEncoder(w) + + toEncode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bn254/fflonk/marshal.go b/ecc/bn254/fflonk/marshal.go index 034e00c6ff..ff35b08e26 100644 --- a/ecc/bn254/fflonk/marshal.go +++ b/ecc/bn254/fflonk/marshal.go @@ -1,3 +1,19 @@ +// Copyright 2020 Consensys Software Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + package fflonk import ( diff --git a/ecc/bw6-633/fflonk/marshal.go b/ecc/bw6-633/fflonk/marshal.go new file mode 100644 index 0000000000..a8b354e6c4 --- /dev/null +++ b/ecc/bw6-633/fflonk/marshal.go @@ -0,0 +1,65 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bw6-633" +) + +// ReadFrom decodes OpeningProof data from reader. 
+func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bw6633.NewDecoder(r) + + toDecode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of OpeningProof. +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bw6633.NewEncoder(w) + + toEncode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bw6-756/fflonk/marshal.go b/ecc/bw6-756/fflonk/marshal.go new file mode 100644 index 0000000000..1d8be3fa12 --- /dev/null +++ b/ecc/bw6-756/fflonk/marshal.go @@ -0,0 +1,65 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bw6-756" +) + +// ReadFrom decodes OpeningProof data from reader. 
+func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bw6756.NewDecoder(r) + + toDecode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of OpeningProof. +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bw6756.NewEncoder(w) + + toEncode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/ecc/bw6-761/fflonk/marshal.go b/ecc/bw6-761/fflonk/marshal.go new file mode 100644 index 0000000000..243d3035f1 --- /dev/null +++ b/ecc/bw6-761/fflonk/marshal.go @@ -0,0 +1,65 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/bw6-761" +) + +// ReadFrom decodes OpeningProof data from reader. 
+func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := bw6761.NewDecoder(r) + + toDecode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of OpeningProof. +func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := bw6761.NewEncoder(w) + + toEncode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} diff --git a/internal/generator/fflonk/generator.go b/internal/generator/fflonk/generator.go index 40ee17daf3..875c129dd8 100644 --- a/internal/generator/fflonk/generator.go +++ b/internal/generator/fflonk/generator.go @@ -15,7 +15,7 @@ func Generate(conf config.Curve, baseDir string, bgen *bavard.BatchGenerator) er {File: filepath.Join(baseDir, "doc.go"), Templates: []string{"doc.go.tmpl"}}, {File: filepath.Join(baseDir, "fflonk.go"), Templates: []string{"fflonk.go.tmpl"}}, {File: filepath.Join(baseDir, "fflonk_test.go"), Templates: []string{"fflonk.test.go.tmpl"}}, - // {File: filepath.Join(baseDir, "marshal.go"), Templates: []string{"marshal.go.tmpl"}}, + {File: filepath.Join(baseDir, "marshal.go"), Templates: []string{"marshal.go.tmpl"}}, } return bgen.Generate(conf, conf.Package, "./fflonk/template/", entries...) 
From 941631d402bfc341529b83c10f80a69f0bafc25a Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 19 Mar 2024 18:48:31 +0100 Subject: [PATCH 34/66] feat: template marshal --- .../generator/fflonk/template/marshal.go.tmpl | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 internal/generator/fflonk/template/marshal.go.tmpl diff --git a/internal/generator/fflonk/template/marshal.go.tmpl b/internal/generator/fflonk/template/marshal.go.tmpl new file mode 100644 index 0000000000..e70d43bb7f --- /dev/null +++ b/internal/generator/fflonk/template/marshal.go.tmpl @@ -0,0 +1,47 @@ +import ( + "io" + + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}" +) + +// ReadFrom decodes OpeningProof data from reader. +func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { + + dec := {{ .CurvePackage }}.NewDecoder(r) + + toDecode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toDecode { + if err := dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + + return dec.BytesRead(), nil +} + +// WriteTo writes binary encoding of OpeningProof. 
+func (proof *OpeningProof) WriteTo(w io.Writer) (int64, error) { + + enc := {{ .CurvePackage }}.NewEncoder(w) + + toEncode := []interface{}{ + &proof.SOpeningProof.W, + &proof.SOpeningProof.WPrime, + proof.SOpeningProof.ClaimedValues, + proof.ClaimedValues, + } + + for _, v := range toEncode { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + + return enc.BytesWritten(), nil +} From e651058209cd0b4f62c842f31e2eb31c5bed5bba Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 29 Mar 2024 12:25:04 +0100 Subject: [PATCH 35/66] feat: add dummy polynomial so the number of poly divides r-1 --- ecc/bn254/fflonk/fflonk.go | 40 +++++++++++++++++++++++++++++--------- 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index 9062a8d2ca..5ec41acc8a 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -12,8 +12,6 @@ // See the License for the specific language governing permissions and // limitations under the License. -// Code generated by consensys/gnark-crypto DO NOT EDIT - package fflonk import ( @@ -56,16 +54,19 @@ func CommitAndFold(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i the remaing polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } } // step 2: fold polynomials @@ -127,8 +133,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - t := len(p[i]) - newPoints[i], err = extendSet(points[i], t) + newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) if err != nil { return res, err } @@ -239,6 +244,23 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// computes the smallest i bounding above number_polynomials +// and dividing r-1. 
+func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + for tmp.Cmp(&zero) != 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + } + return i +} + // extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] func extendSet(p []fr.Element, t int) ([]fr.Element, error) { From 78bda2d2a276a82cc40a308c257cf79bb8dc49af Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 29 Mar 2024 18:14:36 +0100 Subject: [PATCH 36/66] feat: proof tunes the number of polynomial so it divides r-1 --- ecc/bls12-377/fflonk/fflonk.go | 56 ++++++++---- ecc/bls12-378/fflonk/fflonk.go | 56 ++++++++---- ecc/bls12-381/fflonk/fflonk.go | 56 ++++++++---- ecc/bls24-315/fflonk/fflonk.go | 56 ++++++++---- ecc/bls24-317/fflonk/fflonk.go | 56 ++++++++---- ecc/bn254/fflonk/fflonk.go | 20 +++-- ecc/bw6-633/fflonk/fflonk.go | 56 ++++++++---- ecc/bw6-756/fflonk/fflonk.go | 56 ++++++++---- ecc/bw6-761/fflonk/fflonk.go | 56 ++++++++---- .../generator/fflonk/template/fflonk.go.tmpl | 88 ++++++++++++------- 10 files changed, 387 insertions(+), 169 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk.go b/ecc/bls12-377/fflonk/fflonk.go index 31176422e0..75649f5e41 100644 --- a/ecc/bls12-377/fflonk/fflonk.go +++ b/ecc/bls12-377/fflonk/fflonk.go @@ -56,16 +56,19 @@ func CommitAndFold(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i the remaing polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } } // step 2: fold polynomials @@ -127,8 +135,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - t := len(p[i]) - newPoints[i], err = extendSet(points[i], t) + newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) if err != 
nil { return res, err } @@ -210,15 +217,6 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // utils -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(22) - - return res -} - // getIthRootOne returns a generator of Z/iZ func getIthRootOne(i int) (fr.Element, error) { var omega fr.Element @@ -239,6 +237,23 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + for tmp.Cmp(&zero) != 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + } + return i +} + // extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] func extendSet(p []fr.Element, t int) ([]fr.Element, error) { @@ -265,3 +280,12 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(22) + + return res +} diff --git a/ecc/bls12-378/fflonk/fflonk.go b/ecc/bls12-378/fflonk/fflonk.go index 967a45d404..d1e276c167 100644 --- a/ecc/bls12-378/fflonk/fflonk.go +++ b/ecc/bls12-378/fflonk/fflonk.go @@ -56,16 +56,19 @@ func CommitAndFold(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i the remaing polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } } // step 2: fold polynomials @@ -127,8 +135,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - t := len(p[i]) - newPoints[i], err = extendSet(points[i], t) + newPoints[i], err = extendSet(points[i], 
nextPowerOfTwoPerPack[i]) if err != nil { return res, err } @@ -210,15 +217,6 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // utils -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(22) - - return res -} - // getIthRootOne returns a generator of Z/iZ func getIthRootOne(i int) (fr.Element, error) { var omega fr.Element @@ -239,6 +237,23 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + for tmp.Cmp(&zero) != 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + } + return i +} + // extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] func extendSet(p []fr.Element, t int) ([]fr.Element, error) { @@ -265,3 +280,12 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(22) + + return res +} diff --git a/ecc/bls12-381/fflonk/fflonk.go b/ecc/bls12-381/fflonk/fflonk.go index 0709f6d803..79b20c1a8e 100644 --- a/ecc/bls12-381/fflonk/fflonk.go +++ b/ecc/bls12-381/fflonk/fflonk.go @@ -56,16 +56,19 @@ func CommitAndFold(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i the remaing polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } } // step 2: fold polynomials @@ -127,8 +135,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - t := len(p[i]) - newPoints[i], err = extendSet(points[i], t) + 
newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) if err != nil { return res, err } @@ -210,15 +217,6 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // utils -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(7) - - return res -} - // getIthRootOne returns a generator of Z/iZ func getIthRootOne(i int) (fr.Element, error) { var omega fr.Element @@ -239,6 +237,23 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + for tmp.Cmp(&zero) != 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + } + return i +} + // extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] func extendSet(p []fr.Element, t int) ([]fr.Element, error) { @@ -265,3 +280,12 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(7) + + return res +} diff --git a/ecc/bls24-315/fflonk/fflonk.go b/ecc/bls24-315/fflonk/fflonk.go index c4f890596f..708e50ca85 100644 --- a/ecc/bls24-315/fflonk/fflonk.go +++ b/ecc/bls24-315/fflonk/fflonk.go @@ -56,16 +56,19 @@ func CommitAndFold(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i the remaing polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } } // step 2: fold polynomials @@ -127,8 +135,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - t := len(p[i]) - newPoints[i], err 
= extendSet(points[i], t) + newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) if err != nil { return res, err } @@ -210,15 +217,6 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // utils -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(7) - - return res -} - // getIthRootOne returns a generator of Z/iZ func getIthRootOne(i int) (fr.Element, error) { var omega fr.Element @@ -239,6 +237,23 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + for tmp.Cmp(&zero) != 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + } + return i +} + // extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
func extendSet(p []fr.Element, t int) ([]fr.Element, error) { @@ -265,3 +280,12 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(7) + + return res +} diff --git a/ecc/bls24-317/fflonk/fflonk.go b/ecc/bls24-317/fflonk/fflonk.go index 37d6bbf592..f9de5d74e5 100644 --- a/ecc/bls24-317/fflonk/fflonk.go +++ b/ecc/bls24-317/fflonk/fflonk.go @@ -56,16 +56,19 @@ func CommitAndFold(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i the remaing polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } } // step 2: fold polynomials @@ -127,8 +135,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - t := len(p[i]) - newPoints[i], err = extendSet(points[i], t) + newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) if err != nil { return res, err } @@ -210,15 +217,6 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // utils -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(7) - - return res -} - // getIthRootOne returns a generator of Z/iZ func getIthRootOne(i int) (fr.Element, error) { var omega fr.Element @@ -239,6 +237,23 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + for tmp.Cmp(&zero) != 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + } + return i +} + // extendSet returns [p[0], ω p[0], .. 
,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] func extendSet(p []fr.Element, t int) ([]fr.Element, error) { @@ -265,3 +280,12 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(7) + + return res +} diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index 5ec41acc8a..32fb98795f 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +// Code generated by consensys/gnark-crypto DO NOT EDIT + package fflonk import ( @@ -215,15 +217,6 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // utils -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(5) - - return res -} - // getIthRootOne returns a generator of Z/iZ func getIthRootOne(i int) (fr.Element, error) { var omega fr.Element @@ -287,3 +280,12 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(5) + + return res +} diff --git a/ecc/bw6-633/fflonk/fflonk.go b/ecc/bw6-633/fflonk/fflonk.go index fe88556e0e..2b07024ab3 100644 --- a/ecc/bw6-633/fflonk/fflonk.go +++ b/ecc/bw6-633/fflonk/fflonk.go @@ -56,16 +56,19 @@ func CommitAndFold(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i the remaing polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } } // step 2: fold polynomials @@ -127,8 +135,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - t := len(p[i]) - 
newPoints[i], err = extendSet(points[i], t) + newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) if err != nil { return res, err } @@ -210,15 +217,6 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // utils -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(13) - - return res -} - // getIthRootOne returns a generator of Z/iZ func getIthRootOne(i int) (fr.Element, error) { var omega fr.Element @@ -239,6 +237,23 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + for tmp.Cmp(&zero) != 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + } + return i +} + // extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
func extendSet(p []fr.Element, t int) ([]fr.Element, error) { @@ -265,3 +280,12 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(13) + + return res +} diff --git a/ecc/bw6-756/fflonk/fflonk.go b/ecc/bw6-756/fflonk/fflonk.go index 5942714f18..38bfd4532f 100644 --- a/ecc/bw6-756/fflonk/fflonk.go +++ b/ecc/bw6-756/fflonk/fflonk.go @@ -56,16 +56,19 @@ func CommitAndFold(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i the remaing polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } } // step 2: fold polynomials @@ -127,8 +135,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - t := len(p[i]) - newPoints[i], err = extendSet(points[i], t) + newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) if err != nil { return res, err } @@ -210,15 +217,6 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // utils -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(5) - - return res -} - // getIthRootOne returns a generator of Z/iZ func getIthRootOne(i int) (fr.Element, error) { var omega fr.Element @@ -239,6 +237,23 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + for tmp.Cmp(&zero) != 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + } + return i +} + // extendSet returns [p[0], ω p[0], .. 
,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] func extendSet(p []fr.Element, t int) ([]fr.Element, error) { @@ -265,3 +280,12 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(5) + + return res +} diff --git a/ecc/bw6-761/fflonk/fflonk.go b/ecc/bw6-761/fflonk/fflonk.go index 1c760fea0d..8c6345f209 100644 --- a/ecc/bw6-761/fflonk/fflonk.go +++ b/ecc/bw6-761/fflonk/fflonk.go @@ -56,16 +56,19 @@ func CommitAndFold(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i the remaing polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } } // step 2: fold polynomials @@ -127,8 +135,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - t := len(p[i]) - newPoints[i], err = extendSet(points[i], t) + newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) if err != nil { return res, err } @@ -210,15 +217,6 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // utils -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(15) - - return res -} - // getIthRootOne returns a generator of Z/iZ func getIthRootOne(i int) (fr.Element, error) { var omega fr.Element @@ -239,6 +237,23 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// computes the smallest i bounding above number_polynomials +// and dividing r-1. 
+func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + for tmp.Cmp(&zero) != 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + } + return i +} + // extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] func extendSet(p []fr.Element, t int) ([]fr.Element, error) { @@ -265,3 +280,12 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + + res.SetUint64(15) + + return res +} diff --git a/internal/generator/fflonk/template/fflonk.go.tmpl b/internal/generator/fflonk/template/fflonk.go.tmpl index f9ee38a06b..169e50587b 100644 --- a/internal/generator/fflonk/template/fflonk.go.tmpl +++ b/internal/generator/fflonk/template/fflonk.go.tmpl @@ -38,16 +38,19 @@ func CommitAndFold(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i the remaing polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } } // step 2: fold polynomials @@ -109,8 +117,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - t := len(p[i]) - newPoints[i], err = extendSet(points[i], t) + newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) if err != nil { return res, err } @@ -192,31 +199,6 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element // utils -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - {{if eq .Name "bls12-378"}} - res.SetUint64(22) - {{else if eq .Name "bls12-377"}} - res.SetUint64(22) - {{else if eq .Name "bls12-381"}} - res.SetUint64(7) - {{else if eq .Name "bn254"}} - res.SetUint64(5) - {{else if eq 
.Name "bw6-761"}} - res.SetUint64(15) - {{else if eq .Name "bw6-756"}} - res.SetUint64(5) - {{else if eq .Name "bw6-633"}} - res.SetUint64(13) - {{else if eq .Name "bls24-315"}} - res.SetUint64(7) - {{else if eq .Name "bls24-317"}} - res.SetUint64(7) - {{end}} - return res -} - // getIthRootOne returns a generator of Z/iZ func getIthRootOne(i int) (fr.Element, error) { var omega fr.Element @@ -237,6 +219,23 @@ func getIthRootOne(i int) (fr.Element, error) { return omega, nil } +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + for tmp.Cmp(&zero) != 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + } + return i +} + // extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] func extendSet(p []fr.Element, t int) ([]fr.Element, error) { @@ -263,3 +262,28 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } + +// getGenFrStar returns a generator of Fr^{*} +func getGenFrStar() fr.Element { + var res fr.Element + {{if eq .Name "bls12-378"}} + res.SetUint64(22) + {{else if eq .Name "bls12-377"}} + res.SetUint64(22) + {{else if eq .Name "bls12-381"}} + res.SetUint64(7) + {{else if eq .Name "bn254"}} + res.SetUint64(5) + {{else if eq .Name "bw6-761"}} + res.SetUint64(15) + {{else if eq .Name "bw6-756"}} + res.SetUint64(5) + {{else if eq .Name "bw6-633"}} + res.SetUint64(13) + {{else if eq .Name "bls24-315"}} + res.SetUint64(7) + {{else if eq .Name "bls24-317"}} + res.SetUint64(7) + {{end}} + return res +} \ No newline at end of file From 6ae652e6b1444992d9650c90838ab49ddf652f36 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Wed, 3 Apr 2024 18:38:23 +0200 Subject: [PATCH 37/66] feat: marshal [][][]frElement --- ecc/bn254/marshal.go | 60 +++++++++++++++++++++++++++++++++++++-- ecc/bn254/marshal_test.go | 20 ++++++++++--- 2 
files changed, 74 insertions(+), 6 deletions(-) diff --git a/ecc/bn254/marshal.go b/ecc/bn254/marshal.go index d4a6f6eea9..2b7f713a34 100644 --- a/ecc/bn254/marshal.go +++ b/ecc/bn254/marshal.go @@ -12,8 +12,6 @@ // See the License for the specific language governing permissions and // limitations under the License. -// Code generated by consensys/gnark-crypto DO NOT EDIT - package bn254 import ( @@ -166,6 +164,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 } return + case *[][][]fr.Element: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = make([][]fr.Element, sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 + } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. 
read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -502,6 +520,25 @@ func (enc *Encoder) encode(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) @@ -620,6 +657,25 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git a/ecc/bn254/marshal_test.go b/ecc/bn254/marshal_test.go index 3283470da8..a553ce3182 100644 --- a/ecc/bn254/marshal_test.go +++ b/ecc/bn254/marshal_test.go @@ -12,8 +12,6 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-// Code generated by consensys/gnark-crypto DO NOT EDIT - package bn254 import ( @@ -72,12 +70,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} inM = [][]uint64{{1, 2}, {4}, {}} + inN := make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -104,8 +112,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -151,6 +160,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } From a7d721497f2a98b1f292886bb685fd3c5a90f930 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Wed, 3 Apr 2024 18:54:49 +0200 Subject: [PATCH 38/66] feat: code gen marshal [][][]frElement --- ecc/bls12-377/marshal.go | 58 +++++++++++++++++++ ecc/bls12-377/marshal_test.go | 19 +++++- ecc/bls12-378/marshal.go | 58 +++++++++++++++++++ 
ecc/bls12-378/marshal_test.go | 19 +++++- ecc/bls12-381/marshal.go | 58 +++++++++++++++++++ ecc/bls12-381/marshal_test.go | 19 +++++- ecc/bls24-315/marshal.go | 58 +++++++++++++++++++ ecc/bls24-315/marshal_test.go | 19 +++++- ecc/bls24-317/marshal.go | 58 +++++++++++++++++++ ecc/bls24-317/marshal_test.go | 19 +++++- ecc/bn254/marshal.go | 2 + ecc/bn254/marshal_test.go | 5 +- ecc/bw6-633/marshal.go | 58 +++++++++++++++++++ ecc/bw6-633/marshal_test.go | 19 +++++- ecc/bw6-756/marshal.go | 58 +++++++++++++++++++ ecc/bw6-756/marshal_test.go | 19 +++++- ecc/bw6-761/marshal.go | 58 +++++++++++++++++++ ecc/bw6-761/marshal_test.go | 19 +++++- .../generator/ecc/template/marshal.go.tmpl | 39 +++++++++++++ .../ecc/template/tests/marshal.go.tmpl | 19 +++++- 20 files changed, 662 insertions(+), 19 deletions(-) diff --git a/ecc/bls12-377/marshal.go b/ecc/bls12-377/marshal.go index 317f7c5354..100fad0520 100644 --- a/ecc/bls12-377/marshal.go +++ b/ecc/bls12-377/marshal.go @@ -171,6 +171,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 } return + case *[][][]fr.Element: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = make([][]fr.Element, sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 + } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. 
read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -537,6 +557,25 @@ func (enc *Encoder) encode(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) @@ -655,6 +694,25 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git a/ecc/bls12-377/marshal_test.go b/ecc/bls12-377/marshal_test.go index ee92ec4fd1..75c2092c68 100644 --- a/ecc/bls12-377/marshal_test.go +++ b/ecc/bls12-377/marshal_test.go @@ -54,6 +54,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL [][]fr.Element var inM [][]uint64 + var inN [][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -72,12 +73,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} inM = [][]uint64{{1, 2}, {4}, {}} + inN = 
make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -104,8 +115,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -151,6 +163,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bls12-378/marshal.go b/ecc/bls12-378/marshal.go index 5bfd945ad1..2076922e7b 100644 --- a/ecc/bls12-378/marshal.go +++ b/ecc/bls12-378/marshal.go @@ -171,6 +171,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 } return + case *[][][]fr.Element: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = 
make([][]fr.Element, sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 + } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -537,6 +557,25 @@ func (enc *Encoder) encode(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) @@ -655,6 +694,25 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git a/ecc/bls12-378/marshal_test.go b/ecc/bls12-378/marshal_test.go index 40da636abf..191ebe03a9 100644 --- a/ecc/bls12-378/marshal_test.go +++ b/ecc/bls12-378/marshal_test.go @@ -54,6 +54,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL [][]fr.Element var 
inM [][]uint64 + var inN [][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -72,12 +73,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} inM = [][]uint64{{1, 2}, {4}, {}} + inN = make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -104,8 +115,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -151,6 +163,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bls12-381/marshal.go b/ecc/bls12-381/marshal.go index 8779479b24..50ce63d9b6 100644 --- a/ecc/bls12-381/marshal.go +++ b/ecc/bls12-381/marshal.go @@ -171,6 +171,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 } return + case *[][][]fr.Element: + if sliceLen, err = 
dec.readUint32(); err != nil { + return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = make([][]fr.Element, sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 + } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -537,6 +557,25 @@ func (enc *Encoder) encode(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) @@ -655,6 +694,25 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git 
a/ecc/bls12-381/marshal_test.go b/ecc/bls12-381/marshal_test.go index e8854899ea..2bc31cfdfc 100644 --- a/ecc/bls12-381/marshal_test.go +++ b/ecc/bls12-381/marshal_test.go @@ -54,6 +54,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL [][]fr.Element var inM [][]uint64 + var inN [][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -72,12 +73,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} inM = [][]uint64{{1, 2}, {4}, {}} + inN = make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -104,8 +115,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -151,6 +163,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bls24-315/marshal.go 
b/ecc/bls24-315/marshal.go index c7635caece..71e4d00afe 100644 --- a/ecc/bls24-315/marshal.go +++ b/ecc/bls24-315/marshal.go @@ -171,6 +171,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 } return + case *[][][]fr.Element: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = make([][]fr.Element, sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 + } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -537,6 +557,25 @@ func (enc *Encoder) encode(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) @@ -655,6 +694,25 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each 
vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git a/ecc/bls24-315/marshal_test.go b/ecc/bls24-315/marshal_test.go index 3331ffccfb..54d034b793 100644 --- a/ecc/bls24-315/marshal_test.go +++ b/ecc/bls24-315/marshal_test.go @@ -54,6 +54,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL [][]fr.Element var inM [][]uint64 + var inN [][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -72,12 +73,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} inM = [][]uint64{{1, 2}, {4}, {}} + inN = make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -104,8 +115,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -151,6 +163,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { 
t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bls24-317/marshal.go b/ecc/bls24-317/marshal.go index 5e9f325529..3698554e2f 100644 --- a/ecc/bls24-317/marshal.go +++ b/ecc/bls24-317/marshal.go @@ -171,6 +171,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 } return + case *[][][]fr.Element: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = make([][]fr.Element, sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 + } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. 
read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -537,6 +557,25 @@ func (enc *Encoder) encode(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) @@ -655,6 +694,25 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git a/ecc/bls24-317/marshal_test.go b/ecc/bls24-317/marshal_test.go index 73f4c658b2..11facbadf1 100644 --- a/ecc/bls24-317/marshal_test.go +++ b/ecc/bls24-317/marshal_test.go @@ -54,6 +54,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL [][]fr.Element var inM [][]uint64 + var inN [][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -72,12 +73,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} inM = [][]uint64{{1, 2}, {4}, {}} + inN = 
make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -104,8 +115,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -151,6 +163,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bn254/marshal.go b/ecc/bn254/marshal.go index 2b7f713a34..7fb2512948 100644 --- a/ecc/bn254/marshal.go +++ b/ecc/bn254/marshal.go @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+// Code generated by consensys/gnark-crypto DO NOT EDIT + package bn254 import ( diff --git a/ecc/bn254/marshal_test.go b/ecc/bn254/marshal_test.go index a553ce3182..8ddbfa769c 100644 --- a/ecc/bn254/marshal_test.go +++ b/ecc/bn254/marshal_test.go @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +// Code generated by consensys/gnark-crypto DO NOT EDIT + package bn254 import ( @@ -52,6 +54,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL [][]fr.Element var inM [][]uint64 + var inN [][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -70,7 +73,7 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} inM = [][]uint64{{1, 2}, {4}, {}} - inN := make([][][]fr.Element, 4) + inN = make([][][]fr.Element, 4) for i := 0; i < 4; i++ { inN[i] = make([][]fr.Element, i+2) for j := 0; j < i+2; j++ { diff --git a/ecc/bw6-633/marshal.go b/ecc/bw6-633/marshal.go index 155be56fa0..499b1ad26c 100644 --- a/ecc/bw6-633/marshal.go +++ b/ecc/bw6-633/marshal.go @@ -171,6 +171,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 } return + case *[][][]fr.Element: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = make([][]fr.Element, sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 + } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. 
read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -537,6 +557,25 @@ func (enc *Encoder) encode(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) @@ -655,6 +694,25 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git a/ecc/bw6-633/marshal_test.go b/ecc/bw6-633/marshal_test.go index dcf6596e78..e0b83953e3 100644 --- a/ecc/bw6-633/marshal_test.go +++ b/ecc/bw6-633/marshal_test.go @@ -54,6 +54,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL [][]fr.Element var inM [][]uint64 + var inN [][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -72,12 +73,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} inM = [][]uint64{{1, 2}, {4}, {}} + inN = 
make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -104,8 +115,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -151,6 +163,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bw6-756/marshal.go b/ecc/bw6-756/marshal.go index 08083e42b8..3959077d9d 100644 --- a/ecc/bw6-756/marshal.go +++ b/ecc/bw6-756/marshal.go @@ -171,6 +171,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 } return + case *[][][]fr.Element: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = make([][]fr.Element, 
sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 + } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -537,6 +557,25 @@ func (enc *Encoder) encode(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) @@ -655,6 +694,25 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git a/ecc/bw6-756/marshal_test.go b/ecc/bw6-756/marshal_test.go index 9b9ff619c3..deb7138797 100644 --- a/ecc/bw6-756/marshal_test.go +++ b/ecc/bw6-756/marshal_test.go @@ -54,6 +54,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL [][]fr.Element var inM [][]uint64 + var inN 
[][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -72,12 +73,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} inM = [][]uint64{{1, 2}, {4}, {}} + inN = make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -104,8 +115,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -151,6 +163,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/ecc/bw6-761/marshal.go b/ecc/bw6-761/marshal.go index c232a9ed75..7af9978a10 100644 --- a/ecc/bw6-761/marshal.go +++ b/ecc/bw6-761/marshal.go @@ -171,6 +171,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n += read64 } return + case *[][][]fr.Element: + if sliceLen, err = dec.readUint32(); err != nil { + 
return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = make([][]fr.Element, sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 + } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -537,6 +557,25 @@ func (enc *Encoder) encode(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) @@ -655,6 +694,25 @@ func (enc *Encoder) encodeRaw(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git a/ecc/bw6-761/marshal_test.go 
b/ecc/bw6-761/marshal_test.go index f96ae91944..6ac3db73d3 100644 --- a/ecc/bw6-761/marshal_test.go +++ b/ecc/bw6-761/marshal_test.go @@ -54,6 +54,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL [][]fr.Element var inM [][]uint64 + var inN [][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -72,12 +73,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL = [][]fr.Element{inJ, inK} inM = [][]uint64{{1, 2}, {4}, {}} + inN = make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -104,8 +115,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -151,6 +163,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/internal/generator/ecc/template/marshal.go.tmpl 
b/internal/generator/ecc/template/marshal.go.tmpl index 1483e0bd76..d26de5fa79 100644 --- a/internal/generator/ecc/template/marshal.go.tmpl +++ b/internal/generator/ecc/template/marshal.go.tmpl @@ -176,6 +176,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n+=read64 } return + case *[][][]fr.Element: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = make([][]fr.Element, sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 + } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -606,6 +626,25 @@ func (enc *Encoder) encode{{- $.Raw}}(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git a/internal/generator/ecc/template/tests/marshal.go.tmpl b/internal/generator/ecc/template/tests/marshal.go.tmpl index 9860884e97..85ac331918 100644 --- a/internal/generator/ecc/template/tests/marshal.go.tmpl +++ b/internal/generator/ecc/template/tests/marshal.go.tmpl @@ -44,6 +44,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL 
[][]fr.Element var inM [][]uint64 + var inN [][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -62,12 +63,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL =[][]fr.Element {inJ, inK} inM = [][]uint64{ {1, 2}, {4}, {} } + inN = make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} + toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -95,8 +106,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -142,6 +154,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } From 271ab1e9124b0dd3b8a9170df08944217b00c9ab Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 26 Apr 2024 18:39:53 +0200 Subject: [PATCH 39/66] fix: fixed order computation template --- internal/generator/fflonk/template/fflonk.test.go.tmpl | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/internal/generator/fflonk/template/fflonk.test.go.tmpl b/internal/generator/fflonk/template/fflonk.test.go.tmpl index c65b2afa17..352006a16e 100644 --- a/internal/generator/fflonk/template/fflonk.test.go.tmpl +++ b/internal/generator/fflonk/template/fflonk.test.go.tmpl @@ -109,7 +109,7 @@ func TestGetIthRootOne(t *testing.T) { assert := require.New(t) - order := 9 + order := getNextDivisorRMinusOne(9) omega, err := getIthRootOne(order) assert.NoError(err) var orderBigInt big.Int @@ -117,7 +117,7 @@ func TestGetIthRootOne(t *testing.T) { omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - order = 7 + order = order+1 _, err = getIthRootOne(order) assert.Error(err) } From 5e0b0f5d11bb6c48b3d67aa7a1b5045334c5ee15 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 26 Apr 2024 18:40:52 +0200 Subject: [PATCH 40/66] feat: code gen --- ecc/bls12-377/fflonk/fflonk_test.go | 4 ++-- ecc/bls12-378/fflonk/fflonk_test.go | 4 ++-- ecc/bls12-381/fflonk/fflonk_test.go | 4 ++-- ecc/bls24-315/fflonk/fflonk_test.go | 4 ++-- ecc/bls24-317/fflonk/fflonk_test.go | 4 ++-- ecc/bn254/fflonk/fflonk_test.go | 4 ++-- ecc/bw6-633/fflonk/fflonk_test.go | 4 ++-- ecc/bw6-756/fflonk/fflonk_test.go | 4 ++-- ecc/bw6-761/fflonk/fflonk_test.go | 4 ++-- 9 files changed, 18 insertions(+), 18 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk_test.go b/ecc/bls12-377/fflonk/fflonk_test.go index 4caddc248d..4150f8297b 100644 --- a/ecc/bls12-377/fflonk/fflonk_test.go +++ b/ecc/bls12-377/fflonk/fflonk_test.go @@ -127,7 +127,7 @@ func TestGetIthRootOne(t *testing.T) { assert := require.New(t) - order := 9 + order := getNextDivisorRMinusOne(9) omega, err := getIthRootOne(order) assert.NoError(err) var orderBigInt big.Int @@ -135,7 +135,7 @@ func TestGetIthRootOne(t *testing.T) { omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - order = 7 + order = order + 1 _, err = getIthRootOne(order) assert.Error(err) } diff --git 
a/ecc/bls12-378/fflonk/fflonk_test.go b/ecc/bls12-378/fflonk/fflonk_test.go index e26d4f7380..75369d27ab 100644 --- a/ecc/bls12-378/fflonk/fflonk_test.go +++ b/ecc/bls12-378/fflonk/fflonk_test.go @@ -127,7 +127,7 @@ func TestGetIthRootOne(t *testing.T) { assert := require.New(t) - order := 9 + order := getNextDivisorRMinusOne(9) omega, err := getIthRootOne(order) assert.NoError(err) var orderBigInt big.Int @@ -135,7 +135,7 @@ func TestGetIthRootOne(t *testing.T) { omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - order = 7 + order = order + 1 _, err = getIthRootOne(order) assert.Error(err) } diff --git a/ecc/bls12-381/fflonk/fflonk_test.go b/ecc/bls12-381/fflonk/fflonk_test.go index 417e22139d..801b0c29e3 100644 --- a/ecc/bls12-381/fflonk/fflonk_test.go +++ b/ecc/bls12-381/fflonk/fflonk_test.go @@ -127,7 +127,7 @@ func TestGetIthRootOne(t *testing.T) { assert := require.New(t) - order := 9 + order := getNextDivisorRMinusOne(9) omega, err := getIthRootOne(order) assert.NoError(err) var orderBigInt big.Int @@ -135,7 +135,7 @@ func TestGetIthRootOne(t *testing.T) { omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - order = 7 + order = order + 1 _, err = getIthRootOne(order) assert.Error(err) } diff --git a/ecc/bls24-315/fflonk/fflonk_test.go b/ecc/bls24-315/fflonk/fflonk_test.go index af7cc9faf7..0c10352d23 100644 --- a/ecc/bls24-315/fflonk/fflonk_test.go +++ b/ecc/bls24-315/fflonk/fflonk_test.go @@ -127,7 +127,7 @@ func TestGetIthRootOne(t *testing.T) { assert := require.New(t) - order := 9 + order := getNextDivisorRMinusOne(9) omega, err := getIthRootOne(order) assert.NoError(err) var orderBigInt big.Int @@ -135,7 +135,7 @@ func TestGetIthRootOne(t *testing.T) { omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - order = 7 + order = order + 1 _, err = getIthRootOne(order) assert.Error(err) } diff --git a/ecc/bls24-317/fflonk/fflonk_test.go b/ecc/bls24-317/fflonk/fflonk_test.go index 5d964e4fda..58e7d67dee 100644 --- 
a/ecc/bls24-317/fflonk/fflonk_test.go +++ b/ecc/bls24-317/fflonk/fflonk_test.go @@ -127,7 +127,7 @@ func TestGetIthRootOne(t *testing.T) { assert := require.New(t) - order := 9 + order := getNextDivisorRMinusOne(9) omega, err := getIthRootOne(order) assert.NoError(err) var orderBigInt big.Int @@ -135,7 +135,7 @@ func TestGetIthRootOne(t *testing.T) { omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - order = 7 + order = order + 1 _, err = getIthRootOne(order) assert.Error(err) } diff --git a/ecc/bn254/fflonk/fflonk_test.go b/ecc/bn254/fflonk/fflonk_test.go index c04a66ae2d..e16359e196 100644 --- a/ecc/bn254/fflonk/fflonk_test.go +++ b/ecc/bn254/fflonk/fflonk_test.go @@ -127,7 +127,7 @@ func TestGetIthRootOne(t *testing.T) { assert := require.New(t) - order := 9 + order := getNextDivisorRMinusOne(9) omega, err := getIthRootOne(order) assert.NoError(err) var orderBigInt big.Int @@ -135,7 +135,7 @@ func TestGetIthRootOne(t *testing.T) { omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - order = 7 + order = order + 1 _, err = getIthRootOne(order) assert.Error(err) } diff --git a/ecc/bw6-633/fflonk/fflonk_test.go b/ecc/bw6-633/fflonk/fflonk_test.go index 1d031a6929..2643aa6aed 100644 --- a/ecc/bw6-633/fflonk/fflonk_test.go +++ b/ecc/bw6-633/fflonk/fflonk_test.go @@ -127,7 +127,7 @@ func TestGetIthRootOne(t *testing.T) { assert := require.New(t) - order := 9 + order := getNextDivisorRMinusOne(9) omega, err := getIthRootOne(order) assert.NoError(err) var orderBigInt big.Int @@ -135,7 +135,7 @@ func TestGetIthRootOne(t *testing.T) { omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - order = 7 + order = order + 1 _, err = getIthRootOne(order) assert.Error(err) } diff --git a/ecc/bw6-756/fflonk/fflonk_test.go b/ecc/bw6-756/fflonk/fflonk_test.go index d17a5372d2..c4e57bfe35 100644 --- a/ecc/bw6-756/fflonk/fflonk_test.go +++ b/ecc/bw6-756/fflonk/fflonk_test.go @@ -127,7 +127,7 @@ func TestGetIthRootOne(t *testing.T) { assert := require.New(t) - order 
:= 9 + order := getNextDivisorRMinusOne(9) omega, err := getIthRootOne(order) assert.NoError(err) var orderBigInt big.Int @@ -135,7 +135,7 @@ func TestGetIthRootOne(t *testing.T) { omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - order = 7 + order = order + 1 _, err = getIthRootOne(order) assert.Error(err) } diff --git a/ecc/bw6-761/fflonk/fflonk_test.go b/ecc/bw6-761/fflonk/fflonk_test.go index 030b6e8db2..35e07fb18a 100644 --- a/ecc/bw6-761/fflonk/fflonk_test.go +++ b/ecc/bw6-761/fflonk/fflonk_test.go @@ -127,7 +127,7 @@ func TestGetIthRootOne(t *testing.T) { assert := require.New(t) - order := 9 + order := getNextDivisorRMinusOne(9) omega, err := getIthRootOne(order) assert.NoError(err) var orderBigInt big.Int @@ -135,7 +135,7 @@ func TestGetIthRootOne(t *testing.T) { omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - order = 7 + order = order + 1 _, err = getIthRootOne(order) assert.Error(err) } From 8a620c108b4b54fedd3ed8911c11c02bb92ba2ec Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 26 Apr 2024 18:48:57 +0200 Subject: [PATCH 41/66] fix: fixed srs size --- internal/generator/fflonk/template/fflonk.test.go.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/generator/fflonk/template/fflonk.test.go.tmpl b/internal/generator/fflonk/template/fflonk.test.go.tmpl index 352006a16e..5f4d581881 100644 --- a/internal/generator/fflonk/template/fflonk.test.go.tmpl +++ b/internal/generator/fflonk/template/fflonk.test.go.tmpl @@ -14,7 +14,7 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 400 + const srsSize = 600 bAlpha = new(big.Int).SetInt64(42) // randomise ? 
testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } From 23deb189b12f0c961728fcfb54bb43cde3a288c6 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 26 Apr 2024 18:49:46 +0200 Subject: [PATCH 42/66] feat: code gen --- ecc/bls12-377/fflonk/fflonk_test.go | 2 +- ecc/bls12-378/fflonk/fflonk_test.go | 2 +- ecc/bls12-381/fflonk/fflonk_test.go | 2 +- ecc/bls24-315/fflonk/fflonk_test.go | 2 +- ecc/bls24-317/fflonk/fflonk_test.go | 2 +- ecc/bn254/fflonk/fflonk_test.go | 2 +- ecc/bw6-633/fflonk/fflonk_test.go | 2 +- ecc/bw6-756/fflonk/fflonk_test.go | 2 +- ecc/bw6-761/fflonk/fflonk_test.go | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk_test.go b/ecc/bls12-377/fflonk/fflonk_test.go index 4150f8297b..96bb98882f 100644 --- a/ecc/bls12-377/fflonk/fflonk_test.go +++ b/ecc/bls12-377/fflonk/fflonk_test.go @@ -32,7 +32,7 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 400 + const srsSize = 600 bAlpha = new(big.Int).SetInt64(42) // randomise ? testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } diff --git a/ecc/bls12-378/fflonk/fflonk_test.go b/ecc/bls12-378/fflonk/fflonk_test.go index 75369d27ab..3d16d388a3 100644 --- a/ecc/bls12-378/fflonk/fflonk_test.go +++ b/ecc/bls12-378/fflonk/fflonk_test.go @@ -32,7 +32,7 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 400 + const srsSize = 600 bAlpha = new(big.Int).SetInt64(42) // randomise ? testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } diff --git a/ecc/bls12-381/fflonk/fflonk_test.go b/ecc/bls12-381/fflonk/fflonk_test.go index 801b0c29e3..eeb91b531b 100644 --- a/ecc/bls12-381/fflonk/fflonk_test.go +++ b/ecc/bls12-381/fflonk/fflonk_test.go @@ -32,7 +32,7 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 400 + const srsSize = 600 bAlpha = new(big.Int).SetInt64(42) // randomise ? 
testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } diff --git a/ecc/bls24-315/fflonk/fflonk_test.go b/ecc/bls24-315/fflonk/fflonk_test.go index 0c10352d23..9b5819dd2e 100644 --- a/ecc/bls24-315/fflonk/fflonk_test.go +++ b/ecc/bls24-315/fflonk/fflonk_test.go @@ -32,7 +32,7 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 400 + const srsSize = 600 bAlpha = new(big.Int).SetInt64(42) // randomise ? testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } diff --git a/ecc/bls24-317/fflonk/fflonk_test.go b/ecc/bls24-317/fflonk/fflonk_test.go index 58e7d67dee..6e9ddf9066 100644 --- a/ecc/bls24-317/fflonk/fflonk_test.go +++ b/ecc/bls24-317/fflonk/fflonk_test.go @@ -32,7 +32,7 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 400 + const srsSize = 600 bAlpha = new(big.Int).SetInt64(42) // randomise ? testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } diff --git a/ecc/bn254/fflonk/fflonk_test.go b/ecc/bn254/fflonk/fflonk_test.go index e16359e196..4265cbbe01 100644 --- a/ecc/bn254/fflonk/fflonk_test.go +++ b/ecc/bn254/fflonk/fflonk_test.go @@ -32,7 +32,7 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 400 + const srsSize = 600 bAlpha = new(big.Int).SetInt64(42) // randomise ? testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } diff --git a/ecc/bw6-633/fflonk/fflonk_test.go b/ecc/bw6-633/fflonk/fflonk_test.go index 2643aa6aed..bd9cf7f10a 100644 --- a/ecc/bw6-633/fflonk/fflonk_test.go +++ b/ecc/bw6-633/fflonk/fflonk_test.go @@ -32,7 +32,7 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 400 + const srsSize = 600 bAlpha = new(big.Int).SetInt64(42) // randomise ? 
testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } diff --git a/ecc/bw6-756/fflonk/fflonk_test.go b/ecc/bw6-756/fflonk/fflonk_test.go index c4e57bfe35..d3bb131a18 100644 --- a/ecc/bw6-756/fflonk/fflonk_test.go +++ b/ecc/bw6-756/fflonk/fflonk_test.go @@ -32,7 +32,7 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 400 + const srsSize = 600 bAlpha = new(big.Int).SetInt64(42) // randomise ? testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } diff --git a/ecc/bw6-761/fflonk/fflonk_test.go b/ecc/bw6-761/fflonk/fflonk_test.go index 35e07fb18a..f6670a6d19 100644 --- a/ecc/bw6-761/fflonk/fflonk_test.go +++ b/ecc/bw6-761/fflonk/fflonk_test.go @@ -32,7 +32,7 @@ var testSrs *kzg.SRS var bAlpha *big.Int func init() { - const srsSize = 400 + const srsSize = 600 bAlpha = new(big.Int).SetInt64(42) // randomise ? testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) } From de8254f87c2d42f1e67ece0ec373ceb00a045606 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 26 Apr 2024 18:56:34 +0200 Subject: [PATCH 43/66] fix: fixed TestGetIthRootOne template --- internal/generator/fflonk/template/fflonk.test.go.tmpl | 5 ----- 1 file changed, 5 deletions(-) diff --git a/internal/generator/fflonk/template/fflonk.test.go.tmpl b/internal/generator/fflonk/template/fflonk.test.go.tmpl index 5f4d581881..316b720572 100644 --- a/internal/generator/fflonk/template/fflonk.test.go.tmpl +++ b/internal/generator/fflonk/template/fflonk.test.go.tmpl @@ -106,7 +106,6 @@ func TestCommit(t *testing.T) { } func TestGetIthRootOne(t *testing.T) { - assert := require.New(t) order := getNextDivisorRMinusOne(9) @@ -116,8 +115,4 @@ func TestGetIthRootOne(t *testing.T) { orderBigInt.SetUint64(uint64(order)) omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - - order = order+1 - _, err = getIthRootOne(order) - assert.Error(err) } From 2efc99b1ab0ea87ff0ba993ad84b9c47e5caf392 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 26 Apr 2024 
18:58:03 +0200 Subject: [PATCH 44/66] feat: code gen --- ecc/bls12-377/fflonk/fflonk_test.go | 5 ----- ecc/bls12-378/fflonk/fflonk_test.go | 5 ----- ecc/bls12-381/fflonk/fflonk_test.go | 5 ----- ecc/bls24-315/fflonk/fflonk_test.go | 5 ----- ecc/bls24-317/fflonk/fflonk_test.go | 5 ----- ecc/bn254/fflonk/fflonk_test.go | 5 ----- ecc/bw6-633/fflonk/fflonk_test.go | 5 ----- ecc/bw6-756/fflonk/fflonk_test.go | 5 ----- ecc/bw6-761/fflonk/fflonk_test.go | 5 ----- 9 files changed, 45 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk_test.go b/ecc/bls12-377/fflonk/fflonk_test.go index 96bb98882f..143f7c552a 100644 --- a/ecc/bls12-377/fflonk/fflonk_test.go +++ b/ecc/bls12-377/fflonk/fflonk_test.go @@ -124,7 +124,6 @@ func TestCommit(t *testing.T) { } func TestGetIthRootOne(t *testing.T) { - assert := require.New(t) order := getNextDivisorRMinusOne(9) @@ -134,8 +133,4 @@ func TestGetIthRootOne(t *testing.T) { orderBigInt.SetUint64(uint64(order)) omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - - order = order + 1 - _, err = getIthRootOne(order) - assert.Error(err) } diff --git a/ecc/bls12-378/fflonk/fflonk_test.go b/ecc/bls12-378/fflonk/fflonk_test.go index 3d16d388a3..f27ea33482 100644 --- a/ecc/bls12-378/fflonk/fflonk_test.go +++ b/ecc/bls12-378/fflonk/fflonk_test.go @@ -124,7 +124,6 @@ func TestCommit(t *testing.T) { } func TestGetIthRootOne(t *testing.T) { - assert := require.New(t) order := getNextDivisorRMinusOne(9) @@ -134,8 +133,4 @@ func TestGetIthRootOne(t *testing.T) { orderBigInt.SetUint64(uint64(order)) omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - - order = order + 1 - _, err = getIthRootOne(order) - assert.Error(err) } diff --git a/ecc/bls12-381/fflonk/fflonk_test.go b/ecc/bls12-381/fflonk/fflonk_test.go index eeb91b531b..77ff8b54f0 100644 --- a/ecc/bls12-381/fflonk/fflonk_test.go +++ b/ecc/bls12-381/fflonk/fflonk_test.go @@ -124,7 +124,6 @@ func TestCommit(t *testing.T) { } func TestGetIthRootOne(t *testing.T) { - assert := 
require.New(t) order := getNextDivisorRMinusOne(9) @@ -134,8 +133,4 @@ func TestGetIthRootOne(t *testing.T) { orderBigInt.SetUint64(uint64(order)) omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - - order = order + 1 - _, err = getIthRootOne(order) - assert.Error(err) } diff --git a/ecc/bls24-315/fflonk/fflonk_test.go b/ecc/bls24-315/fflonk/fflonk_test.go index 9b5819dd2e..0084bf3a55 100644 --- a/ecc/bls24-315/fflonk/fflonk_test.go +++ b/ecc/bls24-315/fflonk/fflonk_test.go @@ -124,7 +124,6 @@ func TestCommit(t *testing.T) { } func TestGetIthRootOne(t *testing.T) { - assert := require.New(t) order := getNextDivisorRMinusOne(9) @@ -134,8 +133,4 @@ func TestGetIthRootOne(t *testing.T) { orderBigInt.SetUint64(uint64(order)) omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - - order = order + 1 - _, err = getIthRootOne(order) - assert.Error(err) } diff --git a/ecc/bls24-317/fflonk/fflonk_test.go b/ecc/bls24-317/fflonk/fflonk_test.go index 6e9ddf9066..6385ff35ca 100644 --- a/ecc/bls24-317/fflonk/fflonk_test.go +++ b/ecc/bls24-317/fflonk/fflonk_test.go @@ -124,7 +124,6 @@ func TestCommit(t *testing.T) { } func TestGetIthRootOne(t *testing.T) { - assert := require.New(t) order := getNextDivisorRMinusOne(9) @@ -134,8 +133,4 @@ func TestGetIthRootOne(t *testing.T) { orderBigInt.SetUint64(uint64(order)) omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - - order = order + 1 - _, err = getIthRootOne(order) - assert.Error(err) } diff --git a/ecc/bn254/fflonk/fflonk_test.go b/ecc/bn254/fflonk/fflonk_test.go index 4265cbbe01..28a02458f8 100644 --- a/ecc/bn254/fflonk/fflonk_test.go +++ b/ecc/bn254/fflonk/fflonk_test.go @@ -124,7 +124,6 @@ func TestCommit(t *testing.T) { } func TestGetIthRootOne(t *testing.T) { - assert := require.New(t) order := getNextDivisorRMinusOne(9) @@ -134,8 +133,4 @@ func TestGetIthRootOne(t *testing.T) { orderBigInt.SetUint64(uint64(order)) omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - - order = order + 1 - _, err 
= getIthRootOne(order) - assert.Error(err) } diff --git a/ecc/bw6-633/fflonk/fflonk_test.go b/ecc/bw6-633/fflonk/fflonk_test.go index bd9cf7f10a..7615629666 100644 --- a/ecc/bw6-633/fflonk/fflonk_test.go +++ b/ecc/bw6-633/fflonk/fflonk_test.go @@ -124,7 +124,6 @@ func TestCommit(t *testing.T) { } func TestGetIthRootOne(t *testing.T) { - assert := require.New(t) order := getNextDivisorRMinusOne(9) @@ -134,8 +133,4 @@ func TestGetIthRootOne(t *testing.T) { orderBigInt.SetUint64(uint64(order)) omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - - order = order + 1 - _, err = getIthRootOne(order) - assert.Error(err) } diff --git a/ecc/bw6-756/fflonk/fflonk_test.go b/ecc/bw6-756/fflonk/fflonk_test.go index d3bb131a18..2bacfcdcc0 100644 --- a/ecc/bw6-756/fflonk/fflonk_test.go +++ b/ecc/bw6-756/fflonk/fflonk_test.go @@ -124,7 +124,6 @@ func TestCommit(t *testing.T) { } func TestGetIthRootOne(t *testing.T) { - assert := require.New(t) order := getNextDivisorRMinusOne(9) @@ -134,8 +133,4 @@ func TestGetIthRootOne(t *testing.T) { orderBigInt.SetUint64(uint64(order)) omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - - order = order + 1 - _, err = getIthRootOne(order) - assert.Error(err) } diff --git a/ecc/bw6-761/fflonk/fflonk_test.go b/ecc/bw6-761/fflonk/fflonk_test.go index f6670a6d19..c1f726fc68 100644 --- a/ecc/bw6-761/fflonk/fflonk_test.go +++ b/ecc/bw6-761/fflonk/fflonk_test.go @@ -124,7 +124,6 @@ func TestCommit(t *testing.T) { } func TestGetIthRootOne(t *testing.T) { - assert := require.New(t) order := getNextDivisorRMinusOne(9) @@ -134,8 +133,4 @@ func TestGetIthRootOne(t *testing.T) { orderBigInt.SetUint64(uint64(order)) omega.Exp(omega, &orderBigInt) assert.True(omega.IsOne()) - - order = order + 1 - _, err = getIthRootOne(order) - assert.Error(err) } From 341ba5bffd7d4a3f52155b67a551de9a6acec09a Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 26 Apr 2024 19:04:26 +0200 Subject: [PATCH 45/66] style: remaing -> remaining --- 
internal/generator/fflonk/template/fflonk.go.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/generator/fflonk/template/fflonk.go.tmpl b/internal/generator/fflonk/template/fflonk.go.tmpl index 169e50587b..e20df4a656 100644 --- a/internal/generator/fflonk/template/fflonk.go.tmpl +++ b/internal/generator/fflonk/template/fflonk.go.tmpl @@ -100,7 +100,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaing polynomials are zero + for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } From 46e95f8247ebc22a8536051abcc23c1b4f67915b Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 26 Apr 2024 19:07:26 +0200 Subject: [PATCH 46/66] feat: code gen --- ecc/bls12-377/fflonk/fflonk.go | 2 +- ecc/bls12-378/fflonk/fflonk.go | 2 +- ecc/bls12-381/fflonk/fflonk.go | 2 +- ecc/bls24-315/fflonk/fflonk.go | 2 +- ecc/bls24-317/fflonk/fflonk.go | 2 +- ecc/bn254/fflonk/fflonk.go | 2 +- ecc/bw6-633/fflonk/fflonk.go | 2 +- ecc/bw6-756/fflonk/fflonk.go | 2 +- ecc/bw6-761/fflonk/fflonk.go | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk.go b/ecc/bls12-377/fflonk/fflonk.go index 75649f5e41..6acd7bc79f 100644 --- a/ecc/bls12-377/fflonk/fflonk.go +++ b/ecc/bls12-377/fflonk/fflonk.go @@ -118,7 +118,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaing polynomials are zero + for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } diff --git 
a/ecc/bls12-378/fflonk/fflonk.go b/ecc/bls12-378/fflonk/fflonk.go index d1e276c167..2397f3fc77 100644 --- a/ecc/bls12-378/fflonk/fflonk.go +++ b/ecc/bls12-378/fflonk/fflonk.go @@ -118,7 +118,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaing polynomials are zero + for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } diff --git a/ecc/bls12-381/fflonk/fflonk.go b/ecc/bls12-381/fflonk/fflonk.go index 79b20c1a8e..037d45885f 100644 --- a/ecc/bls12-381/fflonk/fflonk.go +++ b/ecc/bls12-381/fflonk/fflonk.go @@ -118,7 +118,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaing polynomials are zero + for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } diff --git a/ecc/bls24-315/fflonk/fflonk.go b/ecc/bls24-315/fflonk/fflonk.go index 708e50ca85..dd7ce2d560 100644 --- a/ecc/bls24-315/fflonk/fflonk.go +++ b/ecc/bls24-315/fflonk/fflonk.go @@ -118,7 +118,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaing polynomials are zero + for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } diff --git a/ecc/bls24-317/fflonk/fflonk.go b/ecc/bls24-317/fflonk/fflonk.go index f9de5d74e5..c88257f32c 100644 --- a/ecc/bls24-317/fflonk/fflonk.go +++ 
b/ecc/bls24-317/fflonk/fflonk.go @@ -118,7 +118,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaing polynomials are zero + for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index 32fb98795f..77b37730ba 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -118,7 +118,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaing polynomials are zero + for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } diff --git a/ecc/bw6-633/fflonk/fflonk.go b/ecc/bw6-633/fflonk/fflonk.go index 2b07024ab3..c009315e66 100644 --- a/ecc/bw6-633/fflonk/fflonk.go +++ b/ecc/bw6-633/fflonk/fflonk.go @@ -118,7 +118,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaing polynomials are zero + for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } diff --git a/ecc/bw6-756/fflonk/fflonk.go b/ecc/bw6-756/fflonk/fflonk.go index 38bfd4532f..6c687b4aef 100644 --- a/ecc/bw6-756/fflonk/fflonk.go +++ b/ecc/bw6-756/fflonk/fflonk.go @@ -118,7 +118,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues[i][j][k] = eval(p[i][j], 
pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaing polynomials are zero + for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } diff --git a/ecc/bw6-761/fflonk/fflonk.go b/ecc/bw6-761/fflonk/fflonk.go index 8c6345f209..fe58acc678 100644 --- a/ecc/bw6-761/fflonk/fflonk.go +++ b/ecc/bw6-761/fflonk/fflonk.go @@ -118,7 +118,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaing polynomials are zero + for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } From ec436b4c2b525f8bc5b1138147bbce59026a8148 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Wed, 18 Sep 2024 18:54:28 +0200 Subject: [PATCH 47/66] fix: fixed panic --- ecc/bls12-377/shplonk/shplonk_test.go | 6 +++++- ecc/bls12-378/shplonk/shplonk_test.go | 6 +++++- ecc/bls12-381/shplonk/shplonk_test.go | 6 +++++- ecc/bls24-315/shplonk/shplonk_test.go | 6 +++++- ecc/bls24-317/shplonk/shplonk_test.go | 6 +++++- ecc/bn254/shplonk/shplonk_test.go | 6 +++++- ecc/bw6-633/shplonk/shplonk_test.go | 6 +++++- ecc/bw6-756/shplonk/shplonk_test.go | 6 +++++- ecc/bw6-761/shplonk/shplonk_test.go | 6 +++++- internal/generator/shplonk/template/shplonk.test.go.tmpl | 6 +++++- 10 files changed, 50 insertions(+), 10 deletions(-) diff --git a/ecc/bls12-377/shplonk/shplonk_test.go b/ecc/bls12-377/shplonk/shplonk_test.go index 69a26be1b8..0b69e54a71 100644 --- a/ecc/bls12-377/shplonk/shplonk_test.go +++ b/ecc/bls12-377/shplonk/shplonk_test.go @@ -34,7 +34,11 @@ var bAlpha *big.Int func init() { const srsSize = 230 bAlpha = new(big.Int).SetInt64(42) // randomise ? 
- testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } } func TestOpening(t *testing.T) { diff --git a/ecc/bls12-378/shplonk/shplonk_test.go b/ecc/bls12-378/shplonk/shplonk_test.go index dcb5f69421..ac3d707cf5 100644 --- a/ecc/bls12-378/shplonk/shplonk_test.go +++ b/ecc/bls12-378/shplonk/shplonk_test.go @@ -34,7 +34,11 @@ var bAlpha *big.Int func init() { const srsSize = 230 bAlpha = new(big.Int).SetInt64(42) // randomise ? - testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } } func TestOpening(t *testing.T) { diff --git a/ecc/bls12-381/shplonk/shplonk_test.go b/ecc/bls12-381/shplonk/shplonk_test.go index 043238b47d..12468961cb 100644 --- a/ecc/bls12-381/shplonk/shplonk_test.go +++ b/ecc/bls12-381/shplonk/shplonk_test.go @@ -34,7 +34,11 @@ var bAlpha *big.Int func init() { const srsSize = 230 bAlpha = new(big.Int).SetInt64(42) // randomise ? - testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } } func TestOpening(t *testing.T) { diff --git a/ecc/bls24-315/shplonk/shplonk_test.go b/ecc/bls24-315/shplonk/shplonk_test.go index 3e282d99a1..9bc663dcf0 100644 --- a/ecc/bls24-315/shplonk/shplonk_test.go +++ b/ecc/bls24-315/shplonk/shplonk_test.go @@ -34,7 +34,11 @@ var bAlpha *big.Int func init() { const srsSize = 230 bAlpha = new(big.Int).SetInt64(42) // randomise ? 
- testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } } func TestOpening(t *testing.T) { diff --git a/ecc/bls24-317/shplonk/shplonk_test.go b/ecc/bls24-317/shplonk/shplonk_test.go index 14dd5af291..72c89c9629 100644 --- a/ecc/bls24-317/shplonk/shplonk_test.go +++ b/ecc/bls24-317/shplonk/shplonk_test.go @@ -34,7 +34,11 @@ var bAlpha *big.Int func init() { const srsSize = 230 bAlpha = new(big.Int).SetInt64(42) // randomise ? - testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } } func TestOpening(t *testing.T) { diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 6d2c2841b0..be2a812dc6 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -34,7 +34,11 @@ var bAlpha *big.Int func init() { const srsSize = 230 bAlpha = new(big.Int).SetInt64(42) // randomise ? - testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } } func TestOpening(t *testing.T) { diff --git a/ecc/bw6-633/shplonk/shplonk_test.go b/ecc/bw6-633/shplonk/shplonk_test.go index 807b263e08..cde6f766c2 100644 --- a/ecc/bw6-633/shplonk/shplonk_test.go +++ b/ecc/bw6-633/shplonk/shplonk_test.go @@ -34,7 +34,11 @@ var bAlpha *big.Int func init() { const srsSize = 230 bAlpha = new(big.Int).SetInt64(42) // randomise ? 
- testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } } func TestOpening(t *testing.T) { diff --git a/ecc/bw6-756/shplonk/shplonk_test.go b/ecc/bw6-756/shplonk/shplonk_test.go index c729294363..2ada895dff 100644 --- a/ecc/bw6-756/shplonk/shplonk_test.go +++ b/ecc/bw6-756/shplonk/shplonk_test.go @@ -34,7 +34,11 @@ var bAlpha *big.Int func init() { const srsSize = 230 bAlpha = new(big.Int).SetInt64(42) // randomise ? - testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } } func TestOpening(t *testing.T) { diff --git a/ecc/bw6-761/shplonk/shplonk_test.go b/ecc/bw6-761/shplonk/shplonk_test.go index e962fe6dff..3fb7e0715c 100644 --- a/ecc/bw6-761/shplonk/shplonk_test.go +++ b/ecc/bw6-761/shplonk/shplonk_test.go @@ -34,7 +34,11 @@ var bAlpha *big.Int func init() { const srsSize = 230 bAlpha = new(big.Int).SetInt64(42) // randomise ? - testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } } func TestOpening(t *testing.T) { diff --git a/internal/generator/shplonk/template/shplonk.test.go.tmpl b/internal/generator/shplonk/template/shplonk.test.go.tmpl index d3f6611b64..c7e3514cfe 100644 --- a/internal/generator/shplonk/template/shplonk.test.go.tmpl +++ b/internal/generator/shplonk/template/shplonk.test.go.tmpl @@ -16,7 +16,11 @@ var bAlpha *big.Int func init() { const srsSize = 230 bAlpha = new(big.Int).SetInt64(42) // randomise ? 
- testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err!=nil { + panic(err) + } } func TestOpening(t *testing.T) { From d2074285b9a9fda508ba9f44ce6ef23d7840bcc3 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Wed, 18 Sep 2024 18:57:42 +0200 Subject: [PATCH 48/66] feat: randomise SRS --- ecc/bls12-377/shplonk/shplonk_test.go | 4 +++- ecc/bls12-378/shplonk/shplonk_test.go | 4 +++- ecc/bls12-381/shplonk/shplonk_test.go | 4 +++- ecc/bls24-315/shplonk/shplonk_test.go | 4 +++- ecc/bls24-317/shplonk/shplonk_test.go | 4 +++- ecc/bn254/shplonk/shplonk_test.go | 4 +++- ecc/bw6-633/shplonk/shplonk_test.go | 4 +++- ecc/bw6-756/shplonk/shplonk_test.go | 4 +++- ecc/bw6-761/shplonk/shplonk_test.go | 4 +++- internal/generator/shplonk/template/shplonk.test.go.tmpl | 5 ++++- 10 files changed, 31 insertions(+), 10 deletions(-) diff --git a/ecc/bls12-377/shplonk/shplonk_test.go b/ecc/bls12-377/shplonk/shplonk_test.go index 0b69e54a71..b956361f86 100644 --- a/ecc/bls12-377/shplonk/shplonk_test.go +++ b/ecc/bls12-377/shplonk/shplonk_test.go @@ -33,7 +33,9 @@ var bAlpha *big.Int func init() { const srsSize = 230 - bAlpha = new(big.Int).SetInt64(42) // randomise ? + var frAlpha fr.Element + frAlpha.SetRandom() + frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) if err != nil { diff --git a/ecc/bls12-378/shplonk/shplonk_test.go b/ecc/bls12-378/shplonk/shplonk_test.go index ac3d707cf5..5bbb9fc122 100644 --- a/ecc/bls12-378/shplonk/shplonk_test.go +++ b/ecc/bls12-378/shplonk/shplonk_test.go @@ -33,7 +33,9 @@ var bAlpha *big.Int func init() { const srsSize = 230 - bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ var frAlpha fr.Element + frAlpha.SetRandom() + frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) if err != nil { diff --git a/ecc/bls12-381/shplonk/shplonk_test.go b/ecc/bls12-381/shplonk/shplonk_test.go index 12468961cb..67eba49ba3 100644 --- a/ecc/bls12-381/shplonk/shplonk_test.go +++ b/ecc/bls12-381/shplonk/shplonk_test.go @@ -33,7 +33,9 @@ var bAlpha *big.Int func init() { const srsSize = 230 - bAlpha = new(big.Int).SetInt64(42) // randomise ? + var frAlpha fr.Element + frAlpha.SetRandom() + frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) if err != nil { diff --git a/ecc/bls24-315/shplonk/shplonk_test.go b/ecc/bls24-315/shplonk/shplonk_test.go index 9bc663dcf0..93b6636857 100644 --- a/ecc/bls24-315/shplonk/shplonk_test.go +++ b/ecc/bls24-315/shplonk/shplonk_test.go @@ -33,7 +33,9 @@ var bAlpha *big.Int func init() { const srsSize = 230 - bAlpha = new(big.Int).SetInt64(42) // randomise ? + var frAlpha fr.Element + frAlpha.SetRandom() + frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) if err != nil { diff --git a/ecc/bls24-317/shplonk/shplonk_test.go b/ecc/bls24-317/shplonk/shplonk_test.go index 72c89c9629..7afffeb886 100644 --- a/ecc/bls24-317/shplonk/shplonk_test.go +++ b/ecc/bls24-317/shplonk/shplonk_test.go @@ -33,7 +33,9 @@ var bAlpha *big.Int func init() { const srsSize = 230 - bAlpha = new(big.Int).SetInt64(42) // randomise ? + var frAlpha fr.Element + frAlpha.SetRandom() + frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) if err != nil { diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index be2a812dc6..7b14e5544f 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -33,7 +33,9 @@ var bAlpha *big.Int func init() { const srsSize = 230 - bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ var frAlpha fr.Element + frAlpha.SetRandom() + frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) if err != nil { diff --git a/ecc/bw6-633/shplonk/shplonk_test.go b/ecc/bw6-633/shplonk/shplonk_test.go index cde6f766c2..c813a1bf53 100644 --- a/ecc/bw6-633/shplonk/shplonk_test.go +++ b/ecc/bw6-633/shplonk/shplonk_test.go @@ -33,7 +33,9 @@ var bAlpha *big.Int func init() { const srsSize = 230 - bAlpha = new(big.Int).SetInt64(42) // randomise ? + var frAlpha fr.Element + frAlpha.SetRandom() + frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) if err != nil { diff --git a/ecc/bw6-756/shplonk/shplonk_test.go b/ecc/bw6-756/shplonk/shplonk_test.go index 2ada895dff..c72b57d7ec 100644 --- a/ecc/bw6-756/shplonk/shplonk_test.go +++ b/ecc/bw6-756/shplonk/shplonk_test.go @@ -33,7 +33,9 @@ var bAlpha *big.Int func init() { const srsSize = 230 - bAlpha = new(big.Int).SetInt64(42) // randomise ? + var frAlpha fr.Element + frAlpha.SetRandom() + frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) if err != nil { diff --git a/ecc/bw6-761/shplonk/shplonk_test.go b/ecc/bw6-761/shplonk/shplonk_test.go index 3fb7e0715c..95702777e1 100644 --- a/ecc/bw6-761/shplonk/shplonk_test.go +++ b/ecc/bw6-761/shplonk/shplonk_test.go @@ -33,7 +33,9 @@ var bAlpha *big.Int func init() { const srsSize = 230 - bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ var frAlpha fr.Element + frAlpha.SetRandom() + frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) if err != nil { diff --git a/internal/generator/shplonk/template/shplonk.test.go.tmpl b/internal/generator/shplonk/template/shplonk.test.go.tmpl index c7e3514cfe..d6e8302b93 100644 --- a/internal/generator/shplonk/template/shplonk.test.go.tmpl +++ b/internal/generator/shplonk/template/shplonk.test.go.tmpl @@ -13,9 +13,12 @@ import ( var testSrs *kzg.SRS var bAlpha *big.Int + func init() { const srsSize = 230 - bAlpha = new(big.Int).SetInt64(42) // randomise ? + var frAlpha fr.Element + frAlpha.SetRandom() + frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) if err!=nil { From b4815d64b148817932e0c23d18ae2f778a7a5eda Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Wed, 18 Sep 2024 19:02:36 +0200 Subject: [PATCH 49/66] fix: raise error when nbDigest != number of polynomials --- ecc/bls12-377/shplonk/shplonk.go | 8 ++++++-- ecc/bls12-378/shplonk/shplonk.go | 8 ++++++-- ecc/bls12-381/shplonk/shplonk.go | 8 ++++++-- ecc/bls24-315/shplonk/shplonk.go | 8 ++++++-- ecc/bls24-317/shplonk/shplonk.go | 8 ++++++-- ecc/bn254/shplonk/shplonk.go | 8 ++++++-- ecc/bw6-633/shplonk/shplonk.go | 8 ++++++-- ecc/bw6-756/shplonk/shplonk.go | 8 ++++++-- ecc/bw6-761/shplonk/shplonk.go | 8 ++++++-- internal/generator/shplonk/template/shplonk.go.tmpl | 4 ++++ 10 files changed, 58 insertions(+), 18 deletions(-) diff --git a/ecc/bls12-377/shplonk/shplonk.go b/ecc/bls12-377/shplonk/shplonk.go index 13faee07d4..fb17486b45 100644 --- a/ecc/bls12-377/shplonk/shplonk.go +++ b/ecc/bls12-377/shplonk/shplonk.go @@ -29,8 +29,9 @@ import ( ) var ( - ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") - ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number 
of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. @@ -58,6 +59,9 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + if len(polynomials) != len(digests) { + return res, ErrInvalidNumberOfDigests + } // transcript fs := fiatshamir.NewTranscript(hf, "gamma", "z") diff --git a/ecc/bls12-378/shplonk/shplonk.go b/ecc/bls12-378/shplonk/shplonk.go index 1f447522ab..6f464422c3 100644 --- a/ecc/bls12-378/shplonk/shplonk.go +++ b/ecc/bls12-378/shplonk/shplonk.go @@ -29,8 +29,9 @@ import ( ) var ( - ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") - ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -58,6 +59,9 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + if len(polynomials) != len(digests) { + return res, ErrInvalidNumberOfDigests + } // transcript fs := fiatshamir.NewTranscript(hf, "gamma", "z") diff --git a/ecc/bls12-381/shplonk/shplonk.go b/ecc/bls12-381/shplonk/shplonk.go index 43d624ae23..09fc264dd9 100644 --- a/ecc/bls12-381/shplonk/shplonk.go +++ b/ecc/bls12-381/shplonk/shplonk.go @@ -29,8 +29,9 @@ import ( ) var ( - ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") - ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -58,6 +59,9 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + if len(polynomials) != len(digests) { + return res, ErrInvalidNumberOfDigests + } // transcript fs := fiatshamir.NewTranscript(hf, "gamma", "z") diff --git a/ecc/bls24-315/shplonk/shplonk.go b/ecc/bls24-315/shplonk/shplonk.go index f3be3f5bfa..e0733e5d1c 100644 --- a/ecc/bls24-315/shplonk/shplonk.go +++ b/ecc/bls24-315/shplonk/shplonk.go @@ -29,8 +29,9 @@ import ( ) var ( - ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") - ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -58,6 +59,9 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + if len(polynomials) != len(digests) { + return res, ErrInvalidNumberOfDigests + } // transcript fs := fiatshamir.NewTranscript(hf, "gamma", "z") diff --git a/ecc/bls24-317/shplonk/shplonk.go b/ecc/bls24-317/shplonk/shplonk.go index dc8cdfa14a..722ca208cd 100644 --- a/ecc/bls24-317/shplonk/shplonk.go +++ b/ecc/bls24-317/shplonk/shplonk.go @@ -29,8 +29,9 @@ import ( ) var ( - ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") - ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -58,6 +59,9 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + if len(polynomials) != len(digests) { + return res, ErrInvalidNumberOfDigests + } // transcript fs := fiatshamir.NewTranscript(hf, "gamma", "z") diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 3ed334464e..c33fa05c4c 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -29,8 +29,9 @@ import ( ) var ( - ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") - ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -58,6 +59,9 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + if len(polynomials) != len(digests) { + return res, ErrInvalidNumberOfDigests + } // transcript fs := fiatshamir.NewTranscript(hf, "gamma", "z") diff --git a/ecc/bw6-633/shplonk/shplonk.go b/ecc/bw6-633/shplonk/shplonk.go index a3d6bd1999..a078923710 100644 --- a/ecc/bw6-633/shplonk/shplonk.go +++ b/ecc/bw6-633/shplonk/shplonk.go @@ -29,8 +29,9 @@ import ( ) var ( - ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") - ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -58,6 +59,9 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + if len(polynomials) != len(digests) { + return res, ErrInvalidNumberOfDigests + } // transcript fs := fiatshamir.NewTranscript(hf, "gamma", "z") diff --git a/ecc/bw6-756/shplonk/shplonk.go b/ecc/bw6-756/shplonk/shplonk.go index d8353e303b..252c034fca 100644 --- a/ecc/bw6-756/shplonk/shplonk.go +++ b/ecc/bw6-756/shplonk/shplonk.go @@ -29,8 +29,9 @@ import ( ) var ( - ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") - ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -58,6 +59,9 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + if len(polynomials) != len(digests) { + return res, ErrInvalidNumberOfDigests + } // transcript fs := fiatshamir.NewTranscript(hf, "gamma", "z") diff --git a/ecc/bw6-761/shplonk/shplonk.go b/ecc/bw6-761/shplonk/shplonk.go index a4d81d6def..9e6e90b00e 100644 --- a/ecc/bw6-761/shplonk/shplonk.go +++ b/ecc/bw6-761/shplonk/shplonk.go @@ -29,8 +29,9 @@ import ( ) var ( - ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") - ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") + ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -58,6 +59,9 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + if len(polynomials) != len(digests) { + return res, ErrInvalidNumberOfDigests + } // transcript fs := fiatshamir.NewTranscript(hf, "gamma", "z") diff --git a/internal/generator/shplonk/template/shplonk.go.tmpl b/internal/generator/shplonk/template/shplonk.go.tmpl index c5f8fcbee4..9d6a153c2d 100644 --- a/internal/generator/shplonk/template/shplonk.go.tmpl +++ b/internal/generator/shplonk/template/shplonk.go.tmpl @@ -13,6 +13,7 @@ import ( var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") + ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. @@ -40,6 +41,9 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } + if len(polynomials) != len(digests) { + return res, ErrInvalidNumberOfDigests + } // transcript fs := fiatshamir.NewTranscript(hf, "gamma", "z") From 5a5f678981f60f6d056bb36f3f0c0d04dbc6e4b5 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Wed, 18 Sep 2024 19:07:09 +0200 Subject: [PATCH 50/66] style: nbInstances -> nbPolynomials --- ecc/bls12-377/shplonk/shplonk.go | 14 +++++++------- ecc/bls12-378/shplonk/shplonk.go | 14 +++++++------- ecc/bls12-381/shplonk/shplonk.go | 14 +++++++------- ecc/bls24-315/shplonk/shplonk.go | 14 +++++++------- ecc/bls24-317/shplonk/shplonk.go | 14 +++++++------- ecc/bn254/shplonk/shplonk.go | 14 +++++++------- ecc/bw6-633/shplonk/shplonk.go | 14 +++++++------- ecc/bw6-756/shplonk/shplonk.go | 14 +++++++------- ecc/bw6-761/shplonk/shplonk.go | 14 +++++++------- 
.../generator/shplonk/template/shplonk.go.tmpl | 14 +++++++------- 10 files changed, 70 insertions(+), 70 deletions(-) diff --git a/ecc/bls12-377/shplonk/shplonk.go b/ecc/bls12-377/shplonk/shplonk.go index fb17486b45..25ab085deb 100644 --- a/ecc/bls12-377/shplonk/shplonk.go +++ b/ecc/bls12-377/shplonk/shplonk.go @@ -55,7 +55,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E var res OpeningProof - nbInstances := len(polynomials) + nbPolynomials := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } @@ -81,7 +81,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E } nbPoints := 0 sizeSi := make([]int, len(points)) - for i := 0; i < nbInstances; i++ { + for i := 0; i < nbPolynomials; i++ { nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } @@ -90,16 +90,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + res.ClaimedValues = make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element accGamma.SetOne() - ztMinusSi := make([][]fr.Element, nbInstances) - ri := make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + ztMinusSi := make([][]fr.Element, nbPolynomials) + ri := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { for j := 0; j < len(points[i]); j++ { res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) diff --git a/ecc/bls12-378/shplonk/shplonk.go b/ecc/bls12-378/shplonk/shplonk.go index 6f464422c3..8ee2ed9fba 100644 --- a/ecc/bls12-378/shplonk/shplonk.go +++ 
b/ecc/bls12-378/shplonk/shplonk.go @@ -55,7 +55,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E var res OpeningProof - nbInstances := len(polynomials) + nbPolynomials := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } @@ -81,7 +81,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E } nbPoints := 0 sizeSi := make([]int, len(points)) - for i := 0; i < nbInstances; i++ { + for i := 0; i < nbPolynomials; i++ { nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } @@ -90,16 +90,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + res.ClaimedValues = make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element accGamma.SetOne() - ztMinusSi := make([][]fr.Element, nbInstances) - ri := make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + ztMinusSi := make([][]fr.Element, nbPolynomials) + ri := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { for j := 0; j < len(points[i]); j++ { res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) diff --git a/ecc/bls12-381/shplonk/shplonk.go b/ecc/bls12-381/shplonk/shplonk.go index 09fc264dd9..835efaa8fc 100644 --- a/ecc/bls12-381/shplonk/shplonk.go +++ b/ecc/bls12-381/shplonk/shplonk.go @@ -55,7 +55,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E var res OpeningProof - nbInstances := len(polynomials) + nbPolynomials := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints 
} @@ -81,7 +81,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E } nbPoints := 0 sizeSi := make([]int, len(points)) - for i := 0; i < nbInstances; i++ { + for i := 0; i < nbPolynomials; i++ { nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } @@ -90,16 +90,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + res.ClaimedValues = make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element accGamma.SetOne() - ztMinusSi := make([][]fr.Element, nbInstances) - ri := make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + ztMinusSi := make([][]fr.Element, nbPolynomials) + ri := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { for j := 0; j < len(points[i]); j++ { res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) diff --git a/ecc/bls24-315/shplonk/shplonk.go b/ecc/bls24-315/shplonk/shplonk.go index e0733e5d1c..2629d18d3d 100644 --- a/ecc/bls24-315/shplonk/shplonk.go +++ b/ecc/bls24-315/shplonk/shplonk.go @@ -55,7 +55,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E var res OpeningProof - nbInstances := len(polynomials) + nbPolynomials := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } @@ -81,7 +81,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E } nbPoints := 0 sizeSi := make([]int, len(points)) - for i := 0; i < nbInstances; i++ { + for i := 0; i < nbPolynomials; i++ { nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } @@ -90,16 
+90,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + res.ClaimedValues = make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element accGamma.SetOne() - ztMinusSi := make([][]fr.Element, nbInstances) - ri := make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + ztMinusSi := make([][]fr.Element, nbPolynomials) + ri := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { for j := 0; j < len(points[i]); j++ { res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) diff --git a/ecc/bls24-317/shplonk/shplonk.go b/ecc/bls24-317/shplonk/shplonk.go index 722ca208cd..ff8f07e185 100644 --- a/ecc/bls24-317/shplonk/shplonk.go +++ b/ecc/bls24-317/shplonk/shplonk.go @@ -55,7 +55,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E var res OpeningProof - nbInstances := len(polynomials) + nbPolynomials := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } @@ -81,7 +81,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E } nbPoints := 0 sizeSi := make([]int, len(points)) - for i := 0; i < nbInstances; i++ { + for i := 0; i < nbPolynomials; i++ { nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } @@ -90,16 +90,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - 
res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + res.ClaimedValues = make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element accGamma.SetOne() - ztMinusSi := make([][]fr.Element, nbInstances) - ri := make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + ztMinusSi := make([][]fr.Element, nbPolynomials) + ri := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { for j := 0; j < len(points[i]); j++ { res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index c33fa05c4c..7a86ed72e6 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -55,7 +55,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E var res OpeningProof - nbInstances := len(polynomials) + nbPolynomials := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } @@ -81,7 +81,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E } nbPoints := 0 sizeSi := make([]int, len(points)) - for i := 0; i < nbInstances; i++ { + for i := 0; i < nbPolynomials; i++ { nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } @@ -90,16 +90,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + res.ClaimedValues = make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element accGamma.SetOne() - ztMinusSi := 
make([][]fr.Element, nbInstances) - ri := make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + ztMinusSi := make([][]fr.Element, nbPolynomials) + ri := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { for j := 0; j < len(points[i]); j++ { res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) diff --git a/ecc/bw6-633/shplonk/shplonk.go b/ecc/bw6-633/shplonk/shplonk.go index a078923710..91b953d4d7 100644 --- a/ecc/bw6-633/shplonk/shplonk.go +++ b/ecc/bw6-633/shplonk/shplonk.go @@ -55,7 +55,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E var res OpeningProof - nbInstances := len(polynomials) + nbPolynomials := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } @@ -81,7 +81,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E } nbPoints := 0 sizeSi := make([]int, len(points)) - for i := 0; i < nbInstances; i++ { + for i := 0; i < nbPolynomials; i++ { nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } @@ -90,16 +90,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + res.ClaimedValues = make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element accGamma.SetOne() - ztMinusSi := make([][]fr.Element, nbInstances) - ri := make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + ztMinusSi := make([][]fr.Element, nbPolynomials) + ri := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { for j := 0; j < len(points[i]); j++ { 
res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) diff --git a/ecc/bw6-756/shplonk/shplonk.go b/ecc/bw6-756/shplonk/shplonk.go index 252c034fca..dffbf7f409 100644 --- a/ecc/bw6-756/shplonk/shplonk.go +++ b/ecc/bw6-756/shplonk/shplonk.go @@ -55,7 +55,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E var res OpeningProof - nbInstances := len(polynomials) + nbPolynomials := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } @@ -81,7 +81,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E } nbPoints := 0 sizeSi := make([]int, len(points)) - for i := 0; i < nbInstances; i++ { + for i := 0; i < nbPolynomials; i++ { nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } @@ -90,16 +90,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + res.ClaimedValues = make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element accGamma.SetOne() - ztMinusSi := make([][]fr.Element, nbInstances) - ri := make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + ztMinusSi := make([][]fr.Element, nbPolynomials) + ri := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { for j := 0; j < len(points[i]); j++ { res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) diff --git a/ecc/bw6-761/shplonk/shplonk.go b/ecc/bw6-761/shplonk/shplonk.go index 9e6e90b00e..49c4e2015e 100644 --- a/ecc/bw6-761/shplonk/shplonk.go +++ b/ecc/bw6-761/shplonk/shplonk.go @@ -55,7 +55,7 @@ func BatchOpen(polynomials 
[][]fr.Element, digests []kzg.Digest, points [][]fr.E var res OpeningProof - nbInstances := len(polynomials) + nbPolynomials := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } @@ -81,7 +81,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E } nbPoints := 0 sizeSi := make([]int, len(points)) - for i := 0; i < nbInstances; i++ { + for i := 0; i < nbPolynomials; i++ { nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } @@ -90,16 +90,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + res.ClaimedValues = make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element accGamma.SetOne() - ztMinusSi := make([][]fr.Element, nbInstances) - ri := make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + ztMinusSi := make([][]fr.Element, nbPolynomials) + ri := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { for j := 0; j < len(points[i]); j++ { res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) diff --git a/internal/generator/shplonk/template/shplonk.go.tmpl b/internal/generator/shplonk/template/shplonk.go.tmpl index 9d6a153c2d..76689457d6 100644 --- a/internal/generator/shplonk/template/shplonk.go.tmpl +++ b/internal/generator/shplonk/template/shplonk.go.tmpl @@ -37,7 +37,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E var res OpeningProof - nbInstances := len(polynomials) + nbPolynomials := len(polynomials) if len(polynomials) != len(points) { return res, ErrInvalidNumberOfPoints } @@ 
-63,7 +63,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E } nbPoints := 0 sizeSi := make([]int, len(points)) - for i := 0; i < nbInstances; i++ { + for i := 0; i < nbPolynomials; i++ { nbPoints += len(points[i]) sizeSi[i] = len(points[i]) } @@ -72,16 +72,16 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E bufMaxSizePolynomials := make([]fr.Element, maxSizePolys) bufTotalSize := make([]fr.Element, totalSize) f := make([]fr.Element, totalSize) // cf https://eprint.iacr.org/2020/081.pdf page 11 for notation - res.ClaimedValues = make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + res.ClaimedValues = make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { res.ClaimedValues[i] = make([]fr.Element, len(points[i])) } var accGamma fr.Element accGamma.SetOne() - ztMinusSi := make([][]fr.Element, nbInstances) - ri := make([][]fr.Element, nbInstances) - for i := 0; i < nbInstances; i++ { + ztMinusSi := make([][]fr.Element, nbPolynomials) + ri := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { for j := 0; j < len(points[i]); j++ { res.ClaimedValues[i][j] = eval(polynomials[i], points[i][j]) From 3d938b71b89ae053cfb300d213da1528c2137dbc Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Wed, 18 Sep 2024 19:18:19 +0200 Subject: [PATCH 51/66] fix: randomised size poly --- ecc/bls12-377/shplonk/shplonk_test.go | 4 +++- ecc/bls12-378/shplonk/shplonk_test.go | 4 +++- ecc/bls12-381/shplonk/shplonk_test.go | 4 +++- ecc/bls24-315/shplonk/shplonk_test.go | 4 +++- ecc/bls24-317/shplonk/shplonk_test.go | 4 +++- ecc/bn254/shplonk/shplonk_test.go | 4 +++- ecc/bw6-633/shplonk/shplonk_test.go | 4 +++- ecc/bw6-756/shplonk/shplonk_test.go | 4 +++- ecc/bw6-761/shplonk/shplonk_test.go | 4 +++- internal/generator/shplonk/template/shplonk.test.go.tmpl | 4 +++- 10 files changed, 30 insertions(+), 10 deletions(-) diff --git 
a/ecc/bls12-377/shplonk/shplonk_test.go b/ecc/bls12-377/shplonk/shplonk_test.go index b956361f86..602ee3f9d0 100644 --- a/ecc/bls12-377/shplonk/shplonk_test.go +++ b/ecc/bls12-377/shplonk/shplonk_test.go @@ -19,6 +19,7 @@ package shplonk import ( "crypto/sha256" "math/big" + "math/rand" "testing" "github.com/consensys/gnark-crypto/ecc" @@ -35,6 +36,7 @@ func init() { const srsSize = 230 var frAlpha fr.Element frAlpha.SetRandom() + bAlpha = big.NewInt(0) frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) @@ -50,7 +52,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = 10 + i + sizePoly[i] = rand.Intn(10) + 2 } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bls12-378/shplonk/shplonk_test.go b/ecc/bls12-378/shplonk/shplonk_test.go index 5bbb9fc122..34e4127cc0 100644 --- a/ecc/bls12-378/shplonk/shplonk_test.go +++ b/ecc/bls12-378/shplonk/shplonk_test.go @@ -19,6 +19,7 @@ package shplonk import ( "crypto/sha256" "math/big" + "math/rand" "testing" "github.com/consensys/gnark-crypto/ecc" @@ -35,6 +36,7 @@ func init() { const srsSize = 230 var frAlpha fr.Element frAlpha.SetRandom() + bAlpha = big.NewInt(0) frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) @@ -50,7 +52,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = 10 + i + sizePoly[i] = rand.Intn(10) + 2 } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bls12-381/shplonk/shplonk_test.go b/ecc/bls12-381/shplonk/shplonk_test.go index 67eba49ba3..05fb93a3f6 100644 --- a/ecc/bls12-381/shplonk/shplonk_test.go +++ b/ecc/bls12-381/shplonk/shplonk_test.go @@ -19,6 +19,7 @@ package shplonk import ( "crypto/sha256" "math/big" + "math/rand" "testing" "github.com/consensys/gnark-crypto/ecc" @@ -35,6 
+36,7 @@ func init() { const srsSize = 230 var frAlpha fr.Element frAlpha.SetRandom() + bAlpha = big.NewInt(0) frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) @@ -50,7 +52,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = 10 + i + sizePoly[i] = rand.Intn(10) + 2 } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bls24-315/shplonk/shplonk_test.go b/ecc/bls24-315/shplonk/shplonk_test.go index 93b6636857..a8d35f5c00 100644 --- a/ecc/bls24-315/shplonk/shplonk_test.go +++ b/ecc/bls24-315/shplonk/shplonk_test.go @@ -19,6 +19,7 @@ package shplonk import ( "crypto/sha256" "math/big" + "math/rand" "testing" "github.com/consensys/gnark-crypto/ecc" @@ -35,6 +36,7 @@ func init() { const srsSize = 230 var frAlpha fr.Element frAlpha.SetRandom() + bAlpha = big.NewInt(0) frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) @@ -50,7 +52,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = 10 + i + sizePoly[i] = rand.Intn(10) + 2 } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bls24-317/shplonk/shplonk_test.go b/ecc/bls24-317/shplonk/shplonk_test.go index 7afffeb886..9e6d76ded0 100644 --- a/ecc/bls24-317/shplonk/shplonk_test.go +++ b/ecc/bls24-317/shplonk/shplonk_test.go @@ -19,6 +19,7 @@ package shplonk import ( "crypto/sha256" "math/big" + "math/rand" "testing" "github.com/consensys/gnark-crypto/ecc" @@ -35,6 +36,7 @@ func init() { const srsSize = 230 var frAlpha fr.Element frAlpha.SetRandom() + bAlpha = big.NewInt(0) frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) @@ -50,7 +52,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - 
sizePoly[i] = 10 + i + sizePoly[i] = rand.Intn(10) + 2 } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 7b14e5544f..d0e4ce0695 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -19,6 +19,7 @@ package shplonk import ( "crypto/sha256" "math/big" + "math/rand" "testing" "github.com/consensys/gnark-crypto/ecc" @@ -35,6 +36,7 @@ func init() { const srsSize = 230 var frAlpha fr.Element frAlpha.SetRandom() + bAlpha = big.NewInt(0) frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) @@ -50,7 +52,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = 10 + i + sizePoly[i] = rand.Intn(10) + 2 } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bw6-633/shplonk/shplonk_test.go b/ecc/bw6-633/shplonk/shplonk_test.go index c813a1bf53..ea171cbefb 100644 --- a/ecc/bw6-633/shplonk/shplonk_test.go +++ b/ecc/bw6-633/shplonk/shplonk_test.go @@ -19,6 +19,7 @@ package shplonk import ( "crypto/sha256" "math/big" + "math/rand" "testing" "github.com/consensys/gnark-crypto/ecc" @@ -35,6 +36,7 @@ func init() { const srsSize = 230 var frAlpha fr.Element frAlpha.SetRandom() + bAlpha = big.NewInt(0) frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) @@ -50,7 +52,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = 10 + i + sizePoly[i] = rand.Intn(10) + 2 } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bw6-756/shplonk/shplonk_test.go b/ecc/bw6-756/shplonk/shplonk_test.go index c72b57d7ec..4643d87606 100644 --- a/ecc/bw6-756/shplonk/shplonk_test.go +++ b/ecc/bw6-756/shplonk/shplonk_test.go @@ -19,6 +19,7 @@ package shplonk 
import ( "crypto/sha256" "math/big" + "math/rand" "testing" "github.com/consensys/gnark-crypto/ecc" @@ -35,6 +36,7 @@ func init() { const srsSize = 230 var frAlpha fr.Element frAlpha.SetRandom() + bAlpha = big.NewInt(0) frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) @@ -50,7 +52,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = 10 + i + sizePoly[i] = rand.Intn(10) + 2 } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bw6-761/shplonk/shplonk_test.go b/ecc/bw6-761/shplonk/shplonk_test.go index 95702777e1..ca08b62e53 100644 --- a/ecc/bw6-761/shplonk/shplonk_test.go +++ b/ecc/bw6-761/shplonk/shplonk_test.go @@ -19,6 +19,7 @@ package shplonk import ( "crypto/sha256" "math/big" + "math/rand" "testing" "github.com/consensys/gnark-crypto/ecc" @@ -35,6 +36,7 @@ func init() { const srsSize = 230 var frAlpha fr.Element frAlpha.SetRandom() + bAlpha = big.NewInt(0) frAlpha.BigInt(bAlpha) var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) @@ -50,7 +52,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = 10 + i + sizePoly[i] = rand.Intn(10) + 2 } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/internal/generator/shplonk/template/shplonk.test.go.tmpl b/internal/generator/shplonk/template/shplonk.test.go.tmpl index d6e8302b93..7d642ed8fa 100644 --- a/internal/generator/shplonk/template/shplonk.test.go.tmpl +++ b/internal/generator/shplonk/template/shplonk.test.go.tmpl @@ -2,6 +2,7 @@ import ( "crypto/sha256" "math/big" "testing" + "math/rand" "github.com/consensys/gnark-crypto/ecc" "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" @@ -18,6 +19,7 @@ func init() { const srsSize = 230 var frAlpha fr.Element frAlpha.SetRandom() + bAlpha = big.NewInt(0) frAlpha.BigInt(bAlpha) 
var err error testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) @@ -33,7 +35,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = 10 + i + sizePoly[i] = rand.Intn(10) + 2 } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { From 86aed27ee6f5c66223a521bb40adb923b4e01a9e Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Wed, 18 Sep 2024 19:28:54 +0200 Subject: [PATCH 52/66] fix: check pairing error --- ecc/bls12-377/shplonk/shplonk.go | 4 ++++ ecc/bls12-378/shplonk/shplonk.go | 4 ++++ ecc/bls12-381/shplonk/shplonk.go | 4 ++++ ecc/bls24-315/shplonk/shplonk.go | 4 ++++ ecc/bls24-317/shplonk/shplonk.go | 4 ++++ ecc/bn254/shplonk/shplonk.go | 4 ++++ ecc/bw6-633/shplonk/shplonk.go | 4 ++++ ecc/bw6-756/shplonk/shplonk.go | 4 ++++ ecc/bw6-761/shplonk/shplonk.go | 4 ++++ internal/generator/shplonk/template/shplonk.go.tmpl | 4 ++++ 10 files changed, 40 insertions(+) diff --git a/ecc/bls12-377/shplonk/shplonk.go b/ecc/bls12-377/shplonk/shplonk.go index 25ab085deb..55619bac39 100644 --- a/ecc/bls12-377/shplonk/shplonk.go +++ b/ecc/bls12-377/shplonk/shplonk.go @@ -32,6 +32,7 @@ var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") + ErrPairingCheck = errors.New("pairing product is not 1") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -267,6 +268,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element []bls12377.G1Affine{f, proof.WPrime}, vk.Lines[:], ) + if err != nil { + return ErrPairingCheck + } if !check { return ErrVerifyOpeningProof diff --git a/ecc/bls12-378/shplonk/shplonk.go b/ecc/bls12-378/shplonk/shplonk.go index 8ee2ed9fba..7eadfcad16 100644 --- a/ecc/bls12-378/shplonk/shplonk.go +++ b/ecc/bls12-378/shplonk/shplonk.go @@ -32,6 +32,7 @@ var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") + ErrPairingCheck = errors.New("pairing product is not 1") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. @@ -267,6 +268,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element []bls12378.G1Affine{f, proof.WPrime}, vk.Lines[:], ) + if err != nil { + return ErrPairingCheck + } if !check { return ErrVerifyOpeningProof diff --git a/ecc/bls12-381/shplonk/shplonk.go b/ecc/bls12-381/shplonk/shplonk.go index 835efaa8fc..a9c2c44319 100644 --- a/ecc/bls12-381/shplonk/shplonk.go +++ b/ecc/bls12-381/shplonk/shplonk.go @@ -32,6 +32,7 @@ var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") + ErrPairingCheck = errors.New("pairing product is not 1") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -267,6 +268,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element []bls12381.G1Affine{f, proof.WPrime}, vk.Lines[:], ) + if err != nil { + return ErrPairingCheck + } if !check { return ErrVerifyOpeningProof diff --git a/ecc/bls24-315/shplonk/shplonk.go b/ecc/bls24-315/shplonk/shplonk.go index 2629d18d3d..038a0800e3 100644 --- a/ecc/bls24-315/shplonk/shplonk.go +++ b/ecc/bls24-315/shplonk/shplonk.go @@ -32,6 +32,7 @@ var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") + ErrPairingCheck = errors.New("pairing product is not 1") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. @@ -267,6 +268,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element []bls24315.G1Affine{f, proof.WPrime}, vk.Lines[:], ) + if err != nil { + return ErrPairingCheck + } if !check { return ErrVerifyOpeningProof diff --git a/ecc/bls24-317/shplonk/shplonk.go b/ecc/bls24-317/shplonk/shplonk.go index ff8f07e185..2b6c0c553d 100644 --- a/ecc/bls24-317/shplonk/shplonk.go +++ b/ecc/bls24-317/shplonk/shplonk.go @@ -32,6 +32,7 @@ var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") + ErrPairingCheck = errors.New("pairing product is not 1") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -267,6 +268,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element []bls24317.G1Affine{f, proof.WPrime}, vk.Lines[:], ) + if err != nil { + return ErrPairingCheck + } if !check { return ErrVerifyOpeningProof diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 7a86ed72e6..5e9c6d5399 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -32,6 +32,7 @@ var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") + ErrPairingCheck = errors.New("pairing product is not 1") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. @@ -267,6 +268,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element []bn254.G1Affine{f, proof.WPrime}, vk.Lines[:], ) + if err != nil { + return ErrPairingCheck + } if !check { return ErrVerifyOpeningProof diff --git a/ecc/bw6-633/shplonk/shplonk.go b/ecc/bw6-633/shplonk/shplonk.go index 91b953d4d7..c901548d01 100644 --- a/ecc/bw6-633/shplonk/shplonk.go +++ b/ecc/bw6-633/shplonk/shplonk.go @@ -32,6 +32,7 @@ var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") + ErrPairingCheck = errors.New("pairing product is not 1") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -267,6 +268,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element []bw6633.G1Affine{f, proof.WPrime}, vk.Lines[:], ) + if err != nil { + return ErrPairingCheck + } if !check { return ErrVerifyOpeningProof diff --git a/ecc/bw6-756/shplonk/shplonk.go b/ecc/bw6-756/shplonk/shplonk.go index dffbf7f409..eea8739b03 100644 --- a/ecc/bw6-756/shplonk/shplonk.go +++ b/ecc/bw6-756/shplonk/shplonk.go @@ -32,6 +32,7 @@ var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") + ErrPairingCheck = errors.New("pairing product is not 1") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. @@ -267,6 +268,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element []bw6756.G1Affine{f, proof.WPrime}, vk.Lines[:], ) + if err != nil { + return ErrPairingCheck + } if !check { return ErrVerifyOpeningProof diff --git a/ecc/bw6-761/shplonk/shplonk.go b/ecc/bw6-761/shplonk/shplonk.go index 49c4e2015e..96806b2e89 100644 --- a/ecc/bw6-761/shplonk/shplonk.go +++ b/ecc/bw6-761/shplonk/shplonk.go @@ -32,6 +32,7 @@ var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") + ErrPairingCheck = errors.New("pairing product is not 1") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. 
@@ -267,6 +268,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element []bw6761.G1Affine{f, proof.WPrime}, vk.Lines[:], ) + if err != nil { + return ErrPairingCheck + } if !check { return ErrVerifyOpeningProof diff --git a/internal/generator/shplonk/template/shplonk.go.tmpl b/internal/generator/shplonk/template/shplonk.go.tmpl index 76689457d6..0f510b4f4a 100644 --- a/internal/generator/shplonk/template/shplonk.go.tmpl +++ b/internal/generator/shplonk/template/shplonk.go.tmpl @@ -14,6 +14,7 @@ var ( ErrInvalidNumberOfPoints = errors.New("number of digests should be equal to the number of points") ErrVerifyOpeningProof = errors.New("can't verify batch opening proof") ErrInvalidNumberOfDigests = errors.New("number of digests should be equal to the number of polynomials") + ErrPairingCheck = errors.New("pairing product is not 1") ) // OpeningProof KZG proof for opening (fᵢ)_{i} at a different points (xᵢ)_{i}. @@ -249,6 +250,9 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element []{{ .CurvePackage }}.G1Affine{f, proof.WPrime}, vk.Lines[:], ) + if err!=nil { + return ErrPairingCheck + } if !check { return ErrVerifyOpeningProof From c3a70ce287ad2fe9f2f4e5ebb1b6e0fffb5ce466 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Thu, 19 Sep 2024 15:12:27 +0200 Subject: [PATCH 53/66] feat: used bufMaxSizePolynomials[:len(polynomials[i])] --- ecc/bls12-377/shplonk/shplonk.go | 4 ++-- ecc/bls12-378/shplonk/shplonk.go | 4 ++-- ecc/bls12-381/shplonk/shplonk.go | 4 ++-- ecc/bls24-315/shplonk/shplonk.go | 4 ++-- ecc/bls24-317/shplonk/shplonk.go | 4 ++-- ecc/bn254/shplonk/shplonk.go | 4 ++-- ecc/bw6-633/shplonk/shplonk.go | 4 ++-- ecc/bw6-756/shplonk/shplonk.go | 4 ++-- ecc/bw6-761/shplonk/shplonk.go | 4 ++-- internal/generator/shplonk/template/shplonk.go.tmpl | 2 +- 10 files changed, 19 insertions(+), 19 deletions(-) diff --git a/ecc/bls12-377/shplonk/shplonk.go b/ecc/bls12-377/shplonk/shplonk.go index 
55619bac39..197d994c34 100644 --- a/ecc/bls12-377/shplonk/shplonk.go +++ b/ecc/bls12-377/shplonk/shplonk.go @@ -146,8 +146,8 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) - mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials[:len(polynomials[i])], gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } diff --git a/ecc/bls12-378/shplonk/shplonk.go b/ecc/bls12-378/shplonk/shplonk.go index 7eadfcad16..afcdbcfe91 100644 --- a/ecc/bls12-378/shplonk/shplonk.go +++ b/ecc/bls12-378/shplonk/shplonk.go @@ -146,8 +146,8 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) - mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials[:len(polynomials[i])], gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } diff --git a/ecc/bls12-381/shplonk/shplonk.go b/ecc/bls12-381/shplonk/shplonk.go index a9c2c44319..21f226dafa 100644 --- a/ecc/bls12-381/shplonk/shplonk.go +++ b/ecc/bls12-381/shplonk/shplonk.go @@ -146,8 +146,8 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) - mulByConstant(bufMaxSizePolynomials, 
gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials[:len(polynomials[i])], gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } diff --git a/ecc/bls24-315/shplonk/shplonk.go b/ecc/bls24-315/shplonk/shplonk.go index 038a0800e3..4c439a8137 100644 --- a/ecc/bls24-315/shplonk/shplonk.go +++ b/ecc/bls24-315/shplonk/shplonk.go @@ -146,8 +146,8 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) - mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials[:len(polynomials[i])], gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } diff --git a/ecc/bls24-317/shplonk/shplonk.go b/ecc/bls24-317/shplonk/shplonk.go index 2b6c0c553d..b8a9795369 100644 --- a/ecc/bls24-317/shplonk/shplonk.go +++ b/ecc/bls24-317/shplonk/shplonk.go @@ -146,8 +146,8 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) - mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials[:len(polynomials[i])], gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } diff --git a/ecc/bn254/shplonk/shplonk.go 
b/ecc/bn254/shplonk/shplonk.go index 5e9c6d5399..98db94c5a9 100644 --- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -146,8 +146,8 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) - mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials[:len(polynomials[i])], gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } diff --git a/ecc/bw6-633/shplonk/shplonk.go b/ecc/bw6-633/shplonk/shplonk.go index c901548d01..c2962f210a 100644 --- a/ecc/bw6-633/shplonk/shplonk.go +++ b/ecc/bw6-633/shplonk/shplonk.go @@ -146,8 +146,8 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) - mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials[:len(polynomials[i])], gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } diff --git a/ecc/bw6-756/shplonk/shplonk.go b/ecc/bw6-756/shplonk/shplonk.go index eea8739b03..5864904a44 100644 --- a/ecc/bw6-756/shplonk/shplonk.go +++ b/ecc/bw6-756/shplonk/shplonk.go @@ -146,8 +146,8 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) - 
mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials[:len(polynomials[i])], gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } diff --git a/ecc/bw6-761/shplonk/shplonk.go b/ecc/bw6-761/shplonk/shplonk.go index 96806b2e89..0880e5be45 100644 --- a/ecc/bw6-761/shplonk/shplonk.go +++ b/ecc/bw6-761/shplonk/shplonk.go @@ -146,8 +146,8 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) - bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) - mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials[:len(polynomials[i])], gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } diff --git a/internal/generator/shplonk/template/shplonk.go.tmpl b/internal/generator/shplonk/template/shplonk.go.tmpl index 0f510b4f4a..253ed25bb4 100644 --- a/internal/generator/shplonk/template/shplonk.go.tmpl +++ b/internal/generator/shplonk/template/shplonk.go.tmpl @@ -129,7 +129,7 @@ func BatchOpen(polynomials [][]fr.Element, digests []kzg.Digest, points [][]fr.E copy(bufMaxSizePolynomials, polynomials[i]) riz := eval(ri[i], z) bufMaxSizePolynomials[0].Sub(&bufMaxSizePolynomials[0], &riz) // (fᵢ-rᵢ(z)) - mulByConstant(bufMaxSizePolynomials, gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) + mulByConstant(bufMaxSizePolynomials[:len(polynomials[i])], gammaiZtMinusSiZ) // γⁱZ_{T\Sᵢ}(z)(fᵢ-rᵢ(z)) for j := 0; j < len(bufMaxSizePolynomials); j++ { l[j].Add(&l[j], &bufMaxSizePolynomials[j]) } From 
699c2138f8258591a1fdbadbf6505ec19051497c Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Thu, 19 Sep 2024 15:28:03 +0200 Subject: [PATCH 54/66] feat: renaming gamma -> challenge --- ecc/bls12-377/shplonk/shplonk.go | 8 ++++---- ecc/bls12-378/shplonk/shplonk.go | 8 ++++---- ecc/bls12-381/shplonk/shplonk.go | 8 ++++---- ecc/bls24-315/shplonk/shplonk.go | 8 ++++---- ecc/bls24-317/shplonk/shplonk.go | 8 ++++---- ecc/bn254/shplonk/shplonk.go | 8 ++++---- ecc/bw6-633/shplonk/shplonk.go | 8 ++++---- ecc/bw6-756/shplonk/shplonk.go | 8 ++++---- ecc/bw6-761/shplonk/shplonk.go | 8 ++++---- internal/generator/shplonk/template/shplonk.go.tmpl | 8 ++++---- 10 files changed, 40 insertions(+), 40 deletions(-) diff --git a/ecc/bls12-377/shplonk/shplonk.go b/ecc/bls12-377/shplonk/shplonk.go index 197d994c34..f2faa6d860 100644 --- a/ecc/bls12-377/shplonk/shplonk.go +++ b/ecc/bls12-377/shplonk/shplonk.go @@ -303,14 +303,14 @@ func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t } } - gammaByte, err := t.ComputeChallenge(name) + challengeByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } - var gamma fr.Element - gamma.SetBytes(gammaByte) + var challenge fr.Element + challenge.SetBytes(challengeByte) - return gamma, nil + return challenge, nil } // ------------------------------ diff --git a/ecc/bls12-378/shplonk/shplonk.go b/ecc/bls12-378/shplonk/shplonk.go index afcdbcfe91..7742ad48a2 100644 --- a/ecc/bls12-378/shplonk/shplonk.go +++ b/ecc/bls12-378/shplonk/shplonk.go @@ -303,14 +303,14 @@ func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t } } - gammaByte, err := t.ComputeChallenge(name) + challengeByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } - var gamma fr.Element - gamma.SetBytes(gammaByte) + var challenge fr.Element + challenge.SetBytes(challengeByte) - return gamma, nil + return challenge, nil } // ------------------------------ diff --git 
a/ecc/bls12-381/shplonk/shplonk.go b/ecc/bls12-381/shplonk/shplonk.go index 21f226dafa..d5adfa3f07 100644 --- a/ecc/bls12-381/shplonk/shplonk.go +++ b/ecc/bls12-381/shplonk/shplonk.go @@ -303,14 +303,14 @@ func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t } } - gammaByte, err := t.ComputeChallenge(name) + challengeByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } - var gamma fr.Element - gamma.SetBytes(gammaByte) + var challenge fr.Element + challenge.SetBytes(challengeByte) - return gamma, nil + return challenge, nil } // ------------------------------ diff --git a/ecc/bls24-315/shplonk/shplonk.go b/ecc/bls24-315/shplonk/shplonk.go index 4c439a8137..825c756cf3 100644 --- a/ecc/bls24-315/shplonk/shplonk.go +++ b/ecc/bls24-315/shplonk/shplonk.go @@ -303,14 +303,14 @@ func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t } } - gammaByte, err := t.ComputeChallenge(name) + challengeByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } - var gamma fr.Element - gamma.SetBytes(gammaByte) + var challenge fr.Element + challenge.SetBytes(challengeByte) - return gamma, nil + return challenge, nil } // ------------------------------ diff --git a/ecc/bls24-317/shplonk/shplonk.go b/ecc/bls24-317/shplonk/shplonk.go index b8a9795369..33640bb695 100644 --- a/ecc/bls24-317/shplonk/shplonk.go +++ b/ecc/bls24-317/shplonk/shplonk.go @@ -303,14 +303,14 @@ func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t } } - gammaByte, err := t.ComputeChallenge(name) + challengeByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } - var gamma fr.Element - gamma.SetBytes(gammaByte) + var challenge fr.Element + challenge.SetBytes(challengeByte) - return gamma, nil + return challenge, nil } // ------------------------------ diff --git a/ecc/bn254/shplonk/shplonk.go b/ecc/bn254/shplonk/shplonk.go index 98db94c5a9..90c7ea5bb1 100644 
--- a/ecc/bn254/shplonk/shplonk.go +++ b/ecc/bn254/shplonk/shplonk.go @@ -303,14 +303,14 @@ func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t } } - gammaByte, err := t.ComputeChallenge(name) + challengeByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } - var gamma fr.Element - gamma.SetBytes(gammaByte) + var challenge fr.Element + challenge.SetBytes(challengeByte) - return gamma, nil + return challenge, nil } // ------------------------------ diff --git a/ecc/bw6-633/shplonk/shplonk.go b/ecc/bw6-633/shplonk/shplonk.go index c2962f210a..af23c6ec58 100644 --- a/ecc/bw6-633/shplonk/shplonk.go +++ b/ecc/bw6-633/shplonk/shplonk.go @@ -303,14 +303,14 @@ func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t } } - gammaByte, err := t.ComputeChallenge(name) + challengeByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } - var gamma fr.Element - gamma.SetBytes(gammaByte) + var challenge fr.Element + challenge.SetBytes(challengeByte) - return gamma, nil + return challenge, nil } // ------------------------------ diff --git a/ecc/bw6-756/shplonk/shplonk.go b/ecc/bw6-756/shplonk/shplonk.go index 5864904a44..c3ac6d7330 100644 --- a/ecc/bw6-756/shplonk/shplonk.go +++ b/ecc/bw6-756/shplonk/shplonk.go @@ -303,14 +303,14 @@ func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t } } - gammaByte, err := t.ComputeChallenge(name) + challengeByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } - var gamma fr.Element - gamma.SetBytes(gammaByte) + var challenge fr.Element + challenge.SetBytes(challengeByte) - return gamma, nil + return challenge, nil } // ------------------------------ diff --git a/ecc/bw6-761/shplonk/shplonk.go b/ecc/bw6-761/shplonk/shplonk.go index 0880e5be45..b02bcd5451 100644 --- a/ecc/bw6-761/shplonk/shplonk.go +++ b/ecc/bw6-761/shplonk/shplonk.go @@ -303,14 +303,14 @@ func deriveChallenge(name 
string, points [][]fr.Element, digests []kzg.Digest, t } } - gammaByte, err := t.ComputeChallenge(name) + challengeByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } - var gamma fr.Element - gamma.SetBytes(gammaByte) + var challenge fr.Element + challenge.SetBytes(challengeByte) - return gamma, nil + return challenge, nil } // ------------------------------ diff --git a/internal/generator/shplonk/template/shplonk.go.tmpl b/internal/generator/shplonk/template/shplonk.go.tmpl index 253ed25bb4..94f83cc92f 100644 --- a/internal/generator/shplonk/template/shplonk.go.tmpl +++ b/internal/generator/shplonk/template/shplonk.go.tmpl @@ -285,14 +285,14 @@ func deriveChallenge(name string, points [][]fr.Element, digests []kzg.Digest, t } } - gammaByte, err := t.ComputeChallenge(name) + challengeByte, err := t.ComputeChallenge(name) if err != nil { return fr.Element{}, err } - var gamma fr.Element - gamma.SetBytes(gammaByte) + var challenge fr.Element + challenge.SetBytes(challengeByte) - return gamma, nil + return challenge, nil } // ------------------------------ From 803e9ce9028c61b94a3adf43b369275a6307ab4f Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 20 Sep 2024 10:17:27 +0200 Subject: [PATCH 55/66] feat: add round trip serialisation test --- ecc/bls12-377/shplonk/marshal.go | 2 +- ecc/bls12-377/shplonk/shplonk_test.go | 20 +++++++++++++++++++ ecc/bls12-378/shplonk/marshal.go | 2 +- ecc/bls12-378/shplonk/shplonk_test.go | 20 +++++++++++++++++++ ecc/bls12-381/shplonk/marshal.go | 2 +- ecc/bls12-381/shplonk/shplonk_test.go | 20 +++++++++++++++++++ ecc/bls24-315/shplonk/marshal.go | 2 +- ecc/bls24-315/shplonk/shplonk_test.go | 20 +++++++++++++++++++ ecc/bls24-317/shplonk/marshal.go | 2 +- ecc/bls24-317/shplonk/shplonk_test.go | 20 +++++++++++++++++++ ecc/bn254/shplonk/marshal.go | 2 +- ecc/bn254/shplonk/shplonk_test.go | 20 +++++++++++++++++++ ecc/bw6-633/shplonk/marshal.go | 2 +- ecc/bw6-633/shplonk/shplonk_test.go | 20 
+++++++++++++++++++ ecc/bw6-756/shplonk/marshal.go | 2 +- ecc/bw6-756/shplonk/shplonk_test.go | 20 +++++++++++++++++++ ecc/bw6-761/shplonk/marshal.go | 2 +- ecc/bw6-761/shplonk/shplonk_test.go | 20 +++++++++++++++++++ .../shplonk/template/marshal.go.tmpl | 2 +- .../shplonk/template/shplonk.test.go.tmpl | 20 +++++++++++++++++++ 20 files changed, 210 insertions(+), 10 deletions(-) diff --git a/ecc/bls12-377/shplonk/marshal.go b/ecc/bls12-377/shplonk/marshal.go index 9e03a86d06..c2b805a8b5 100644 --- a/ecc/bls12-377/shplonk/marshal.go +++ b/ecc/bls12-377/shplonk/marshal.go @@ -29,7 +29,7 @@ func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { toDecode := []interface{}{ &proof.W, &proof.WPrime, - proof.ClaimedValues, + &proof.ClaimedValues, } for _, v := range toDecode { diff --git a/ecc/bls12-377/shplonk/shplonk_test.go b/ecc/bls12-377/shplonk/shplonk_test.go index 602ee3f9d0..9333499557 100644 --- a/ecc/bls12-377/shplonk/shplonk_test.go +++ b/ecc/bls12-377/shplonk/shplonk_test.go @@ -23,8 +23,10 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" "github.com/stretchr/testify/require" ) @@ -45,6 +47,24 @@ func init() { } } +func TestSerialization(t *testing.T) { + + _, _, g, _ := bls12377.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + func TestOpening(t *testing.T) { assert := require.New(t) diff --git a/ecc/bls12-378/shplonk/marshal.go b/ecc/bls12-378/shplonk/marshal.go 
index 6774448de5..bbe0b8d9e1 100644 --- a/ecc/bls12-378/shplonk/marshal.go +++ b/ecc/bls12-378/shplonk/marshal.go @@ -29,7 +29,7 @@ func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { toDecode := []interface{}{ &proof.W, &proof.WPrime, - proof.ClaimedValues, + &proof.ClaimedValues, } for _, v := range toDecode { diff --git a/ecc/bls12-378/shplonk/shplonk_test.go b/ecc/bls12-378/shplonk/shplonk_test.go index 34e4127cc0..4b73e50e4c 100644 --- a/ecc/bls12-378/shplonk/shplonk_test.go +++ b/ecc/bls12-378/shplonk/shplonk_test.go @@ -23,8 +23,10 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-378" "github.com/consensys/gnark-crypto/ecc/bls12-378/fr" "github.com/consensys/gnark-crypto/ecc/bls12-378/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" "github.com/stretchr/testify/require" ) @@ -45,6 +47,24 @@ func init() { } } +func TestSerialization(t *testing.T) { + + _, _, g, _ := bls12378.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + func TestOpening(t *testing.T) { assert := require.New(t) diff --git a/ecc/bls12-381/shplonk/marshal.go b/ecc/bls12-381/shplonk/marshal.go index b609b73266..e013c2d3ef 100644 --- a/ecc/bls12-381/shplonk/marshal.go +++ b/ecc/bls12-381/shplonk/marshal.go @@ -29,7 +29,7 @@ func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { toDecode := []interface{}{ &proof.W, &proof.WPrime, - proof.ClaimedValues, + &proof.ClaimedValues, } for _, v := range toDecode { diff --git a/ecc/bls12-381/shplonk/shplonk_test.go b/ecc/bls12-381/shplonk/shplonk_test.go index 05fb93a3f6..b007b75590 100644 --- 
a/ecc/bls12-381/shplonk/shplonk_test.go +++ b/ecc/bls12-381/shplonk/shplonk_test.go @@ -23,8 +23,10 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" "github.com/stretchr/testify/require" ) @@ -45,6 +47,24 @@ func init() { } } +func TestSerialization(t *testing.T) { + + _, _, g, _ := bls12381.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + func TestOpening(t *testing.T) { assert := require.New(t) diff --git a/ecc/bls24-315/shplonk/marshal.go b/ecc/bls24-315/shplonk/marshal.go index a57a90679f..a936f40bbe 100644 --- a/ecc/bls24-315/shplonk/marshal.go +++ b/ecc/bls24-315/shplonk/marshal.go @@ -29,7 +29,7 @@ func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { toDecode := []interface{}{ &proof.W, &proof.WPrime, - proof.ClaimedValues, + &proof.ClaimedValues, } for _, v := range toDecode { diff --git a/ecc/bls24-315/shplonk/shplonk_test.go b/ecc/bls24-315/shplonk/shplonk_test.go index a8d35f5c00..c97b6dda88 100644 --- a/ecc/bls24-315/shplonk/shplonk_test.go +++ b/ecc/bls24-315/shplonk/shplonk_test.go @@ -23,8 +23,10 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" "github.com/stretchr/testify/require" ) @@ -45,6 +47,24 @@ func init() { } } +func 
TestSerialization(t *testing.T) { + + _, _, g, _ := bls24315.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + func TestOpening(t *testing.T) { assert := require.New(t) diff --git a/ecc/bls24-317/shplonk/marshal.go b/ecc/bls24-317/shplonk/marshal.go index b69c0946ca..f3f4520ac2 100644 --- a/ecc/bls24-317/shplonk/marshal.go +++ b/ecc/bls24-317/shplonk/marshal.go @@ -29,7 +29,7 @@ func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { toDecode := []interface{}{ &proof.W, &proof.WPrime, - proof.ClaimedValues, + &proof.ClaimedValues, } for _, v := range toDecode { diff --git a/ecc/bls24-317/shplonk/shplonk_test.go b/ecc/bls24-317/shplonk/shplonk_test.go index 9e6d76ded0..bd89089b78 100644 --- a/ecc/bls24-317/shplonk/shplonk_test.go +++ b/ecc/bls24-317/shplonk/shplonk_test.go @@ -23,8 +23,10 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" "github.com/stretchr/testify/require" ) @@ -45,6 +47,24 @@ func init() { } } +func TestSerialization(t *testing.T) { + + _, _, g, _ := bls24317.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", 
testutils.SerializationRoundTrip(&proof)) +} + func TestOpening(t *testing.T) { assert := require.New(t) diff --git a/ecc/bn254/shplonk/marshal.go b/ecc/bn254/shplonk/marshal.go index 050e1d6fdf..5da8c732a5 100644 --- a/ecc/bn254/shplonk/marshal.go +++ b/ecc/bn254/shplonk/marshal.go @@ -29,7 +29,7 @@ func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { toDecode := []interface{}{ &proof.W, &proof.WPrime, - proof.ClaimedValues, + &proof.ClaimedValues, } for _, v := range toDecode { diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index d0e4ce0695..88fa11fc0a 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -23,8 +23,10 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" "github.com/stretchr/testify/require" ) @@ -45,6 +47,24 @@ func init() { } } +func TestSerialization(t *testing.T) { + + _, _, g, _ := bn254.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + func TestOpening(t *testing.T) { assert := require.New(t) diff --git a/ecc/bw6-633/shplonk/marshal.go b/ecc/bw6-633/shplonk/marshal.go index 2c44c28a65..80661c53db 100644 --- a/ecc/bw6-633/shplonk/marshal.go +++ b/ecc/bw6-633/shplonk/marshal.go @@ -29,7 +29,7 @@ func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { toDecode := []interface{}{ &proof.W, &proof.WPrime, - proof.ClaimedValues, + &proof.ClaimedValues, } for _, v := range toDecode { diff 
--git a/ecc/bw6-633/shplonk/shplonk_test.go b/ecc/bw6-633/shplonk/shplonk_test.go index ea171cbefb..dcb46ffeea 100644 --- a/ecc/bw6-633/shplonk/shplonk_test.go +++ b/ecc/bw6-633/shplonk/shplonk_test.go @@ -23,8 +23,10 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" "github.com/stretchr/testify/require" ) @@ -45,6 +47,24 @@ func init() { } } +func TestSerialization(t *testing.T) { + + _, _, g, _ := bw6633.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + func TestOpening(t *testing.T) { assert := require.New(t) diff --git a/ecc/bw6-756/shplonk/marshal.go b/ecc/bw6-756/shplonk/marshal.go index e7b0bf4ed8..8977a57254 100644 --- a/ecc/bw6-756/shplonk/marshal.go +++ b/ecc/bw6-756/shplonk/marshal.go @@ -29,7 +29,7 @@ func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { toDecode := []interface{}{ &proof.W, &proof.WPrime, - proof.ClaimedValues, + &proof.ClaimedValues, } for _, v := range toDecode { diff --git a/ecc/bw6-756/shplonk/shplonk_test.go b/ecc/bw6-756/shplonk/shplonk_test.go index 4643d87606..ce1c480464 100644 --- a/ecc/bw6-756/shplonk/shplonk_test.go +++ b/ecc/bw6-756/shplonk/shplonk_test.go @@ -23,8 +23,10 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-756" "github.com/consensys/gnark-crypto/ecc/bw6-756/fr" "github.com/consensys/gnark-crypto/ecc/bw6-756/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" 
"github.com/stretchr/testify/require" ) @@ -45,6 +47,24 @@ func init() { } } +func TestSerialization(t *testing.T) { + + _, _, g, _ := bw6756.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + func TestOpening(t *testing.T) { assert := require.New(t) diff --git a/ecc/bw6-761/shplonk/marshal.go b/ecc/bw6-761/shplonk/marshal.go index f931b1dd51..6e03808eeb 100644 --- a/ecc/bw6-761/shplonk/marshal.go +++ b/ecc/bw6-761/shplonk/marshal.go @@ -29,7 +29,7 @@ func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { toDecode := []interface{}{ &proof.W, &proof.WPrime, - proof.ClaimedValues, + &proof.ClaimedValues, } for _, v := range toDecode { diff --git a/ecc/bw6-761/shplonk/shplonk_test.go b/ecc/bw6-761/shplonk/shplonk_test.go index ca08b62e53..1e4ecc2d81 100644 --- a/ecc/bw6-761/shplonk/shplonk_test.go +++ b/ecc/bw6-761/shplonk/shplonk_test.go @@ -23,8 +23,10 @@ import ( "testing" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" "github.com/stretchr/testify/require" ) @@ -45,6 +47,24 @@ func init() { } } +func TestSerialization(t *testing.T) { + + _, _, g, _ := bw6761.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + 
t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + func TestOpening(t *testing.T) { assert := require.New(t) diff --git a/internal/generator/shplonk/template/marshal.go.tmpl b/internal/generator/shplonk/template/marshal.go.tmpl index 316f852ef5..98f11f61a6 100644 --- a/internal/generator/shplonk/template/marshal.go.tmpl +++ b/internal/generator/shplonk/template/marshal.go.tmpl @@ -11,7 +11,7 @@ func (proof *OpeningProof) ReadFrom(r io.Reader) (int64, error) { toDecode := []interface{}{ &proof.W, &proof.WPrime, - proof.ClaimedValues, + &proof.ClaimedValues, } for _, v := range toDecode { diff --git a/internal/generator/shplonk/template/shplonk.test.go.tmpl b/internal/generator/shplonk/template/shplonk.test.go.tmpl index 7d642ed8fa..775995c032 100644 --- a/internal/generator/shplonk/template/shplonk.test.go.tmpl +++ b/internal/generator/shplonk/template/shplonk.test.go.tmpl @@ -5,8 +5,10 @@ import ( "math/rand" "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}" "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" "github.com/stretchr/testify/require" ) @@ -28,6 +30,24 @@ func init() { } } +func TestSerialization(t *testing.T) { + + _, _, g, _ := {{ .CurvePackage }}.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + func TestOpening(t *testing.T) { assert := require.New(t) From 4072df91e46e26b561a663afe548b494658f55c9 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 20 Sep 2024 10:43:48 +0200 Subject: [PATCH 56/66] 
feat: use GeneratorFullMultiplicativeGroup --- ecc/bls12-377/fflonk/fflonk.go | 14 ++------- ecc/bls12-378/fflonk/fflonk.go | 14 ++------- ecc/bls12-381/fflonk/fflonk.go | 14 ++------- ecc/bls24-315/fflonk/fflonk.go | 14 ++------- ecc/bls24-317/fflonk/fflonk.go | 14 ++------- ecc/bn254/fflonk/fflonk.go | 14 ++------- ecc/bw6-633/fflonk/fflonk.go | 14 ++------- ecc/bw6-756/fflonk/fflonk.go | 14 ++------- ecc/bw6-761/fflonk/fflonk.go | 14 ++------- .../generator/fflonk/template/fflonk.go.tmpl | 30 ++----------------- 10 files changed, 30 insertions(+), 126 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk.go b/ecc/bls12-377/fflonk/fflonk.go index 6acd7bc79f..18c0ba6e63 100644 --- a/ecc/bls12-377/fflonk/fflonk.go +++ b/ecc/bls12-377/fflonk/fflonk.go @@ -22,12 +22,13 @@ import ( "math/big" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/fft" "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" "github.com/consensys/gnark-crypto/ecc/bls12-377/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") @@ -230,7 +231,7 @@ func getIthRootOne(i int) (fr.Element, error) { if tmpBigInt.Cmp(&zeroBigInt) != 0 { return omega, ErrRootsOne } - genFrStar := getGenFrStar() + genFrStar := fft.GeneratorFullMultiplicativeGroup() tmpBigInt.SetUint64(uint64(i)) tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) omega.Exp(genFrStar, &tmpBigInt) @@ -280,12 +281,3 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } - -// getGenFrStar 
returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(22) - - return res -} diff --git a/ecc/bls12-378/fflonk/fflonk.go b/ecc/bls12-378/fflonk/fflonk.go index 2397f3fc77..bea7e4876a 100644 --- a/ecc/bls12-378/fflonk/fflonk.go +++ b/ecc/bls12-378/fflonk/fflonk.go @@ -22,12 +22,13 @@ import ( "math/big" "github.com/consensys/gnark-crypto/ecc/bls12-378/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-378/fr/fft" "github.com/consensys/gnark-crypto/ecc/bls12-378/kzg" "github.com/consensys/gnark-crypto/ecc/bls12-378/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") @@ -230,7 +231,7 @@ func getIthRootOne(i int) (fr.Element, error) { if tmpBigInt.Cmp(&zeroBigInt) != 0 { return omega, ErrRootsOne } - genFrStar := getGenFrStar() + genFrStar := fft.GeneratorFullMultiplicativeGroup() tmpBigInt.SetUint64(uint64(i)) tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) omega.Exp(genFrStar, &tmpBigInt) @@ -280,12 +281,3 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } - -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(22) - - return res -} diff --git a/ecc/bls12-381/fflonk/fflonk.go b/ecc/bls12-381/fflonk/fflonk.go index 037d45885f..239a4037a6 100644 --- a/ecc/bls12-381/fflonk/fflonk.go +++ b/ecc/bls12-381/fflonk/fflonk.go @@ -22,12 +22,13 @@ import ( "math/big" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + 
"github.com/consensys/gnark-crypto/ecc/bls12-381/fr/fft" "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" "github.com/consensys/gnark-crypto/ecc/bls12-381/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") @@ -230,7 +231,7 @@ func getIthRootOne(i int) (fr.Element, error) { if tmpBigInt.Cmp(&zeroBigInt) != 0 { return omega, ErrRootsOne } - genFrStar := getGenFrStar() + genFrStar := fft.GeneratorFullMultiplicativeGroup() tmpBigInt.SetUint64(uint64(i)) tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) omega.Exp(genFrStar, &tmpBigInt) @@ -280,12 +281,3 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } - -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(7) - - return res -} diff --git a/ecc/bls24-315/fflonk/fflonk.go b/ecc/bls24-315/fflonk/fflonk.go index dd7ce2d560..d7be125897 100644 --- a/ecc/bls24-315/fflonk/fflonk.go +++ b/ecc/bls24-315/fflonk/fflonk.go @@ -22,12 +22,13 @@ import ( "math/big" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/fft" "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" "github.com/consensys/gnark-crypto/ecc/bls24-315/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of 
points") ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") @@ -230,7 +231,7 @@ func getIthRootOne(i int) (fr.Element, error) { if tmpBigInt.Cmp(&zeroBigInt) != 0 { return omega, ErrRootsOne } - genFrStar := getGenFrStar() + genFrStar := fft.GeneratorFullMultiplicativeGroup() tmpBigInt.SetUint64(uint64(i)) tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) omega.Exp(genFrStar, &tmpBigInt) @@ -280,12 +281,3 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } - -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(7) - - return res -} diff --git a/ecc/bls24-317/fflonk/fflonk.go b/ecc/bls24-317/fflonk/fflonk.go index c88257f32c..6a805b805e 100644 --- a/ecc/bls24-317/fflonk/fflonk.go +++ b/ecc/bls24-317/fflonk/fflonk.go @@ -22,12 +22,13 @@ import ( "math/big" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/fft" "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" "github.com/consensys/gnark-crypto/ecc/bls24-317/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") @@ -230,7 +231,7 @@ func getIthRootOne(i int) (fr.Element, error) { if tmpBigInt.Cmp(&zeroBigInt) != 0 { return omega, ErrRootsOne } - genFrStar := getGenFrStar() + genFrStar := 
fft.GeneratorFullMultiplicativeGroup() tmpBigInt.SetUint64(uint64(i)) tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) omega.Exp(genFrStar, &tmpBigInt) @@ -280,12 +281,3 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } - -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(7) - - return res -} diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index 77b37730ba..0c87f49288 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -22,12 +22,13 @@ import ( "math/big" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" "github.com/consensys/gnark-crypto/ecc/bn254/kzg" "github.com/consensys/gnark-crypto/ecc/bn254/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") @@ -230,7 +231,7 @@ func getIthRootOne(i int) (fr.Element, error) { if tmpBigInt.Cmp(&zeroBigInt) != 0 { return omega, ErrRootsOne } - genFrStar := getGenFrStar() + genFrStar := fft.GeneratorFullMultiplicativeGroup() tmpBigInt.SetUint64(uint64(i)) tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) omega.Exp(genFrStar, &tmpBigInt) @@ -280,12 +281,3 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } - -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(5) - - return res -} diff --git a/ecc/bw6-633/fflonk/fflonk.go b/ecc/bw6-633/fflonk/fflonk.go index c009315e66..e1e69779c2 
100644 --- a/ecc/bw6-633/fflonk/fflonk.go +++ b/ecc/bw6-633/fflonk/fflonk.go @@ -22,12 +22,13 @@ import ( "math/big" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/fft" "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" "github.com/consensys/gnark-crypto/ecc/bw6-633/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") @@ -230,7 +231,7 @@ func getIthRootOne(i int) (fr.Element, error) { if tmpBigInt.Cmp(&zeroBigInt) != 0 { return omega, ErrRootsOne } - genFrStar := getGenFrStar() + genFrStar := fft.GeneratorFullMultiplicativeGroup() tmpBigInt.SetUint64(uint64(i)) tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) omega.Exp(genFrStar, &tmpBigInt) @@ -280,12 +281,3 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } - -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(13) - - return res -} diff --git a/ecc/bw6-756/fflonk/fflonk.go b/ecc/bw6-756/fflonk/fflonk.go index 6c687b4aef..bf2604a11d 100644 --- a/ecc/bw6-756/fflonk/fflonk.go +++ b/ecc/bw6-756/fflonk/fflonk.go @@ -22,12 +22,13 @@ import ( "math/big" "github.com/consensys/gnark-crypto/ecc/bw6-756/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-756/fr/fft" "github.com/consensys/gnark-crypto/ecc/bw6-756/kzg" "github.com/consensys/gnark-crypto/ecc/bw6-756/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsOne = errors.New("fr does not contain 
all the t-th roots of 1") ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") @@ -230,7 +231,7 @@ func getIthRootOne(i int) (fr.Element, error) { if tmpBigInt.Cmp(&zeroBigInt) != 0 { return omega, ErrRootsOne } - genFrStar := getGenFrStar() + genFrStar := fft.GeneratorFullMultiplicativeGroup() tmpBigInt.SetUint64(uint64(i)) tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) omega.Exp(genFrStar, &tmpBigInt) @@ -280,12 +281,3 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } - -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(5) - - return res -} diff --git a/ecc/bw6-761/fflonk/fflonk.go b/ecc/bw6-761/fflonk/fflonk.go index fe58acc678..c5878dc8e5 100644 --- a/ecc/bw6-761/fflonk/fflonk.go +++ b/ecc/bw6-761/fflonk/fflonk.go @@ -22,12 +22,13 @@ import ( "math/big" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/fft" "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" "github.com/consensys/gnark-crypto/ecc/bw6-761/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") @@ -230,7 +231,7 @@ func getIthRootOne(i int) 
(fr.Element, error) { if tmpBigInt.Cmp(&zeroBigInt) != 0 { return omega, ErrRootsOne } - genFrStar := getGenFrStar() + genFrStar := fft.GeneratorFullMultiplicativeGroup() tmpBigInt.SetUint64(uint64(i)) tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) omega.Exp(genFrStar, &tmpBigInt) @@ -280,12 +281,3 @@ func eval(f []fr.Element, x fr.Element) fr.Element { } return y } - -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - - res.SetUint64(15) - - return res -} diff --git a/internal/generator/fflonk/template/fflonk.go.tmpl b/internal/generator/fflonk/template/fflonk.go.tmpl index e20df4a656..512febb956 100644 --- a/internal/generator/fflonk/template/fflonk.go.tmpl +++ b/internal/generator/fflonk/template/fflonk.go.tmpl @@ -4,12 +4,13 @@ import ( "math/big" "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr/fft" "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/shplonk" ) var ( - ErrRootsOne = errors.New("Fr does not contain all the t-th roots of 1") + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") @@ -212,7 +213,7 @@ func getIthRootOne(i int) (fr.Element, error) { if tmpBigInt.Cmp(&zeroBigInt) != 0 { return omega, ErrRootsOne } - genFrStar := getGenFrStar() + genFrStar := fft.GeneratorFullMultiplicativeGroup() tmpBigInt.SetUint64(uint64(i)) tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) omega.Exp(genFrStar, &tmpBigInt) @@ -261,29 +262,4 @@ func eval(f []fr.Element, x fr.Element) fr.Element { y.Mul(&y, 
&x).Add(&y, &f[i]) } return y -} - -// getGenFrStar returns a generator of Fr^{*} -func getGenFrStar() fr.Element { - var res fr.Element - {{if eq .Name "bls12-378"}} - res.SetUint64(22) - {{else if eq .Name "bls12-377"}} - res.SetUint64(22) - {{else if eq .Name "bls12-381"}} - res.SetUint64(7) - {{else if eq .Name "bn254"}} - res.SetUint64(5) - {{else if eq .Name "bw6-761"}} - res.SetUint64(15) - {{else if eq .Name "bw6-756"}} - res.SetUint64(5) - {{else if eq .Name "bw6-633"}} - res.SetUint64(13) - {{else if eq .Name "bls24-315"}} - res.SetUint64(7) - {{else if eq .Name "bls24-317"}} - res.SetUint64(7) - {{end}} - return res } \ No newline at end of file From 486f8c5c74792b9ff21f4210970fab7f57d9ffbc Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 20 Sep 2024 11:17:52 +0200 Subject: [PATCH 57/66] fix: check error --- ecc/bls12-377/fflonk/fflonk.go | 2 +- ecc/bls12-378/fflonk/fflonk.go | 2 +- ecc/bls12-381/fflonk/fflonk.go | 2 +- ecc/bls24-315/fflonk/fflonk.go | 2 +- ecc/bls24-317/fflonk/fflonk.go | 2 +- ecc/bn254/fflonk/fflonk.go | 2 +- ecc/bw6-633/fflonk/fflonk.go | 2 +- ecc/bw6-756/fflonk/fflonk.go | 2 +- ecc/bw6-761/fflonk/fflonk.go | 2 +- internal/generator/fflonk/template/fflonk.go.tmpl | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk.go b/ecc/bls12-377/fflonk/fflonk.go index 18c0ba6e63..35668a4c14 100644 --- a/ecc/bls12-377/fflonk/fflonk.go +++ b/ecc/bls12-377/fflonk/fflonk.go @@ -213,7 +213,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) 
- return nil + return err } // utils diff --git a/ecc/bls12-378/fflonk/fflonk.go b/ecc/bls12-378/fflonk/fflonk.go index bea7e4876a..917f25186e 100644 --- a/ecc/bls12-378/fflonk/fflonk.go +++ b/ecc/bls12-378/fflonk/fflonk.go @@ -213,7 +213,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) - return nil + return err } // utils diff --git a/ecc/bls12-381/fflonk/fflonk.go b/ecc/bls12-381/fflonk/fflonk.go index 239a4037a6..56b0fa1c04 100644 --- a/ecc/bls12-381/fflonk/fflonk.go +++ b/ecc/bls12-381/fflonk/fflonk.go @@ -213,7 +213,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) - return nil + return err } // utils diff --git a/ecc/bls24-315/fflonk/fflonk.go b/ecc/bls24-315/fflonk/fflonk.go index d7be125897..88e8f4c143 100644 --- a/ecc/bls24-315/fflonk/fflonk.go +++ b/ecc/bls24-315/fflonk/fflonk.go @@ -213,7 +213,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) - return nil + return err } // utils diff --git a/ecc/bls24-317/fflonk/fflonk.go b/ecc/bls24-317/fflonk/fflonk.go index 6a805b805e..e867d801cf 100644 --- a/ecc/bls24-317/fflonk/fflonk.go +++ b/ecc/bls24-317/fflonk/fflonk.go @@ -213,7 +213,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) 
- return nil + return err } // utils diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index 0c87f49288..5e4a78fa03 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -213,7 +213,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) - return nil + return err } // utils diff --git a/ecc/bw6-633/fflonk/fflonk.go b/ecc/bw6-633/fflonk/fflonk.go index e1e69779c2..043ed4d2aa 100644 --- a/ecc/bw6-633/fflonk/fflonk.go +++ b/ecc/bw6-633/fflonk/fflonk.go @@ -213,7 +213,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) - return nil + return err } // utils diff --git a/ecc/bw6-756/fflonk/fflonk.go b/ecc/bw6-756/fflonk/fflonk.go index bf2604a11d..b08a2f7f39 100644 --- a/ecc/bw6-756/fflonk/fflonk.go +++ b/ecc/bw6-756/fflonk/fflonk.go @@ -213,7 +213,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) - return nil + return err } // utils diff --git a/ecc/bw6-761/fflonk/fflonk.go b/ecc/bw6-761/fflonk/fflonk.go index c5878dc8e5..500f788e35 100644 --- a/ecc/bw6-761/fflonk/fflonk.go +++ b/ecc/bw6-761/fflonk/fflonk.go @@ -213,7 +213,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) 
- return nil + return err } // utils diff --git a/internal/generator/fflonk/template/fflonk.go.tmpl b/internal/generator/fflonk/template/fflonk.go.tmpl index 512febb956..a919355840 100644 --- a/internal/generator/fflonk/template/fflonk.go.tmpl +++ b/internal/generator/fflonk/template/fflonk.go.tmpl @@ -195,7 +195,7 @@ func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element } err = shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) - return nil + return err } // utils From 8acaa9cf9b0f5821f5f615eac7a4f22b0ee2ed1f Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 20 Sep 2024 11:50:04 +0200 Subject: [PATCH 58/66] feat: renaming --- ecc/bls12-377/fflonk/fflonk.go | 6 +++--- ecc/bls12-378/fflonk/fflonk.go | 6 +++--- ecc/bls12-381/fflonk/fflonk.go | 6 +++--- ecc/bls24-315/fflonk/fflonk.go | 6 +++--- ecc/bls24-317/fflonk/fflonk.go | 6 +++--- ecc/bn254/fflonk/fflonk.go | 6 +++--- ecc/bw6-633/fflonk/fflonk.go | 6 +++--- ecc/bw6-756/fflonk/fflonk.go | 6 +++--- ecc/bw6-761/fflonk/fflonk.go | 6 +++--- internal/generator/fflonk/template/fflonk.go.tmpl | 6 +++--- 10 files changed, 30 insertions(+), 30 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk.go b/ecc/bls12-377/fflonk/fflonk.go index 35668a4c14..df4872da0f 100644 --- a/ecc/bls12-377/fflonk/fflonk.go +++ b/ecc/bls12-377/fflonk/fflonk.go @@ -49,9 +49,9 @@ type OpeningProof struct { ClaimedValues [][][]fr.Element } -// CommitAndFold commits to a list of polynomial by intertwinning them like in the FFT, that is +// FoldAndCommit commits to a list of polynomial by intertwinning them like in the FFT, that is // returns ∑_{i Date: Fri, 20 Sep 2024 14:59:31 +0200 Subject: [PATCH 59/66] feat: fixed size folded polynomials --- ecc/bls12-377/fflonk/fflonk.go | 6 ++++-- ecc/bls12-377/fflonk/fflonk_test.go | 2 +- ecc/bls12-378/fflonk/fflonk.go | 6 ++++-- ecc/bls12-378/fflonk/fflonk_test.go | 2 +- ecc/bls12-381/fflonk/fflonk.go | 6 ++++-- 
ecc/bls12-381/fflonk/fflonk_test.go | 2 +- ecc/bls24-315/fflonk/fflonk.go | 6 ++++-- ecc/bls24-315/fflonk/fflonk_test.go | 2 +- ecc/bls24-317/fflonk/fflonk.go | 6 ++++-- ecc/bls24-317/fflonk/fflonk_test.go | 2 +- ecc/bn254/fflonk/fflonk.go | 6 ++++-- ecc/bn254/fflonk/fflonk_test.go | 2 +- ecc/bw6-633/fflonk/fflonk.go | 6 ++++-- ecc/bw6-633/fflonk/fflonk_test.go | 2 +- ecc/bw6-756/fflonk/fflonk.go | 6 ++++-- ecc/bw6-756/fflonk/fflonk_test.go | 2 +- ecc/bw6-761/fflonk/fflonk.go | 6 ++++-- ecc/bw6-761/fflonk/fflonk_test.go | 2 +- internal/generator/fflonk/template/fflonk.go.tmpl | 6 ++++-- internal/generator/fflonk/template/fflonk.test.go.tmpl | 2 +- 20 files changed, 50 insertions(+), 30 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk.go b/ecc/bls12-377/fflonk/fflonk.go index df4872da0f..376d414800 100644 --- a/ecc/bls12-377/fflonk/fflonk.go +++ b/ecc/bls12-377/fflonk/fflonk.go @@ -57,7 +57,9 @@ func FoldAndCommit(p [][]fr.Element, pk kzg.ProvingKey, nbTasks ...int) (kzg.Dig return com, err } -// Fold returns p folded as in the fft, that is ∑_{i Date: Fri, 20 Sep 2024 15:11:03 +0200 Subject: [PATCH 60/66] feat: renaming --- ecc/bls12-377/fflonk/fflonk.go | 12 ++++++------ ecc/bls12-378/fflonk/fflonk.go | 12 ++++++------ ecc/bls12-381/fflonk/fflonk.go | 12 ++++++------ ecc/bls24-315/fflonk/fflonk.go | 12 ++++++------ ecc/bls24-317/fflonk/fflonk.go | 12 ++++++------ ecc/bn254/fflonk/fflonk.go | 12 ++++++------ ecc/bw6-633/fflonk/fflonk.go | 12 ++++++------ ecc/bw6-756/fflonk/fflonk.go | 12 ++++++------ ecc/bw6-761/fflonk/fflonk.go | 12 ++++++------ internal/generator/fflonk/template/fflonk.go.tmpl | 12 ++++++------ 10 files changed, 60 insertions(+), 60 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk.go b/ecc/bls12-377/fflonk/fflonk.go index 376d414800..773f9a00a1 100644 --- a/ecc/bls12-377/fflonk/fflonk.go +++ b/ecc/bls12-377/fflonk/fflonk.go @@ -95,15 +95,15 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 0: 
compute the relevant powers of the ((Sʲᵢ)ᵢ)ⱼ) nbPolysPerPack := make([]int, len(p)) - nextPowerOfTwoPerPack := make([]int, len(p)) + nextDivisorRminusOnePerPack := make([]int, len(p)) for i := 0; i < len(p); i++ { nbPolysPerPack[i] = len(p[i]) - nextPowerOfTwoPerPack[i] = getNextDivisorRMinusOne(len(p[i])) + nextDivisorRminusOnePerPack[i] = getNextDivisorRMinusOne(len(p[i])) } pointsPowerM := make([][]fr.Element, len(points)) var tmpBigInt big.Int for i := 0; i < len(p); i++ { - tmpBigInt.SetUint64(uint64(nextPowerOfTwoPerPack[i])) + tmpBigInt.SetUint64(uint64(nextDivisorRminusOnePerPack[i])) pointsPowerM[i] = make([]fr.Element, len(points[i])) for j := 0; j < len(points[i]); j++ { pointsPowerM[i][j].Exp(points[i][j], &tmpBigInt) @@ -114,14 +114,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // on the relevant powers of the sets res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { - res.ClaimedValues[i] = make([][]fr.Element, nextPowerOfTwoPerPack[i]) + res.ClaimedValues[i] = make([][]fr.Element, nextDivisorRminusOnePerPack[i]) for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero + for j := len(p[i]); j < nextDivisorRminusOnePerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } @@ -138,7 +138,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) if err != nil { return res, err } diff --git a/ecc/bls12-378/fflonk/fflonk.go 
b/ecc/bls12-378/fflonk/fflonk.go index 500870dbbe..ae9834c61a 100644 --- a/ecc/bls12-378/fflonk/fflonk.go +++ b/ecc/bls12-378/fflonk/fflonk.go @@ -95,15 +95,15 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 0: compute the relevant powers of the ((Sʲᵢ)ᵢ)ⱼ) nbPolysPerPack := make([]int, len(p)) - nextPowerOfTwoPerPack := make([]int, len(p)) + nextDivisorRminusOnePerPack := make([]int, len(p)) for i := 0; i < len(p); i++ { nbPolysPerPack[i] = len(p[i]) - nextPowerOfTwoPerPack[i] = getNextDivisorRMinusOne(len(p[i])) + nextDivisorRminusOnePerPack[i] = getNextDivisorRMinusOne(len(p[i])) } pointsPowerM := make([][]fr.Element, len(points)) var tmpBigInt big.Int for i := 0; i < len(p); i++ { - tmpBigInt.SetUint64(uint64(nextPowerOfTwoPerPack[i])) + tmpBigInt.SetUint64(uint64(nextDivisorRminusOnePerPack[i])) pointsPowerM[i] = make([]fr.Element, len(points[i])) for j := 0; j < len(points[i]); j++ { pointsPowerM[i][j].Exp(points[i][j], &tmpBigInt) @@ -114,14 +114,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // on the relevant powers of the sets res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { - res.ClaimedValues[i] = make([][]fr.Element, nextPowerOfTwoPerPack[i]) + res.ClaimedValues[i] = make([][]fr.Element, nextDivisorRminusOnePerPack[i]) for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero + for j := len(p[i]); j < nextDivisorRminusOnePerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } @@ -138,7 +138,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i 
:= 0; i < len(p); i++ { - newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) if err != nil { return res, err } diff --git a/ecc/bls12-381/fflonk/fflonk.go b/ecc/bls12-381/fflonk/fflonk.go index 16455c1b86..7a8c1f94fd 100644 --- a/ecc/bls12-381/fflonk/fflonk.go +++ b/ecc/bls12-381/fflonk/fflonk.go @@ -95,15 +95,15 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 0: compute the relevant powers of the ((Sʲᵢ)ᵢ)ⱼ) nbPolysPerPack := make([]int, len(p)) - nextPowerOfTwoPerPack := make([]int, len(p)) + nextDivisorRminusOnePerPack := make([]int, len(p)) for i := 0; i < len(p); i++ { nbPolysPerPack[i] = len(p[i]) - nextPowerOfTwoPerPack[i] = getNextDivisorRMinusOne(len(p[i])) + nextDivisorRminusOnePerPack[i] = getNextDivisorRMinusOne(len(p[i])) } pointsPowerM := make([][]fr.Element, len(points)) var tmpBigInt big.Int for i := 0; i < len(p); i++ { - tmpBigInt.SetUint64(uint64(nextPowerOfTwoPerPack[i])) + tmpBigInt.SetUint64(uint64(nextDivisorRminusOnePerPack[i])) pointsPowerM[i] = make([]fr.Element, len(points[i])) for j := 0; j < len(points[i]); j++ { pointsPowerM[i][j].Exp(points[i][j], &tmpBigInt) @@ -114,14 +114,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // on the relevant powers of the sets res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { - res.ClaimedValues[i] = make([][]fr.Element, nextPowerOfTwoPerPack[i]) + res.ClaimedValues[i] = make([][]fr.Element, nextDivisorRminusOnePerPack[i]) for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero + for j := len(p[i]); j < nextDivisorRminusOnePerPack[i]; j++ { // -> the remaining 
polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } @@ -138,7 +138,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) if err != nil { return res, err } diff --git a/ecc/bls24-315/fflonk/fflonk.go b/ecc/bls24-315/fflonk/fflonk.go index 678bbd86c9..a8e808680c 100644 --- a/ecc/bls24-315/fflonk/fflonk.go +++ b/ecc/bls24-315/fflonk/fflonk.go @@ -95,15 +95,15 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 0: compute the relevant powers of the ((Sʲᵢ)ᵢ)ⱼ) nbPolysPerPack := make([]int, len(p)) - nextPowerOfTwoPerPack := make([]int, len(p)) + nextDivisorRminusOnePerPack := make([]int, len(p)) for i := 0; i < len(p); i++ { nbPolysPerPack[i] = len(p[i]) - nextPowerOfTwoPerPack[i] = getNextDivisorRMinusOne(len(p[i])) + nextDivisorRminusOnePerPack[i] = getNextDivisorRMinusOne(len(p[i])) } pointsPowerM := make([][]fr.Element, len(points)) var tmpBigInt big.Int for i := 0; i < len(p); i++ { - tmpBigInt.SetUint64(uint64(nextPowerOfTwoPerPack[i])) + tmpBigInt.SetUint64(uint64(nextDivisorRminusOnePerPack[i])) pointsPowerM[i] = make([]fr.Element, len(points[i])) for j := 0; j < len(points[i]); j++ { pointsPowerM[i][j].Exp(points[i][j], &tmpBigInt) @@ -114,14 +114,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // on the relevant powers of the sets res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { - res.ClaimedValues[i] = make([][]fr.Element, nextPowerOfTwoPerPack[i]) + res.ClaimedValues[i] = make([][]fr.Element, nextDivisorRminusOnePerPack[i]) for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { 
res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero + for j := len(p[i]); j < nextDivisorRminusOnePerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } @@ -138,7 +138,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) if err != nil { return res, err } diff --git a/ecc/bls24-317/fflonk/fflonk.go b/ecc/bls24-317/fflonk/fflonk.go index 820e90e430..3c736cb1a0 100644 --- a/ecc/bls24-317/fflonk/fflonk.go +++ b/ecc/bls24-317/fflonk/fflonk.go @@ -95,15 +95,15 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 0: compute the relevant powers of the ((Sʲᵢ)ᵢ)ⱼ) nbPolysPerPack := make([]int, len(p)) - nextPowerOfTwoPerPack := make([]int, len(p)) + nextDivisorRminusOnePerPack := make([]int, len(p)) for i := 0; i < len(p); i++ { nbPolysPerPack[i] = len(p[i]) - nextPowerOfTwoPerPack[i] = getNextDivisorRMinusOne(len(p[i])) + nextDivisorRminusOnePerPack[i] = getNextDivisorRMinusOne(len(p[i])) } pointsPowerM := make([][]fr.Element, len(points)) var tmpBigInt big.Int for i := 0; i < len(p); i++ { - tmpBigInt.SetUint64(uint64(nextPowerOfTwoPerPack[i])) + tmpBigInt.SetUint64(uint64(nextDivisorRminusOnePerPack[i])) pointsPowerM[i] = make([]fr.Element, len(points[i])) for j := 0; j < len(points[i]); j++ { pointsPowerM[i][j].Exp(points[i][j], &tmpBigInt) @@ -114,14 +114,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // on the relevant powers of the sets res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { - res.ClaimedValues[i] = 
make([][]fr.Element, nextPowerOfTwoPerPack[i]) + res.ClaimedValues[i] = make([][]fr.Element, nextDivisorRminusOnePerPack[i]) for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero + for j := len(p[i]); j < nextDivisorRminusOnePerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } @@ -138,7 +138,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) if err != nil { return res, err } diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index b64faf95fd..f2995d3b6e 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -95,15 +95,15 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 0: compute the relevant powers of the ((Sʲᵢ)ᵢ)ⱼ) nbPolysPerPack := make([]int, len(p)) - nextPowerOfTwoPerPack := make([]int, len(p)) + nextDivisorRminusOnePerPack := make([]int, len(p)) for i := 0; i < len(p); i++ { nbPolysPerPack[i] = len(p[i]) - nextPowerOfTwoPerPack[i] = getNextDivisorRMinusOne(len(p[i])) + nextDivisorRminusOnePerPack[i] = getNextDivisorRMinusOne(len(p[i])) } pointsPowerM := make([][]fr.Element, len(points)) var tmpBigInt big.Int for i := 0; i < len(p); i++ { - tmpBigInt.SetUint64(uint64(nextPowerOfTwoPerPack[i])) + tmpBigInt.SetUint64(uint64(nextDivisorRminusOnePerPack[i])) pointsPowerM[i] = make([]fr.Element, len(points[i])) for j := 0; j < len(points[i]); j++ { pointsPowerM[i][j].Exp(points[i][j], &tmpBigInt) @@ -114,14 
+114,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // on the relevant powers of the sets res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { - res.ClaimedValues[i] = make([][]fr.Element, nextPowerOfTwoPerPack[i]) + res.ClaimedValues[i] = make([][]fr.Element, nextDivisorRminusOnePerPack[i]) for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero + for j := len(p[i]); j < nextDivisorRminusOnePerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } @@ -138,7 +138,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) if err != nil { return res, err } diff --git a/ecc/bw6-633/fflonk/fflonk.go b/ecc/bw6-633/fflonk/fflonk.go index 04e11fb49d..2c1a34f0b4 100644 --- a/ecc/bw6-633/fflonk/fflonk.go +++ b/ecc/bw6-633/fflonk/fflonk.go @@ -95,15 +95,15 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 0: compute the relevant powers of the ((Sʲᵢ)ᵢ)ⱼ) nbPolysPerPack := make([]int, len(p)) - nextPowerOfTwoPerPack := make([]int, len(p)) + nextDivisorRminusOnePerPack := make([]int, len(p)) for i := 0; i < len(p); i++ { nbPolysPerPack[i] = len(p[i]) - nextPowerOfTwoPerPack[i] = getNextDivisorRMinusOne(len(p[i])) + nextDivisorRminusOnePerPack[i] = getNextDivisorRMinusOne(len(p[i])) } pointsPowerM := make([][]fr.Element, len(points)) var tmpBigInt big.Int for i := 0; i < len(p); i++ { - 
tmpBigInt.SetUint64(uint64(nextPowerOfTwoPerPack[i])) + tmpBigInt.SetUint64(uint64(nextDivisorRminusOnePerPack[i])) pointsPowerM[i] = make([]fr.Element, len(points[i])) for j := 0; j < len(points[i]); j++ { pointsPowerM[i][j].Exp(points[i][j], &tmpBigInt) @@ -114,14 +114,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // on the relevant powers of the sets res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { - res.ClaimedValues[i] = make([][]fr.Element, nextPowerOfTwoPerPack[i]) + res.ClaimedValues[i] = make([][]fr.Element, nextDivisorRminusOnePerPack[i]) for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero + for j := len(p[i]); j < nextDivisorRminusOnePerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } @@ -138,7 +138,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) if err != nil { return res, err } diff --git a/ecc/bw6-756/fflonk/fflonk.go b/ecc/bw6-756/fflonk/fflonk.go index 91e8026789..5bcafe8217 100644 --- a/ecc/bw6-756/fflonk/fflonk.go +++ b/ecc/bw6-756/fflonk/fflonk.go @@ -95,15 +95,15 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 0: compute the relevant powers of the ((Sʲᵢ)ᵢ)ⱼ) nbPolysPerPack := make([]int, len(p)) - nextPowerOfTwoPerPack := make([]int, len(p)) + nextDivisorRminusOnePerPack := make([]int, len(p)) for i := 0; i < len(p); i++ { nbPolysPerPack[i] = len(p[i]) - 
nextPowerOfTwoPerPack[i] = getNextDivisorRMinusOne(len(p[i])) + nextDivisorRminusOnePerPack[i] = getNextDivisorRMinusOne(len(p[i])) } pointsPowerM := make([][]fr.Element, len(points)) var tmpBigInt big.Int for i := 0; i < len(p); i++ { - tmpBigInt.SetUint64(uint64(nextPowerOfTwoPerPack[i])) + tmpBigInt.SetUint64(uint64(nextDivisorRminusOnePerPack[i])) pointsPowerM[i] = make([]fr.Element, len(points[i])) for j := 0; j < len(points[i]); j++ { pointsPowerM[i][j].Exp(points[i][j], &tmpBigInt) @@ -114,14 +114,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // on the relevant powers of the sets res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { - res.ClaimedValues[i] = make([][]fr.Element, nextPowerOfTwoPerPack[i]) + res.ClaimedValues[i] = make([][]fr.Element, nextDivisorRminusOnePerPack[i]) for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero + for j := len(p[i]); j < nextDivisorRminusOnePerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } @@ -138,7 +138,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) if err != nil { return res, err } diff --git a/ecc/bw6-761/fflonk/fflonk.go b/ecc/bw6-761/fflonk/fflonk.go index 3ad69a3a60..d03c9a3ebb 100644 --- a/ecc/bw6-761/fflonk/fflonk.go +++ b/ecc/bw6-761/fflonk/fflonk.go @@ -95,15 +95,15 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 0: compute 
the relevant powers of the ((Sʲᵢ)ᵢ)ⱼ) nbPolysPerPack := make([]int, len(p)) - nextPowerOfTwoPerPack := make([]int, len(p)) + nextDivisorRminusOnePerPack := make([]int, len(p)) for i := 0; i < len(p); i++ { nbPolysPerPack[i] = len(p[i]) - nextPowerOfTwoPerPack[i] = getNextDivisorRMinusOne(len(p[i])) + nextDivisorRminusOnePerPack[i] = getNextDivisorRMinusOne(len(p[i])) } pointsPowerM := make([][]fr.Element, len(points)) var tmpBigInt big.Int for i := 0; i < len(p); i++ { - tmpBigInt.SetUint64(uint64(nextPowerOfTwoPerPack[i])) + tmpBigInt.SetUint64(uint64(nextDivisorRminusOnePerPack[i])) pointsPowerM[i] = make([]fr.Element, len(points[i])) for j := 0; j < len(points[i]); j++ { pointsPowerM[i][j].Exp(points[i][j], &tmpBigInt) @@ -114,14 +114,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // on the relevant powers of the sets res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { - res.ClaimedValues[i] = make([][]fr.Element, nextPowerOfTwoPerPack[i]) + res.ClaimedValues[i] = make([][]fr.Element, nextDivisorRminusOnePerPack[i]) for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero + for j := len(p[i]); j < nextDivisorRminusOnePerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } @@ -138,7 +138,7 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) if err != nil { return res, err } diff --git 
a/internal/generator/fflonk/template/fflonk.go.tmpl b/internal/generator/fflonk/template/fflonk.go.tmpl index 60232b46fb..b7cfcc6c3d 100644 --- a/internal/generator/fflonk/template/fflonk.go.tmpl +++ b/internal/generator/fflonk/template/fflonk.go.tmpl @@ -77,15 +77,15 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // step 0: compute the relevant powers of the ((Sʲᵢ)ᵢ)ⱼ) nbPolysPerPack := make([]int, len(p)) - nextPowerOfTwoPerPack := make([]int, len(p)) + nextDivisorRminusOnePerPack := make([]int, len(p)) for i := 0; i < len(p); i++ { nbPolysPerPack[i] = len(p[i]) - nextPowerOfTwoPerPack[i] = getNextDivisorRMinusOne(len(p[i])) + nextDivisorRminusOnePerPack[i] = getNextDivisorRMinusOne(len(p[i])) } pointsPowerM := make([][]fr.Element, len(points)) var tmpBigInt big.Int for i := 0; i < len(p); i++ { - tmpBigInt.SetUint64(uint64(nextPowerOfTwoPerPack[i])) + tmpBigInt.SetUint64(uint64(nextDivisorRminusOnePerPack[i])) pointsPowerM[i] = make([]fr.Element, len(points[i])) for j := 0; j < len(points[i]); j++ { pointsPowerM[i][j].Exp(points[i][j], &tmpBigInt) @@ -96,14 +96,14 @@ func BatchOpen(p [][][]fr.Element, digests []kzg.Digest, points [][]fr.Element, // on the relevant powers of the sets res.ClaimedValues = make([][][]fr.Element, len(p)) for i := 0; i < len(p); i++ { - res.ClaimedValues[i] = make([][]fr.Element, nextPowerOfTwoPerPack[i]) + res.ClaimedValues[i] = make([][]fr.Element, nextDivisorRminusOnePerPack[i]) for j := 0; j < len(p[i]); j++ { res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) for k := 0; k < len(points[i]); k++ { res.ClaimedValues[i][j][k] = eval(p[i][j], pointsPowerM[i][k]) } } - for j := len(p[i]); j < nextPowerOfTwoPerPack[i]; j++ { // -> the remaining polynomials are zero + for j := len(p[i]); j < nextDivisorRminusOnePerPack[i]; j++ { // -> the remaining polynomials are zero res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) } } @@ -120,7 +120,7 @@ func BatchOpen(p [][][]fr.Element, 
digests []kzg.Digest, points [][]fr.Element, newPoints := make([][]fr.Element, len(points)) var err error for i := 0; i < len(p); i++ { - newPoints[i], err = extendSet(points[i], nextPowerOfTwoPerPack[i]) + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) if err != nil { return res, err } From 83eac47ca06ea4a3a310f6c3abe997d43b34b6c0 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Fri, 20 Sep 2024 15:39:47 +0200 Subject: [PATCH 61/66] feat: number of trials for finding the next divisor of r-1 is now limited --- ecc/bls12-377/fflonk/fflonk.go | 7 ++++++- ecc/bls12-378/fflonk/fflonk.go | 7 ++++++- ecc/bls12-381/fflonk/fflonk.go | 7 ++++++- ecc/bls24-315/fflonk/fflonk.go | 7 ++++++- ecc/bls24-317/fflonk/fflonk.go | 7 ++++++- ecc/bn254/fflonk/fflonk.go | 7 ++++++- ecc/bw6-633/fflonk/fflonk.go | 7 ++++++- ecc/bw6-756/fflonk/fflonk.go | 7 ++++++- ecc/bw6-761/fflonk/fflonk.go | 7 ++++++- internal/generator/fflonk/template/fflonk.go.tmpl | 7 ++++++- 10 files changed, 60 insertions(+), 10 deletions(-) diff --git a/ecc/bls12-377/fflonk/fflonk.go b/ecc/bls12-377/fflonk/fflonk.go index 773f9a00a1..65c19b0516 100644 --- a/ecc/bls12-377/fflonk/fflonk.go +++ b/ecc/bls12-377/fflonk/fflonk.go @@ -249,10 +249,15 @@ func getNextDivisorRMinusOne(i int) int { r.Sub(r, &one) tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) - for tmp.Cmp(&zero) != 0 { + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { i += 1 tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") } return i } diff --git a/ecc/bls12-378/fflonk/fflonk.go b/ecc/bls12-378/fflonk/fflonk.go index ae9834c61a..ed9539acdd 100644 --- a/ecc/bls12-378/fflonk/fflonk.go +++ b/ecc/bls12-378/fflonk/fflonk.go @@ -249,10 +249,15 @@ func getNextDivisorRMinusOne(i int) int { r.Sub(r, &one) tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) - for tmp.Cmp(&zero) != 0 { + nbTrials := 100 // prevent DOS 
attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { i += 1 tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") } return i } diff --git a/ecc/bls12-381/fflonk/fflonk.go b/ecc/bls12-381/fflonk/fflonk.go index 7a8c1f94fd..293d58f181 100644 --- a/ecc/bls12-381/fflonk/fflonk.go +++ b/ecc/bls12-381/fflonk/fflonk.go @@ -249,10 +249,15 @@ func getNextDivisorRMinusOne(i int) int { r.Sub(r, &one) tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) - for tmp.Cmp(&zero) != 0 { + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { i += 1 tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") } return i } diff --git a/ecc/bls24-315/fflonk/fflonk.go b/ecc/bls24-315/fflonk/fflonk.go index a8e808680c..d33a0cac62 100644 --- a/ecc/bls24-315/fflonk/fflonk.go +++ b/ecc/bls24-315/fflonk/fflonk.go @@ -249,10 +249,15 @@ func getNextDivisorRMinusOne(i int) int { r.Sub(r, &one) tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) - for tmp.Cmp(&zero) != 0 { + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { i += 1 tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") } return i } diff --git a/ecc/bls24-317/fflonk/fflonk.go b/ecc/bls24-317/fflonk/fflonk.go index 3c736cb1a0..79bca3973b 100644 --- a/ecc/bls24-317/fflonk/fflonk.go +++ b/ecc/bls24-317/fflonk/fflonk.go @@ -249,10 +249,15 @@ func getNextDivisorRMinusOne(i int) int { r.Sub(r, &one) tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) - for tmp.Cmp(&zero) != 0 { + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { i += 1 tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") } return i } diff 
--git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go index f2995d3b6e..dc7b99e70b 100644 --- a/ecc/bn254/fflonk/fflonk.go +++ b/ecc/bn254/fflonk/fflonk.go @@ -249,10 +249,15 @@ func getNextDivisorRMinusOne(i int) int { r.Sub(r, &one) tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) - for tmp.Cmp(&zero) != 0 { + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { i += 1 tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") } return i } diff --git a/ecc/bw6-633/fflonk/fflonk.go b/ecc/bw6-633/fflonk/fflonk.go index 2c1a34f0b4..a66233e78d 100644 --- a/ecc/bw6-633/fflonk/fflonk.go +++ b/ecc/bw6-633/fflonk/fflonk.go @@ -249,10 +249,15 @@ func getNextDivisorRMinusOne(i int) int { r.Sub(r, &one) tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) - for tmp.Cmp(&zero) != 0 { + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { i += 1 tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") } return i } diff --git a/ecc/bw6-756/fflonk/fflonk.go b/ecc/bw6-756/fflonk/fflonk.go index 5bcafe8217..672d78337c 100644 --- a/ecc/bw6-756/fflonk/fflonk.go +++ b/ecc/bw6-756/fflonk/fflonk.go @@ -249,10 +249,15 @@ func getNextDivisorRMinusOne(i int) int { r.Sub(r, &one) tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) - for tmp.Cmp(&zero) != 0 { + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { i += 1 tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") } return i } diff --git a/ecc/bw6-761/fflonk/fflonk.go b/ecc/bw6-761/fflonk/fflonk.go index d03c9a3ebb..3a18d58b65 100644 --- a/ecc/bw6-761/fflonk/fflonk.go +++ b/ecc/bw6-761/fflonk/fflonk.go @@ -249,10 +249,15 @@ func getNextDivisorRMinusOne(i int) int { r.Sub(r, &one) 
tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) - for tmp.Cmp(&zero) != 0 { + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { i += 1 tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") } return i } diff --git a/internal/generator/fflonk/template/fflonk.go.tmpl b/internal/generator/fflonk/template/fflonk.go.tmpl index b7cfcc6c3d..f3baa753f3 100644 --- a/internal/generator/fflonk/template/fflonk.go.tmpl +++ b/internal/generator/fflonk/template/fflonk.go.tmpl @@ -231,10 +231,15 @@ func getNextDivisorRMinusOne(i int) int { r.Sub(r, &one) tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) - for tmp.Cmp(&zero) != 0 { + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials>0{ i += 1 tmp.SetUint64(uint64(i)) tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials==0 { + panic("did not find any divisor of r-1") } return i } From 7fa434fd77cd447e837a4584b0d82089b74f45d4 Mon Sep 17 00:00:00 2001 From: Ivo Kubjas Date: Fri, 4 Oct 2024 13:20:09 +0000 Subject: [PATCH 62/66] chore: ignore gosec false positive --- ecc/bls12-377/shplonk/shplonk_test.go | 2 +- ecc/bls12-381/shplonk/shplonk_test.go | 2 +- ecc/bls24-315/shplonk/shplonk_test.go | 2 +- ecc/bls24-317/shplonk/shplonk_test.go | 2 +- ecc/bn254/shplonk/shplonk_test.go | 2 +- ecc/bw6-633/shplonk/shplonk_test.go | 2 +- ecc/bw6-761/shplonk/shplonk_test.go | 2 +- internal/generator/shplonk/template/shplonk.test.go.tmpl | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/ecc/bls12-377/shplonk/shplonk_test.go b/ecc/bls12-377/shplonk/shplonk_test.go index 9333499557..9b321fa4ef 100644 --- a/ecc/bls12-377/shplonk/shplonk_test.go +++ b/ecc/bls12-377/shplonk/shplonk_test.go @@ -72,7 +72,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = rand.Intn(10) + 2 + sizePoly[i] = rand.Intn(10) 
+ 2 //nolint: gosec // G404, no strong PRNG needed here } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bls12-381/shplonk/shplonk_test.go b/ecc/bls12-381/shplonk/shplonk_test.go index b007b75590..e2295bbe36 100644 --- a/ecc/bls12-381/shplonk/shplonk_test.go +++ b/ecc/bls12-381/shplonk/shplonk_test.go @@ -72,7 +72,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = rand.Intn(10) + 2 + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bls24-315/shplonk/shplonk_test.go b/ecc/bls24-315/shplonk/shplonk_test.go index c97b6dda88..4f7f41f122 100644 --- a/ecc/bls24-315/shplonk/shplonk_test.go +++ b/ecc/bls24-315/shplonk/shplonk_test.go @@ -72,7 +72,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = rand.Intn(10) + 2 + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bls24-317/shplonk/shplonk_test.go b/ecc/bls24-317/shplonk/shplonk_test.go index bd89089b78..33d055815e 100644 --- a/ecc/bls24-317/shplonk/shplonk_test.go +++ b/ecc/bls24-317/shplonk/shplonk_test.go @@ -72,7 +72,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = rand.Intn(10) + 2 + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go index 88fa11fc0a..d0558c4725 100644 --- a/ecc/bn254/shplonk/shplonk_test.go +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -72,7 +72,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 
sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = rand.Intn(10) + 2 + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bw6-633/shplonk/shplonk_test.go b/ecc/bw6-633/shplonk/shplonk_test.go index dcb46ffeea..2cc4d27f57 100644 --- a/ecc/bw6-633/shplonk/shplonk_test.go +++ b/ecc/bw6-633/shplonk/shplonk_test.go @@ -72,7 +72,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = rand.Intn(10) + 2 + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/ecc/bw6-761/shplonk/shplonk_test.go b/ecc/bw6-761/shplonk/shplonk_test.go index 1e4ecc2d81..bcdbccea65 100644 --- a/ecc/bw6-761/shplonk/shplonk_test.go +++ b/ecc/bw6-761/shplonk/shplonk_test.go @@ -72,7 +72,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = rand.Intn(10) + 2 + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { diff --git a/internal/generator/shplonk/template/shplonk.test.go.tmpl b/internal/generator/shplonk/template/shplonk.test.go.tmpl index 775995c032..d8ff5b8adb 100644 --- a/internal/generator/shplonk/template/shplonk.test.go.tmpl +++ b/internal/generator/shplonk/template/shplonk.test.go.tmpl @@ -55,7 +55,7 @@ func TestOpening(t *testing.T) { nbPolys := 2 sizePoly := make([]int, nbPolys) for i := 0; i < nbPolys; i++ { - sizePoly[i] = rand.Intn(10) + 2 + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here } polys := make([][]fr.Element, nbPolys) for i := 0; i < nbPolys; i++ { From de674b4c64e955612c351fb16117ff4f89700a46 Mon Sep 17 00:00:00 2001 From: 
Thomas Piellard Date: Tue, 8 Oct 2024 13:00:12 +0200 Subject: [PATCH 63/66] feat: example shplonk --- ecc/bls12-377/shplonk/example_test.go | 63 +++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 ecc/bls12-377/shplonk/example_test.go diff --git a/ecc/bls12-377/shplonk/example_test.go b/ecc/bls12-377/shplonk/example_test.go new file mode 100644 index 0000000000..e913991e42 --- /dev/null +++ b/ecc/bls12-377/shplonk/example_test.go @@ -0,0 +1,63 @@ +package shplonk + +import ( + "crypto/sha256" + "fmt" + + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" +) + +// This example shows how to batch open a list of polynomials on a set of points, +// where each polynomial is opened on its own set of point. +// That is the i-th polynomial f_i is opened on set of point S_i. +func Example_batchOpen() { + + const nbPolynomials = 10 + + // sample a list of points and a list of polynomials. The i-th polynomial + // is opened on the i-th set of points, there might be several points per set. 
+ points := make([][]fr.Element, nbPolynomials) + polynomials := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + + polynomials[i] = make([]fr.Element, 20+2*i) // random size + for j := 0; j < 20+2*i; j++ { + polynomials[i][j].SetRandom() + } + + points[i] = make([]fr.Element, i+1) // random number of point + for j := 0; j < i+1; j++ { + points[i][j].SetRandom() + } + } + + // Create commitments for each polynomials + var err error + digests := make([]kzg.Digest, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + digests[i], err = kzg.Commit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // hash function that is used for the challenge derivation in Fiat Shamir + hf := sha256.New() + + // ceate an opening proof of polynomials[i] on the set points[i] + openingProof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // we verify the proof. If the proof is correct, then openingProof[i][j] contains + // the evaluation of the polynomials[i] on points[i][j] + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } + + fmt.Println("verified") + // output: verified +} From 5fc53f3014ee49c028cb80666b79d3f1236d7d4f Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 8 Oct 2024 13:06:50 +0200 Subject: [PATCH 64/66] feat: code gen --- ecc/bls12-377/shplonk/example_test.go | 16 ++++ ecc/bls12-381/shplonk/example_test.go | 79 +++++++++++++++++++ ecc/bls24-315/shplonk/example_test.go | 79 +++++++++++++++++++ ecc/bls24-317/shplonk/example_test.go | 79 +++++++++++++++++++ ecc/bn254/shplonk/example_test.go | 79 +++++++++++++++++++ ecc/bw6-633/shplonk/example_test.go | 79 +++++++++++++++++++ ecc/bw6-761/shplonk/example_test.go | 79 +++++++++++++++++++ internal/generator/shplonk/generator.go | 1 + .../shplonk/template/example_test.go.tmpl | 61 ++++++++++++++ 9 files changed, 552 insertions(+) create mode 100644 
ecc/bls12-381/shplonk/example_test.go create mode 100644 ecc/bls24-315/shplonk/example_test.go create mode 100644 ecc/bls24-317/shplonk/example_test.go create mode 100644 ecc/bn254/shplonk/example_test.go create mode 100644 ecc/bw6-633/shplonk/example_test.go create mode 100644 ecc/bw6-761/shplonk/example_test.go create mode 100644 internal/generator/shplonk/template/example_test.go.tmpl diff --git a/ecc/bls12-377/shplonk/example_test.go b/ecc/bls12-377/shplonk/example_test.go index e913991e42..88c7e07c8d 100644 --- a/ecc/bls12-377/shplonk/example_test.go +++ b/ecc/bls12-377/shplonk/example_test.go @@ -1,3 +1,19 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + package shplonk import ( diff --git a/ecc/bls12-381/shplonk/example_test.go b/ecc/bls12-381/shplonk/example_test.go new file mode 100644 index 0000000000..f575f56875 --- /dev/null +++ b/ecc/bls12-381/shplonk/example_test.go @@ -0,0 +1,79 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "fmt" + + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" +) + +// This example shows how to batch open a list of polynomials on a set of points, +// where each polynomial is opened on its own set of point. +// That is the i-th polynomial f_i is opened on set of point S_i. +func Example_batchOpen() { + + const nbPolynomials = 10 + + // sample a list of points and a list of polynomials. The i-th polynomial + // is opened on the i-th set of points, there might be several points per set. 
+ points := make([][]fr.Element, nbPolynomials) + polynomials := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + + polynomials[i] = make([]fr.Element, 20+2*i) // random size + for j := 0; j < 20+2*i; j++ { + polynomials[i][j].SetRandom() + } + + points[i] = make([]fr.Element, i+1) // random number of point + for j := 0; j < i+1; j++ { + points[i][j].SetRandom() + } + } + + // Create commitments for each polynomials + var err error + digests := make([]kzg.Digest, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + digests[i], err = kzg.Commit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // hash function that is used for the challenge derivation in Fiat Shamir + hf := sha256.New() + + // ceate an opening proof of polynomials[i] on the set points[i] + openingProof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // we verify the proof. If the proof is correct, then openingProof[i][j] contains + // the evaluation of the polynomials[i] on points[i][j] + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } + + fmt.Println("verified") + // output: verified +} diff --git a/ecc/bls24-315/shplonk/example_test.go b/ecc/bls24-315/shplonk/example_test.go new file mode 100644 index 0000000000..2185a37ccb --- /dev/null +++ b/ecc/bls24-315/shplonk/example_test.go @@ -0,0 +1,79 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "fmt" + + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" +) + +// This example shows how to batch open a list of polynomials on a set of points, +// where each polynomial is opened on its own set of point. +// That is the i-th polynomial f_i is opened on set of point S_i. +func Example_batchOpen() { + + const nbPolynomials = 10 + + // sample a list of points and a list of polynomials. The i-th polynomial + // is opened on the i-th set of points, there might be several points per set. + points := make([][]fr.Element, nbPolynomials) + polynomials := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + + polynomials[i] = make([]fr.Element, 20+2*i) // random size + for j := 0; j < 20+2*i; j++ { + polynomials[i][j].SetRandom() + } + + points[i] = make([]fr.Element, i+1) // random number of point + for j := 0; j < i+1; j++ { + points[i][j].SetRandom() + } + } + + // Create commitments for each polynomials + var err error + digests := make([]kzg.Digest, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + digests[i], err = kzg.Commit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // hash function that is used for the challenge derivation in Fiat Shamir + hf := sha256.New() + + // ceate an opening proof of polynomials[i] on the set points[i] + openingProof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // we verify the proof. 
If the proof is correct, then openingProof[i][j] contains + // the evaluation of the polynomials[i] on points[i][j] + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } + + fmt.Println("verified") + // output: verified +} diff --git a/ecc/bls24-317/shplonk/example_test.go b/ecc/bls24-317/shplonk/example_test.go new file mode 100644 index 0000000000..237b13ef60 --- /dev/null +++ b/ecc/bls24-317/shplonk/example_test.go @@ -0,0 +1,79 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "fmt" + + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" +) + +// This example shows how to batch open a list of polynomials on a set of points, +// where each polynomial is opened on its own set of point. +// That is the i-th polynomial f_i is opened on set of point S_i. +func Example_batchOpen() { + + const nbPolynomials = 10 + + // sample a list of points and a list of polynomials. The i-th polynomial + // is opened on the i-th set of points, there might be several points per set. 
+ points := make([][]fr.Element, nbPolynomials) + polynomials := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + + polynomials[i] = make([]fr.Element, 20+2*i) // random size + for j := 0; j < 20+2*i; j++ { + polynomials[i][j].SetRandom() + } + + points[i] = make([]fr.Element, i+1) // random number of point + for j := 0; j < i+1; j++ { + points[i][j].SetRandom() + } + } + + // Create commitments for each polynomials + var err error + digests := make([]kzg.Digest, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + digests[i], err = kzg.Commit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // hash function that is used for the challenge derivation in Fiat Shamir + hf := sha256.New() + + // ceate an opening proof of polynomials[i] on the set points[i] + openingProof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // we verify the proof. If the proof is correct, then openingProof[i][j] contains + // the evaluation of the polynomials[i] on points[i][j] + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } + + fmt.Println("verified") + // output: verified +} diff --git a/ecc/bn254/shplonk/example_test.go b/ecc/bn254/shplonk/example_test.go new file mode 100644 index 0000000000..e839ce1e16 --- /dev/null +++ b/ecc/bn254/shplonk/example_test.go @@ -0,0 +1,79 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "fmt" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" +) + +// This example shows how to batch open a list of polynomials on a set of points, +// where each polynomial is opened on its own set of point. +// That is the i-th polynomial f_i is opened on set of point S_i. +func Example_batchOpen() { + + const nbPolynomials = 10 + + // sample a list of points and a list of polynomials. The i-th polynomial + // is opened on the i-th set of points, there might be several points per set. + points := make([][]fr.Element, nbPolynomials) + polynomials := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + + polynomials[i] = make([]fr.Element, 20+2*i) // random size + for j := 0; j < 20+2*i; j++ { + polynomials[i][j].SetRandom() + } + + points[i] = make([]fr.Element, i+1) // random number of point + for j := 0; j < i+1; j++ { + points[i][j].SetRandom() + } + } + + // Create commitments for each polynomials + var err error + digests := make([]kzg.Digest, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + digests[i], err = kzg.Commit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // hash function that is used for the challenge derivation in Fiat Shamir + hf := sha256.New() + + // ceate an opening proof of polynomials[i] on the set points[i] + openingProof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // we verify the proof. 
If the proof is correct, then openingProof[i][j] contains + // the evaluation of the polynomials[i] on points[i][j] + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } + + fmt.Println("verified") + // output: verified +} diff --git a/ecc/bw6-633/shplonk/example_test.go b/ecc/bw6-633/shplonk/example_test.go new file mode 100644 index 0000000000..d13fd51dfe --- /dev/null +++ b/ecc/bw6-633/shplonk/example_test.go @@ -0,0 +1,79 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "fmt" + + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" +) + +// This example shows how to batch open a list of polynomials on a set of points, +// where each polynomial is opened on its own set of point. +// That is the i-th polynomial f_i is opened on set of point S_i. +func Example_batchOpen() { + + const nbPolynomials = 10 + + // sample a list of points and a list of polynomials. The i-th polynomial + // is opened on the i-th set of points, there might be several points per set. 
+ points := make([][]fr.Element, nbPolynomials) + polynomials := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + + polynomials[i] = make([]fr.Element, 20+2*i) // random size + for j := 0; j < 20+2*i; j++ { + polynomials[i][j].SetRandom() + } + + points[i] = make([]fr.Element, i+1) // random number of point + for j := 0; j < i+1; j++ { + points[i][j].SetRandom() + } + } + + // Create commitments for each polynomials + var err error + digests := make([]kzg.Digest, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + digests[i], err = kzg.Commit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // hash function that is used for the challenge derivation in Fiat Shamir + hf := sha256.New() + + // ceate an opening proof of polynomials[i] on the set points[i] + openingProof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // we verify the proof. If the proof is correct, then openingProof[i][j] contains + // the evaluation of the polynomials[i] on points[i][j] + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } + + fmt.Println("verified") + // output: verified +} diff --git a/ecc/bw6-761/shplonk/example_test.go b/ecc/bw6-761/shplonk/example_test.go new file mode 100644 index 0000000000..6beff08c10 --- /dev/null +++ b/ecc/bw6-761/shplonk/example_test.go @@ -0,0 +1,79 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "fmt" + + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" +) + +// This example shows how to batch open a list of polynomials on a set of points, +// where each polynomial is opened on its own set of point. +// That is the i-th polynomial f_i is opened on set of point S_i. +func Example_batchOpen() { + + const nbPolynomials = 10 + + // sample a list of points and a list of polynomials. The i-th polynomial + // is opened on the i-th set of points, there might be several points per set. + points := make([][]fr.Element, nbPolynomials) + polynomials := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + + polynomials[i] = make([]fr.Element, 20+2*i) // random size + for j := 0; j < 20+2*i; j++ { + polynomials[i][j].SetRandom() + } + + points[i] = make([]fr.Element, i+1) // random number of point + for j := 0; j < i+1; j++ { + points[i][j].SetRandom() + } + } + + // Create commitments for each polynomials + var err error + digests := make([]kzg.Digest, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + digests[i], err = kzg.Commit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // hash function that is used for the challenge derivation in Fiat Shamir + hf := sha256.New() + + // ceate an opening proof of polynomials[i] on the set points[i] + openingProof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // we verify the proof. 
If the proof is correct, then openingProof[i][j] contains + // the evaluation of the polynomials[i] on points[i][j] + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } + + fmt.Println("verified") + // output: verified +} diff --git a/internal/generator/shplonk/generator.go b/internal/generator/shplonk/generator.go index 94e1118300..15d72c2589 100644 --- a/internal/generator/shplonk/generator.go +++ b/internal/generator/shplonk/generator.go @@ -16,6 +16,7 @@ func Generate(conf config.Curve, baseDir string, bgen *bavard.BatchGenerator) er {File: filepath.Join(baseDir, "shplonk.go"), Templates: []string{"shplonk.go.tmpl"}}, {File: filepath.Join(baseDir, "shplonk_test.go"), Templates: []string{"shplonk.test.go.tmpl"}}, {File: filepath.Join(baseDir, "marshal.go"), Templates: []string{"marshal.go.tmpl"}}, + {File: filepath.Join(baseDir, "example_test.go"), Templates: []string{"example_test.go.tmpl"}}, // {File: filepath.Join(baseDir, "utils.go"), Templates: []string{"utils.go.tmpl"}}, } return bgen.Generate(conf, conf.Package, "./shplonk/template/", entries...) diff --git a/internal/generator/shplonk/template/example_test.go.tmpl b/internal/generator/shplonk/template/example_test.go.tmpl new file mode 100644 index 0000000000..6a492b522e --- /dev/null +++ b/internal/generator/shplonk/template/example_test.go.tmpl @@ -0,0 +1,61 @@ +import ( + "crypto/sha256" + "fmt" + + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" +) + +// This example shows how to batch open a list of polynomials on a set of points, +// where each polynomial is opened on its own set of point. +// That is the i-th polynomial f_i is opened on set of point S_i. +func Example_batchOpen() { + + const nbPolynomials = 10 + + // sample a list of points and a list of polynomials. The i-th polynomial + // is opened on the i-th set of points, there might be several points per set. 
+ points := make([][]fr.Element, nbPolynomials) + polynomials := make([][]fr.Element, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + + polynomials[i] = make([]fr.Element, 20+2*i) // random size + for j := 0; j < 20+2*i; j++ { + polynomials[i][j].SetRandom() + } + + points[i] = make([]fr.Element, i+1) // random number of point + for j := 0; j < i+1; j++ { + points[i][j].SetRandom() + } + } + + // Create commitments for each polynomials + var err error + digests := make([]kzg.Digest, nbPolynomials) + for i := 0; i < nbPolynomials; i++ { + digests[i], err = kzg.Commit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // hash function that is used for the challenge derivation in Fiat Shamir + hf := sha256.New() + + // ceate an opening proof of polynomials[i] on the set points[i] + openingProof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // we verify the proof. If the proof is correct, then openingProof[i][j] contains + // the evaluation of the polynomials[i] on points[i][j] + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } + + fmt.Println("verified") + // output: verified +} From 5cf6c02ad5b47db3a02b8dccbb1c611901a6fc8b Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 8 Oct 2024 17:09:55 +0200 Subject: [PATCH 65/66] feat: example fflonk --- ecc/bls12-377/fflonk/example_test.go | 68 ++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 ecc/bls12-377/fflonk/example_test.go diff --git a/ecc/bls12-377/fflonk/example_test.go b/ecc/bls12-377/fflonk/example_test.go new file mode 100644 index 0000000000..8ef443e87d --- /dev/null +++ b/ecc/bls12-377/fflonk/example_test.go @@ -0,0 +1,68 @@ +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" +) + +// This example demonstrates how to open a list of 
polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. + nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. 
The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. + err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} From 6b5bc8d083ea13e4015a9cb1202491b1038e6a42 Mon Sep 17 00:00:00 2001 From: Thomas Piellard Date: Tue, 8 Oct 2024 17:23:22 +0200 Subject: [PATCH 66/66] feat: code gen --- ecc/bls12-377/fflonk/example_test.go | 16 ++++ ecc/bls12-381/fflonk/example_test.go | 84 +++++++++++++++++++ ecc/bls24-315/fflonk/example_test.go | 84 +++++++++++++++++++ ecc/bls24-317/fflonk/example_test.go | 84 +++++++++++++++++++ ecc/bn254/fflonk/example_test.go | 84 +++++++++++++++++++ ecc/bw6-633/fflonk/example_test.go | 84 +++++++++++++++++++ ecc/bw6-761/fflonk/example_test.go | 84 +++++++++++++++++++ internal/generator/fflonk/generator.go | 1 + .../fflonk/template/example_test.go.tmpl | 66 +++++++++++++++ 9 files changed, 587 insertions(+) create mode 100644 ecc/bls12-381/fflonk/example_test.go create mode 100644 ecc/bls24-315/fflonk/example_test.go create mode 100644 ecc/bls24-317/fflonk/example_test.go create mode 100644 ecc/bn254/fflonk/example_test.go create mode 100644 ecc/bw6-633/fflonk/example_test.go create mode 100644 ecc/bw6-761/fflonk/example_test.go create mode 100644 internal/generator/fflonk/template/example_test.go.tmpl diff --git a/ecc/bls12-377/fflonk/example_test.go b/ecc/bls12-377/fflonk/example_test.go index 8ef443e87d..43bde4e804 100644 --- a/ecc/bls12-377/fflonk/example_test.go +++ b/ecc/bls12-377/fflonk/example_test.go @@ -1,3 +1,19 @@ +// Copyright 2020 Consensys Software Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + package fflonk import ( diff --git a/ecc/bls12-381/fflonk/example_test.go b/ecc/bls12-381/fflonk/example_test.go new file mode 100644 index 0000000000..61bd6079f4 --- /dev/null +++ b/ecc/bls12-381/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. 
+ nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. + nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. 
+ // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. + err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bls24-315/fflonk/example_test.go b/ecc/bls24-315/fflonk/example_test.go new file mode 100644 index 0000000000..ffc84295c6 --- /dev/null +++ b/ecc/bls24-315/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bls24-317/fflonk/example_test.go b/ecc/bls24-317/fflonk/example_test.go new file mode 100644 index 0000000000..3d58b2b5aa --- /dev/null +++ b/ecc/bls24-317/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bn254/fflonk/example_test.go b/ecc/bn254/fflonk/example_test.go new file mode 100644 index 0000000000..a3c26b50b2 --- /dev/null +++ b/ecc/bn254/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bw6-633/fflonk/example_test.go b/ecc/bw6-633/fflonk/example_test.go new file mode 100644 index 0000000000..c02b2b8ae8 --- /dev/null +++ b/ecc/bw6-633/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bw6-761/fflonk/example_test.go b/ecc/bw6-761/fflonk/example_test.go new file mode 100644 index 0000000000..ea35977108 --- /dev/null +++ b/ecc/bw6-761/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/internal/generator/fflonk/generator.go b/internal/generator/fflonk/generator.go index 875c129dd8..5eba0c6a95 100644 --- a/internal/generator/fflonk/generator.go +++ b/internal/generator/fflonk/generator.go @@ -16,6 +16,7 @@ func Generate(conf config.Curve, baseDir string, bgen *bavard.BatchGenerator) er {File: filepath.Join(baseDir, "fflonk.go"), Templates: []string{"fflonk.go.tmpl"}}, {File: filepath.Join(baseDir, "fflonk_test.go"), Templates: []string{"fflonk.test.go.tmpl"}}, {File: filepath.Join(baseDir, "marshal.go"), Templates: []string{"marshal.go.tmpl"}}, + {File: filepath.Join(baseDir, "example_test.go"), Templates: []string{"example_test.go.tmpl"}}, } return bgen.Generate(conf, conf.Package, "./fflonk/template/", entries...) diff --git a/internal/generator/fflonk/template/example_test.go.tmpl b/internal/generator/fflonk/template/example_test.go.tmpl new file mode 100644 index 0000000000..050a5f23eb --- /dev/null +++ b/internal/generator/fflonk/template/example_test.go.tmpl @@ -0,0 +1,66 @@ +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. + err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +}