diff --git a/ecc/bls12-377/fflonk/doc.go b/ecc/bls12-377/fflonk/doc.go new file mode 100644 index 000000000..694eaa973 --- /dev/null +++ b/ecc/bls12-377/fflonk/doc.go @@ -0,0 +1,21 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package fflonk provides fflonk commitment, based on shplonk. +// +// See https://eprint.iacr.org/2020/081.pdf for shplonk +// See https://eprint.iacr.org/2021/1167.pdf for fflonk. +package fflonk diff --git a/ecc/bls12-377/fflonk/example_test.go b/ecc/bls12-377/fflonk/example_test.go new file mode 100644 index 000000000..43bde4e80 --- /dev/null +++ b/ecc/bls12-377/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. + nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. 
+ hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. + err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bls12-377/fflonk/fflonk.go b/ecc/bls12-377/fflonk/fflonk.go new file mode 100644 index 000000000..65c19b051 --- /dev/null +++ b/ecc/bls12-377/fflonk/fflonk.go @@ -0,0 +1,290 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + "github.com/consensys/gnark-crypto/ecc/bls12-377/shplonk" +) + +var ( + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") +) + +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element +} + +// FoldAndCommit commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i the remaining polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } + } + + // step 2: fold polynomials + foldedPolynomials := make([][]fr.Element, len(p)) + for i := 0; i < len(p); i++ { + foldedPolynomials[i] = Fold(p[i]) + } + + // step 4: compute the associated roots, that is for each point p corresponding + // to a pack i of polynomials, we extend to if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return err +} + +// utils + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := fft.GeneratorFullMultiplicativeGroup() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") + } + return i +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/ecc/bls12-377/fflonk/fflonk_test.go b/ecc/bls12-377/fflonk/fflonk_test.go new file mode 100644 index 000000000..55e2dcc78 --- /dev/null +++ b/ecc/bls12-377/fflonk/fflonk_test.go @@ -0,0 +1,136 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 600 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = FoldAndCommit(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} 
+ +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) + } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// 
buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) + } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bls12-377/shplonk/shplonk_test.go b/ecc/bls12-377/shplonk/shplonk_test.go new file mode 100644 index 000000000..9b321fa4e --- /dev/null +++ b/ecc/bls12-377/shplonk/shplonk_test.go @@ -0,0 +1,348 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "math/rand" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-377" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + var frAlpha fr.Element + frAlpha.SetRandom() + bAlpha = big.NewInt(0) + frAlpha.BigInt(bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } +} + +func TestSerialization(t *testing.T) { + + _, _, g, _ := bls12377.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + 
points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < 
nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + 
t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bls12-381/fflonk/doc.go b/ecc/bls12-381/fflonk/doc.go new file mode 100644 index 000000000..694eaa973 --- /dev/null +++ b/ecc/bls12-381/fflonk/doc.go @@ -0,0 +1,21 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package fflonk provides fflonk commitment, based on shplonk. +// +// See https://eprint.iacr.org/2020/081.pdf for shplonk +// See https://eprint.iacr.org/2021/1167.pdf for fflonk. +package fflonk diff --git a/ecc/bls12-381/fflonk/example_test.go b/ecc/bls12-381/fflonk/example_test.go new file mode 100644 index 000000000..61bd6079f --- /dev/null +++ b/ecc/bls12-381/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bls12-381/fflonk/fflonk.go b/ecc/bls12-381/fflonk/fflonk.go new file mode 100644 index 000000000..293d58f18 --- /dev/null +++ b/ecc/bls12-381/fflonk/fflonk.go @@ -0,0 +1,290 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + "github.com/consensys/gnark-crypto/ecc/bls12-381/shplonk" +) + +var ( + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") +) + +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element +} + +// FoldAndCommit commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i the remaining polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } + } + + // step 2: fold polynomials + foldedPolynomials := make([][]fr.Element, len(p)) + for i := 0; i < len(p); i++ { + foldedPolynomials[i] = Fold(p[i]) + } + + // step 4: compute the associated roots, that is for each point p corresponding + // to a pack i of polynomials, we extend to if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return err +} + +// utils + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := fft.GeneratorFullMultiplicativeGroup() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") + } + return i +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/ecc/bls12-381/fflonk/fflonk_test.go b/ecc/bls12-381/fflonk/fflonk_test.go new file mode 100644 index 000000000..ad8519fed --- /dev/null +++ b/ecc/bls12-381/fflonk/fflonk_test.go @@ -0,0 +1,136 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 600 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = FoldAndCommit(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} 
+ +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) + } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// 
buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) + } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bls12-381/shplonk/shplonk_test.go b/ecc/bls12-381/shplonk/shplonk_test.go new file mode 100644 index 000000000..e2295bbe3 --- /dev/null +++ b/ecc/bls12-381/shplonk/shplonk_test.go @@ -0,0 +1,348 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "math/rand" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls12-381" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + var frAlpha fr.Element + frAlpha.SetRandom() + bAlpha = big.NewInt(0) + frAlpha.BigInt(bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } +} + +func TestSerialization(t *testing.T) { + + _, _, g, _ := bls12381.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + 
points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < 
nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + 
t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bls24-315/fflonk/doc.go b/ecc/bls24-315/fflonk/doc.go new file mode 100644 index 000000000..694eaa973 --- /dev/null +++ b/ecc/bls24-315/fflonk/doc.go @@ -0,0 +1,21 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package fflonk provides fflonk commitment, based on shplonk. +// +// See https://eprint.iacr.org/2020/081.pdf for shplonk +// See https://eprint.iacr.org/2021/1167.pdf for fflonk. +package fflonk diff --git a/ecc/bls24-315/fflonk/example_test.go b/ecc/bls24-315/fflonk/example_test.go new file mode 100644 index 000000000..ffc84295c --- /dev/null +++ b/ecc/bls24-315/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bls24-315/fflonk/fflonk.go b/ecc/bls24-315/fflonk/fflonk.go new file mode 100644 index 000000000..d33a0cac6 --- /dev/null +++ b/ecc/bls24-315/fflonk/fflonk.go @@ -0,0 +1,290 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + "github.com/consensys/gnark-crypto/ecc/bls24-315/shplonk" +) + +var ( + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") +) + +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element +} + +// FoldAndCommit commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i the remaining polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } + } + + // step 2: fold polynomials + foldedPolynomials := make([][]fr.Element, len(p)) + for i := 0; i < len(p); i++ { + foldedPolynomials[i] = Fold(p[i]) + } + + // step 4: compute the associated roots, that is for each point p corresponding + // to a pack i of polynomials, we extend to if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return err +} + +// utils + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := fft.GeneratorFullMultiplicativeGroup() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") + } + return i +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/ecc/bls24-315/fflonk/fflonk_test.go b/ecc/bls24-315/fflonk/fflonk_test.go new file mode 100644 index 000000000..aed103091 --- /dev/null +++ b/ecc/bls24-315/fflonk/fflonk_test.go @@ -0,0 +1,136 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 600 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = FoldAndCommit(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} 
+ +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) + } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// 
buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) + } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bls24-315/shplonk/shplonk_test.go b/ecc/bls24-315/shplonk/shplonk_test.go new file mode 100644 index 000000000..4f7f41f12 --- /dev/null +++ b/ecc/bls24-315/shplonk/shplonk_test.go @@ -0,0 +1,348 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "math/rand" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-315" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + var frAlpha fr.Element + frAlpha.SetRandom() + bAlpha = big.NewInt(0) + frAlpha.BigInt(bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } +} + +func TestSerialization(t *testing.T) { + + _, _, g, _ := bls24315.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + 
points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < 
nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + 
t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bls24-317/fflonk/doc.go b/ecc/bls24-317/fflonk/doc.go new file mode 100644 index 000000000..694eaa973 --- /dev/null +++ b/ecc/bls24-317/fflonk/doc.go @@ -0,0 +1,21 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package fflonk provides fflonk commitment, based on shplonk. +// +// See https://eprint.iacr.org/2020/081.pdf for shplonk +// See https://eprint.iacr.org/2021/1167.pdf for fflonk. +package fflonk diff --git a/ecc/bls24-317/fflonk/example_test.go b/ecc/bls24-317/fflonk/example_test.go new file mode 100644 index 000000000..3d58b2b5a --- /dev/null +++ b/ecc/bls24-317/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bls24-317/fflonk/fflonk.go b/ecc/bls24-317/fflonk/fflonk.go new file mode 100644 index 000000000..79bca3973 --- /dev/null +++ b/ecc/bls24-317/fflonk/fflonk.go @@ -0,0 +1,290 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + "github.com/consensys/gnark-crypto/ecc/bls24-317/shplonk" +) + +var ( + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") +) + +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element +} + +// FoldAndCommit commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i the remaining polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } + } + + // step 2: fold polynomials + foldedPolynomials := make([][]fr.Element, len(p)) + for i := 0; i < len(p); i++ { + foldedPolynomials[i] = Fold(p[i]) + } + + // step 4: compute the associated roots, that is for each point p corresponding + // to a pack i of polynomials, we extend to if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return err +} + +// utils + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := fft.GeneratorFullMultiplicativeGroup() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") + } + return i +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/ecc/bls24-317/fflonk/fflonk_test.go b/ecc/bls24-317/fflonk/fflonk_test.go new file mode 100644 index 000000000..f6d821387 --- /dev/null +++ b/ecc/bls24-317/fflonk/fflonk_test.go @@ -0,0 +1,136 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 600 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = FoldAndCommit(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} 
+ +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) + } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// 
buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) + } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bls24-317/shplonk/shplonk_test.go b/ecc/bls24-317/shplonk/shplonk_test.go new file mode 100644 index 000000000..33d055815 --- /dev/null +++ b/ecc/bls24-317/shplonk/shplonk_test.go @@ -0,0 +1,348 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "math/rand" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bls24-317" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + var frAlpha fr.Element + frAlpha.SetRandom() + bAlpha = big.NewInt(0) + frAlpha.BigInt(bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } +} + +func TestSerialization(t *testing.T) { + + _, _, g, _ := bls24317.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + 
points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < 
nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + 
t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bn254/fflonk/doc.go b/ecc/bn254/fflonk/doc.go new file mode 100644 index 000000000..694eaa973 --- /dev/null +++ b/ecc/bn254/fflonk/doc.go @@ -0,0 +1,21 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package fflonk provides fflonk commitment, based on shplonk. +// +// See https://eprint.iacr.org/2020/081.pdf for shplonk +// See https://eprint.iacr.org/2021/1167.pdf for fflonk. +package fflonk diff --git a/ecc/bn254/fflonk/example_test.go b/ecc/bn254/fflonk/example_test.go new file mode 100644 index 000000000..a3c26b50b --- /dev/null +++ b/ecc/bn254/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bn254/fflonk/fflonk.go b/ecc/bn254/fflonk/fflonk.go new file mode 100644 index 000000000..dc7b99e70 --- /dev/null +++ b/ecc/bn254/fflonk/fflonk.go @@ -0,0 +1,290 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/consensys/gnark-crypto/ecc/bn254/shplonk" +) + +var ( + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") +) + +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element +} + +// FoldAndCommit commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i the remaining polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } + } + + // step 2: fold polynomials + foldedPolynomials := make([][]fr.Element, len(p)) + for i := 0; i < len(p); i++ { + foldedPolynomials[i] = Fold(p[i]) + } + + // step 4: compute the associated roots, that is for each point p corresponding + // to a pack i of polynomials, we extend to if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return err +} + +// utils + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := fft.GeneratorFullMultiplicativeGroup() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") + } + return i +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/ecc/bn254/fflonk/fflonk_test.go b/ecc/bn254/fflonk/fflonk_test.go new file mode 100644 index 000000000..7f9441f4f --- /dev/null +++ b/ecc/bn254/fflonk/fflonk_test.go @@ -0,0 +1,136 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 600 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = FoldAndCommit(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} 
+ +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) + } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// 
buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) + } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bn254/shplonk/shplonk_test.go b/ecc/bn254/shplonk/shplonk_test.go new file mode 100644 index 000000000..d0558c472 --- /dev/null +++ b/ecc/bn254/shplonk/shplonk_test.go @@ -0,0 +1,348 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "math/rand" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + var frAlpha fr.Element + frAlpha.SetRandom() + bAlpha = big.NewInt(0) + frAlpha.BigInt(bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } +} + +func TestSerialization(t *testing.T) { + + _, _, g, _ := bn254.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + 
points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < 
nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + 
t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bw6-633/fflonk/doc.go b/ecc/bw6-633/fflonk/doc.go new file mode 100644 index 000000000..694eaa973 --- /dev/null +++ b/ecc/bw6-633/fflonk/doc.go @@ -0,0 +1,21 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package fflonk provides fflonk commitment, based on shplonk. +// +// See https://eprint.iacr.org/2020/081.pdf for shplonk +// See https://eprint.iacr.org/2021/1167.pdf for fflonk. +package fflonk diff --git a/ecc/bw6-633/fflonk/example_test.go b/ecc/bw6-633/fflonk/example_test.go new file mode 100644 index 000000000..c02b2b8ae --- /dev/null +++ b/ecc/bw6-633/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bw6-633/fflonk/fflonk.go b/ecc/bw6-633/fflonk/fflonk.go new file mode 100644 index 000000000..a66233e78 --- /dev/null +++ b/ecc/bw6-633/fflonk/fflonk.go @@ -0,0 +1,290 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" + "github.com/consensys/gnark-crypto/ecc/bw6-633/shplonk" +) + +var ( + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") +) + +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element +} + +// FoldAndCommit commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i the remaining polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } + } + + // step 2: fold polynomials + foldedPolynomials := make([][]fr.Element, len(p)) + for i := 0; i < len(p); i++ { + foldedPolynomials[i] = Fold(p[i]) + } + + // step 4: compute the associated roots, that is for each point p corresponding + // to a pack i of polynomials, we extend to if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return err +} + +// utils + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := fft.GeneratorFullMultiplicativeGroup() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials > 0 { + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials == 0 { + panic("did not find any divisor of r-1") + } + return i +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/ecc/bw6-633/fflonk/fflonk_test.go b/ecc/bw6-633/fflonk/fflonk_test.go new file mode 100644 index 000000000..ae1755145 --- /dev/null +++ b/ecc/bw6-633/fflonk/fflonk_test.go @@ -0,0 +1,136 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 600 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = FoldAndCommit(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} 
+ +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) + } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// 
buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) + } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bw6-633/shplonk/shplonk_test.go b/ecc/bw6-633/shplonk/shplonk_test.go new file mode 100644 index 000000000..2cc4d27f5 --- /dev/null +++ b/ecc/bw6-633/shplonk/shplonk_test.go @@ -0,0 +1,348 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "math/rand" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-633" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + var frAlpha fr.Element + frAlpha.SetRandom() + bAlpha = big.NewInt(0) + frAlpha.BigInt(bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } +} + +func TestSerialization(t *testing.T) { + + _, _, g, _ := bw6633.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + 
points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < 
nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + 
t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/ecc/bw6-761/fflonk/doc.go b/ecc/bw6-761/fflonk/doc.go new file mode 100644 index 000000000..694eaa973 --- /dev/null +++ b/ecc/bw6-761/fflonk/doc.go @@ -0,0 +1,21 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +// Package fflonk provides fflonk commitment, based on shplonk. +// +// See https://eprint.iacr.org/2020/081.pdf for shplonk +// See https://eprint.iacr.org/2021/1167.pdf for fflonk. +package fflonk diff --git a/ecc/bw6-761/fflonk/example_test.go b/ecc/bw6-761/fflonk/example_test.go new file mode 100644 index 000000000..ea3597710 --- /dev/null +++ b/ecc/bw6-761/fflonk/example_test.go @@ -0,0 +1,84 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" +) + +// This example demonstrates how to open a list of polynomials on a list of points. +func Example_batchOpen() { + + // sample a list of polynomials, we have 5 packs of polynomials, + // each pack will be opened on its own set of points. + nbPacks := 5 + + // The first set of polynomials contains 2 polynomials, the second 3, etc. + // The i-th set of polynomials is opened on the i-th set of points. The first + // set of point contains 4 points, the second 5, etc. 
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6} + nbPointsPerPack := []int{4, 5, 6, 7, 8} + points := make([][]fr.Element, nbPacks) + polynomials := make([][][]fr.Element, nbPacks) + for i := 0; i < nbPacks; i++ { + polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i]) + for j := 0; j < nbPointsPerPack[i]; j++ { + + // random size for the polynomials + polynomials[i][j] = make([]fr.Element, j+10) + } + + // random number of points per pack + points[i] = make([]fr.Element, i+5) + } + + // commit to the folded Polynomials. In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/ecc/bw6-761/fflonk/fflonk.go b/ecc/bw6-761/fflonk/fflonk.go new file mode 100644 index 000000000..3a18d58b6 --- /dev/null +++ b/ecc/bw6-761/fflonk/fflonk.go @@ -0,0 +1,290 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + "github.com/consensys/gnark-crypto/ecc/bw6-761/shplonk" +) + +var ( + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") +) + +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element +} + +// FoldAndCommit commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i the remaining polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } + } + + // step 2: fold polynomials + foldedPolynomials := make([][]fr.Element, len(p)) + for i := 0; i < len(p); i++ { + foldedPolynomials[i] = Fold(p[i]) + } + + // step 4: compute the associated roots, that is for each point p corresponding + // to a pack i of polynomials, we extend to if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...)
+
+ return err
+}
+
+// utils
+
+// getIthRootOne returns a primitive i-th root of unity in fr (a generator of the order-i subgroup of fr*); it errors with ErrRootsOne unless i divides r-1
+func getIthRootOne(i int) (fr.Element, error) {
+ var omega fr.Element
+ var tmpBigInt, zeroBigInt big.Int
+ oneBigInt := big.NewInt(1)
+ zeroBigInt.SetUint64(0)
+ rMinusOneBigInt := fr.Modulus()
+ rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt)
+ tmpBigInt.SetUint64(uint64(i))
+ tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt)
+ if tmpBigInt.Cmp(&zeroBigInt) != 0 {
+ return omega, ErrRootsOne
+ }
+ genFrStar := fft.GeneratorFullMultiplicativeGroup()
+ tmpBigInt.SetUint64(uint64(i))
+ tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt)
+ omega.Exp(genFrStar, &tmpBigInt)
+ return omega, nil
+}
+
+// computes the smallest i bounding above number_polynomials
+// and dividing r-1.
+func getNextDivisorRMinusOne(i int) int {
+ var zero, tmp, one big.Int
+ r := fr.Modulus()
+ one.SetUint64(1)
+ r.Sub(r, &one)
+ tmp.SetUint64(uint64(i))
+ tmp.Mod(r, &tmp)
+ nbTrials := 100 // prevent DOS attack if the prime is not smooth
+ for tmp.Cmp(&zero) != 0 && nbTrials > 0 {
+ i += 1
+ tmp.SetUint64(uint64(i))
+ tmp.Mod(r, &tmp)
+ nbTrials--
+ }
+ if nbTrials == 0 {
+ panic("did not find any divisor of r-1")
+ }
+ return i
+}
+
+// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} diff --git a/ecc/bw6-761/fflonk/fflonk_test.go b/ecc/bw6-761/fflonk/fflonk_test.go new file mode 100644 index 000000000..0802648d0 --- /dev/null +++ b/ecc/bw6-761/fflonk/fflonk_test.go @@ -0,0 +1,136 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package fflonk + +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 600 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = FoldAndCommit(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} 
+ +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) + } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// 
buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) + } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/ecc/bw6-761/shplonk/shplonk_test.go b/ecc/bw6-761/shplonk/shplonk_test.go new file mode 100644 index 000000000..bcdbccea6 --- /dev/null +++ b/ecc/bw6-761/shplonk/shplonk_test.go @@ -0,0 +1,348 @@ +// Copyright 2020 Consensys Software Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package shplonk + +import ( + "crypto/sha256" + "math/big" + "math/rand" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 230 + var frAlpha fr.Element + frAlpha.SetRandom() + bAlpha = big.NewInt(0) + frAlpha.BigInt(bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err != nil { + panic(err) + } +} + +func TestSerialization(t *testing.T) { + + _, _, g, _ := bw6761.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + 
points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < 
nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + 
t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + + // division by a degree > 1 polynomial + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +} diff --git a/internal/generator/ecc/template/marshal.go.tmpl b/internal/generator/ecc/template/marshal.go.tmpl index 1483e0bd7..d26de5fa7 100644 --- a/internal/generator/ecc/template/marshal.go.tmpl +++ b/internal/generator/ecc/template/marshal.go.tmpl @@ -176,6 +176,26 @@ func (dec *Decoder) Decode(v interface{}) (err error) { dec.n+=read64 } return + case *[][][]fr.Element: + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len(*t) != int(sliceLen) { + *t = make([][][]fr.Element, sliceLen) + } + for i := range *t { + if sliceLen, err = dec.readUint32(); err != nil { + return + } + if len((*t)[i]) != int(sliceLen) { + (*t)[i] = make([][]fr.Element, sliceLen) + } + for j := range (*t)[i] { + read64, err = (*fr.Vector)(&(*t)[i][j]).ReadFrom(dec.r) + dec.n += read64 
+ } + } + return case *G1Affine: // we start by reading compressed point size, if metadata tells us it is uncompressed, we read more. read, err = io.ReadFull(dec.r, buf[:SizeOfG1AffineCompressed]) @@ -606,6 +626,25 @@ func (enc *Encoder) encode{{- $.Raw}}(v interface{}) (err error) { enc.n += written64 } return + case [][][]fr.Element: + // number of collections + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))); err != nil { + return + } + enc.n += 4 + for i := range t { + // size of current collection + if err = binary.Write(enc.w, binary.BigEndian, uint32(len(t[i]))); err != nil { + return + } + enc.n += 4 + // write each vector of the current collection + for j := range t[i] { + written64, err = (*fr.Vector)(&t[i][j]).WriteTo(enc.w) + enc.n += written64 + } + } + return case []G1Affine: // write slice length err = binary.Write(enc.w, binary.BigEndian, uint32(len(t))) diff --git a/internal/generator/ecc/template/tests/marshal.go.tmpl b/internal/generator/ecc/template/tests/marshal.go.tmpl index eb214a00d..fd613092a 100644 --- a/internal/generator/ecc/template/tests/marshal.go.tmpl +++ b/internal/generator/ecc/template/tests/marshal.go.tmpl @@ -45,6 +45,7 @@ func TestEncoder(t *testing.T) { var inK fr.Vector var inL [][]fr.Element var inM [][]uint64 + var inN [][][]fr.Element // set values of inputs inA = rand.Uint64() //#nosec G404 weak rng is fine here @@ -63,12 +64,22 @@ func TestEncoder(t *testing.T) { inK[41].SetUint64(42) inL =[][]fr.Element {inJ, inK} inM = [][]uint64{ {1, 2}, {4}, {} } + inN = make([][][]fr.Element, 4) + for i := 0; i < 4; i++ { + inN[i] = make([][]fr.Element, i+2) + for j := 0; j < i+2; j++ { + inN[i][j] = make([]fr.Element, j+3) + for k := 0; k < j+3; k++ { + inN[i][j][k].SetRandom() + } + } + } // encode them, compressed and raw var buf, bufRaw bytes.Buffer enc := NewEncoder(&buf) encRaw := NewEncoder(&bufRaw, RawEncoding()) - toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM} 
+ toEncode := []interface{}{inA, &inB, &inC, &inD, &inE, &inF, inG, inH, inI, inJ, inK, inL, inM, inN} for _, v := range toEncode { if err := enc.Encode(v); err != nil { t.Fatal(err) @@ -96,8 +107,9 @@ func TestEncoder(t *testing.T) { var outK fr.Vector var outL [][]fr.Element var outM [][]uint64 + var outN [][][]fr.Element - toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM} + toDecode := []interface{}{&outA, &outB, &outC, &outD, &outE, &outF, &outG, &outH, &outI, &outJ, &outK, &outL, &outM, &outN} for _, v := range toDecode { if err := dec.Decode(v); err != nil { t.Fatal(err) @@ -143,6 +155,9 @@ func TestEncoder(t *testing.T) { if !reflect.DeepEqual(inM, outM) { t.Fatal("decode(encode(slice²(uint64))) failed") } + if !reflect.DeepEqual(inN, outN) { + t.Fatal("decode(encode(slice^{3}(uint64))) failed") + } if n != dec.BytesRead() { t.Fatal("bytes read don't match bytes written") } diff --git a/internal/generator/fflonk/generator.go b/internal/generator/fflonk/generator.go new file mode 100644 index 000000000..5eba0c6a9 --- /dev/null +++ b/internal/generator/fflonk/generator.go @@ -0,0 +1,23 @@ +package fflonk + +import ( + "path/filepath" + + "github.com/consensys/bavard" + "github.com/consensys/gnark-crypto/internal/generator/config" +) + +func Generate(conf config.Curve, baseDir string, bgen *bavard.BatchGenerator) error { + + // kzg commitment scheme + conf.Package = "fflonk" + entries := []bavard.Entry{ + {File: filepath.Join(baseDir, "doc.go"), Templates: []string{"doc.go.tmpl"}}, + {File: filepath.Join(baseDir, "fflonk.go"), Templates: []string{"fflonk.go.tmpl"}}, + {File: filepath.Join(baseDir, "fflonk_test.go"), Templates: []string{"fflonk.test.go.tmpl"}}, + {File: filepath.Join(baseDir, "marshal.go"), Templates: []string{"marshal.go.tmpl"}}, + {File: filepath.Join(baseDir, "example_test.go"), Templates: []string{"example_test.go.tmpl"}}, + } + return bgen.Generate(conf, conf.Package, 
"./fflonk/template/", entries...)
+
+}
diff --git a/internal/generator/fflonk/template/doc.go.tmpl b/internal/generator/fflonk/template/doc.go.tmpl
new file mode 100644
index 000000000..382fe8636
--- /dev/null
+++ b/internal/generator/fflonk/template/doc.go.tmpl
@@ -0,0 +1,5 @@
+// Package {{.Package}} provides fflonk commitment, based on shplonk.
+//
+// See https://eprint.iacr.org/2020/081.pdf for shplonk
+// See https://eprint.iacr.org/2021/1167.pdf for fflonk.
+package {{.Package}}
\ No newline at end of file
diff --git a/internal/generator/fflonk/template/example_test.go.tmpl b/internal/generator/fflonk/template/example_test.go.tmpl
new file mode 100644
index 000000000..050a5f23e
--- /dev/null
+++ b/internal/generator/fflonk/template/example_test.go.tmpl
@@ -0,0 +1,66 @@
+import (
+ "crypto/sha256"
+
+ "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr"
+ "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg"
+)
+
+// This example demonstrates how to open a list of polynomials on a list of points.
+func Example_batchOpen() {
+
+ // sample a list of polynomials, we have 5 packs of polynomials,
+ // each pack will be opened on its own set of points.
+ nbPacks := 5
+
+ // The first set of polynomials contains 2 polynomials, the second 3, etc.
+ // The i-th set of polynomials is opened on the i-th set of points. The first
+ // set of point contains 4 points, the second 5, etc.
+ nbPolynomialsPerPack := []int{2, 3, 4, 5, 6}
+ nbPointsPerPack := []int{4, 5, 6, 7, 8}
+ points := make([][]fr.Element, nbPacks)
+ polynomials := make([][][]fr.Element, nbPacks)
+ for i := 0; i < nbPacks; i++ {
+ polynomials[i] = make([][]fr.Element, nbPolynomialsPerPack[i])
+ for j := 0; j < nbPolynomialsPerPack[i]; j++ {
+
+ // random size for the polynomials
+ polynomials[i][j] = make([]fr.Element, j+10)
+ }
+
+ // number of points of the current pack
+ points[i] = make([]fr.Element, nbPointsPerPack[i])
+ }
+
+ // commit to the folded Polynomials. 
In each pack, we fold the polynomials in a similar way + // as in the FFT. If the given pack contains 3 polynomials P1,P2,P3, the folded polynomial + // that we commit to is P1(X^t)+XP2(X^t)+X^2P3(X^t) where t is the smallest number dividing + // r-1 bounding above the number of polynomials, which is 3 here. + var err error + digests := make([]kzg.Digest, nbPacks) + for i := 0; i < nbPacks; i++ { + digests[i], err = FoldAndCommit(polynomials[i], testSrs.Pk) + if err != nil { + panic(err) + } + } + + // compute the opening proof. We first pick a hash function that will be used for the FS challenge + // derivation. + hf := sha256.New() + proof, err := BatchOpen(polynomials, digests, points, hf, testSrs.Pk) + if err != nil { + panic(err) + } + + // Check the opening proof. The claimed values of the i-th pack of polynomials are the evaluation + // of the i-th pack of polynomials, evaluated on the t-th powers of points[i], where t is the smallest + // integer bounding above the number of polynomials in the pack that divides r-1, the field on which + // the polynomials are defined. + // + // For instance, proof.ClaimedValues[i][j][k] contains the evaluation of the j-th polynomial of the i-th + // pack, on points[i][k]^t, where t is defined as above. 
+ err = BatchVerify(proof, digests, points, hf, testSrs.Vk) + if err != nil { + panic(err) + } +} diff --git a/internal/generator/fflonk/template/fflonk.go.tmpl b/internal/generator/fflonk/template/fflonk.go.tmpl new file mode 100644 index 000000000..f3baa753f --- /dev/null +++ b/internal/generator/fflonk/template/fflonk.go.tmpl @@ -0,0 +1,272 @@ +import ( + "errors" + "hash" + "math/big" + + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr/fft" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/shplonk" +) + +var ( + ErrRootsOne = errors.New("fr does not contain all the t-th roots of 1") + ErrNbPolynomialsNbPoints = errors.New("the number of packs of polynomials should be the same as the number of pack of points") + ErrInonsistentFolding = errors.New("the outer claimed values are not consistent with the shplonk proof") + ErrInconsistentNumberFoldedPoints = errors.New("the number of outer claimed values is inconsistent with the number of claimed values in the shplonk proof") +) + +// Opening fflonk proof for opening a list of list of polynomials ((fʲᵢ)ᵢ)ⱼ where each +// pack of polynomials (fʲᵢ)ᵢ (the pack is indexed by j) is opened on a powers of elements in +// the set Sʲ (indexed by j), where the power is |(fʲᵢ)ᵢ|. 
+// +// implements io.ReaderFrom and io.WriterTo +type OpeningProof struct { + + // shplonk opening proof of the folded polynomials + SOpeningProof shplonk.OpeningProof + + // ClaimedValues ClaimedValues[i][j] contains the values + // of fʲᵢ on Sⱼ^{|(fʲᵢ)ᵢ|} + ClaimedValues [][][]fr.Element +} + +// FoldAndCommit commits to a list of polynomial by intertwinning them like in the FFT, that is +// returns ∑_{i the remaining polynomials are zero + res.ClaimedValues[i][j] = make([]fr.Element, len(points[i])) + } + } + + // step 2: fold polynomials + foldedPolynomials := make([][]fr.Element, len(p)) + for i := 0; i < len(p); i++ { + foldedPolynomials[i] = Fold(p[i]) + } + + // step 4: compute the associated roots, that is for each point p corresponding + // to a pack i of polynomials, we extend to if + // the i-th pack contains t polynomials where ω is a t-th root of 1 + newPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(p); i++ { + newPoints[i], err = extendSet(points[i], nextDivisorRminusOnePerPack[i]) + if err != nil { + return res, err + } + } + + // step 5: shplonk open the list of single polynomials on the new sets + res.SOpeningProof, err = shplonk.BatchOpen(foldedPolynomials, digests, newPoints, hf, pk, dataTranscript...) + + return res, err + +} + +// BatchVerify uses a proof to check that each digest digests[i] is correctly opened on the set points[i]. +// The digests are the commitments to the folded underlying polynomials. The shplonk proof is +// verified directly using the embedded shplonk proof. This function only computes the consistency +// between the claimed values of the underlying shplonk proof and the outer claimed values, using the fft-like +// folding. Namely, the outer claimed values are the evaluation of the original polynomials (so before they +// were folded) at the relevant powers of the points. 
+func BatchVerify(proof OpeningProof, digests []kzg.Digest, points [][]fr.Element, hf hash.Hash, vk kzg.VerifyingKey, dataTranscript ...[]byte) error { + + // step 0: consistency checks between the folded claimed values of shplonk and the claimed + // values at the powers of the Sᵢ + for i := 0; i < len(proof.ClaimedValues); i++ { + sizeSi := len(proof.ClaimedValues[i][0]) + for j := 1; j < len(proof.ClaimedValues[i]); j++ { + // each set of opening must be of the same size (opeings on powers of Si) + if sizeSi != len(proof.ClaimedValues[i][j]) { + return ErrNbPolynomialsNbPoints + } + } + currNbPolynomials := len(proof.ClaimedValues[i]) + sizeSi = sizeSi * currNbPolynomials + // |originalPolynomials_{i}|x|Sᵢ| == |foldedPolynomials|x|folded Sᵢ| + if sizeSi != len(proof.SOpeningProof.ClaimedValues[i]) { + return ErrInconsistentNumberFoldedPoints + } + } + + // step 1: fold the outer claimed values and check that they correspond to the + // shplonk claimed values + var curFoldedClaimedValue, omgeaiPoint fr.Element + for i := 0; i < len(proof.ClaimedValues); i++ { + t := len(proof.ClaimedValues[i]) + omega, err := getIthRootOne(t) + if err != nil { + return err + } + sizeSi := len(proof.ClaimedValues[i][0]) + polyClaimedValues := make([]fr.Element, t) + for j := 0; j < sizeSi; j++ { + for k := 0; k < t; k++ { + polyClaimedValues[k].Set(&proof.ClaimedValues[i][k][j]) + } + omgeaiPoint.Set(&points[i][j]) + for l := 0; l < t; l++ { + curFoldedClaimedValue = eval(polyClaimedValues, omgeaiPoint) + if !curFoldedClaimedValue.Equal(&proof.SOpeningProof.ClaimedValues[i][j*t+l]) { + return ErrInonsistentFolding + } + omgeaiPoint.Mul(&omgeaiPoint, &omega) + } + } + } + + // step 2: verify the embedded shplonk proof + extendedPoints := make([][]fr.Element, len(points)) + var err error + for i := 0; i < len(points); i++ { + t := len(proof.ClaimedValues[i]) + extendedPoints[i], err = extendSet(points[i], t) + if err != nil { + return err + } + } + err = 
shplonk.BatchVerify(proof.SOpeningProof, digests, extendedPoints, hf, vk, dataTranscript...) + + return err +} + +// utils + +// getIthRootOne returns a generator of Z/iZ +func getIthRootOne(i int) (fr.Element, error) { + var omega fr.Element + var tmpBigInt, zeroBigInt big.Int + oneBigInt := big.NewInt(1) + zeroBigInt.SetUint64(0) + rMinusOneBigInt := fr.Modulus() + rMinusOneBigInt.Sub(rMinusOneBigInt, oneBigInt) + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Mod(rMinusOneBigInt, &tmpBigInt) + if tmpBigInt.Cmp(&zeroBigInt) != 0 { + return omega, ErrRootsOne + } + genFrStar := fft.GeneratorFullMultiplicativeGroup() + tmpBigInt.SetUint64(uint64(i)) + tmpBigInt.Div(rMinusOneBigInt, &tmpBigInt) + omega.Exp(genFrStar, &tmpBigInt) + return omega, nil +} + +// computes the smallest i bounding above number_polynomials +// and dividing r-1. +func getNextDivisorRMinusOne(i int) int { + var zero, tmp, one big.Int + r := fr.Modulus() + one.SetUint64(1) + r.Sub(r, &one) + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials := 100 // prevent DOS attack if the prime is not smooth + for tmp.Cmp(&zero) != 0 && nbTrials>0{ + i += 1 + tmp.SetUint64(uint64(i)) + tmp.Mod(r, &tmp) + nbTrials-- + } + if nbTrials==0 { + panic("did not find any divisor of r-1") + } + return i +} + +// extendSet returns [p[0], ω p[0], .. ,ωᵗ⁻¹p[0],p[1],..,ωᵗ⁻¹p[1],..] 
+func extendSet(p []fr.Element, t int) ([]fr.Element, error) { + + omega, err := getIthRootOne(t) + if err != nil { + return nil, err + } + nbPoints := len(p) + newPoints := make([]fr.Element, t*nbPoints) + for i := 0; i < nbPoints; i++ { + newPoints[i*t].Set(&p[i]) + for k := 1; k < t; k++ { + newPoints[i*t+k].Mul(&newPoints[i*t+k-1], &omega) + } + } + + return newPoints, nil +} + +func eval(f []fr.Element, x fr.Element) fr.Element { + var y fr.Element + for i := len(f) - 1; i >= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} \ No newline at end of file diff --git a/internal/generator/fflonk/template/fflonk.test.go.tmpl b/internal/generator/fflonk/template/fflonk.test.go.tmpl new file mode 100644 index 000000000..b35a53230 --- /dev/null +++ b/internal/generator/fflonk/template/fflonk.test.go.tmpl @@ -0,0 +1,118 @@ +import ( + "crypto/sha256" + "math/big" + "testing" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + +func init() { + const srsSize = 600 + bAlpha = new(big.Int).SetInt64(42) // randomise ? 
+ testSrs, _ = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) +} + +func TestFflonk(t *testing.T) { + + assert := require.New(t) + + // sample random polynomials of various sizes + nbSets := 5 + p := make([][][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + nbPolysInSet := 9 + p[i] = make([][]fr.Element, nbPolysInSet) + for j := 0; j < nbPolysInSet; j++ { + curSizePoly := j + 10 + p[i][j] = make([]fr.Element, curSizePoly) + for k := 0; k < curSizePoly; k++ { + p[i][j][k].SetRandom() + } + } + } + + // sample random sets Sᵢ + x := make([][]fr.Element, nbSets) + for i := 0; i < nbSets; i++ { + curSetSize := i + 4 + x[i] = make([]fr.Element, curSetSize) + for j := 0; j < curSetSize; j++ { + x[i][j].SetRandom() + } + } + + // commit to the folded polynomials + digests := make([]kzg.Digest, nbSets) + var err error + for i := 0; i < nbSets; i++ { + digests[i], err = FoldAndCommit(p[i], testSrs.Pk) + assert.NoError(err) + } + + // compute flonk opening proof + hf := sha256.New() + proof, err := BatchOpen(p, digests, x, hf, testSrs.Pk) + assert.NoError(err) + + // check opening proof + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.NoError(err) + + // tamper the proof + proof.ClaimedValues[0][0][0].SetRandom() + err = BatchVerify(proof, digests, x, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestCommit(t *testing.T) { + + assert := require.New(t) + + // sample polynomials + nbPolys := 2 + p := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + p[i] = make([]fr.Element, i+10) + for j := 0; j < i+10; j++ { + p[i][j].SetRandom() + } + } + + // fflonk commit to them + var x fr.Element + x.SetRandom() + proof, err := kzg.Open(Fold(p), x, testSrs.Pk) + assert.NoError(err) + + // check that Open(C, x) = ∑_{i= 0; i-- { + y.Mul(&y, &x).Add(&y, &f[i]) + } + return y +} + +// returns γ*f, re-using f +func mulByConstant(f []fr.Element, gamma fr.Element) []fr.Element { + for i := 0; i < len(f); i++ { + f[i].Mul(&f[i], &gamma) + } + return f +} 
+ +// computes f <- (x-a)*f +// memory of f is re used, need to pass a copy to not modify it +func multiplyLinearFactor(f []fr.Element, a fr.Element) []fr.Element { + s := len(f) + var tmp fr.Element + f = append(f, fr.NewElement(0)) + f[s] = f[s-1] + for i := s - 1; i >= 1; i-- { + tmp.Mul(&f[i], &a) + f[i].Sub(&f[i-1], &tmp) + } + f[0].Mul(&f[0], &a).Neg(&f[0]) + return f +} + +// returns S_{T\Sᵢ} where Sᵢ=x[i] +func buildZtMinusSi(x [][]fr.Element, i int) []fr.Element { + nbPoints := 0 + for i := 0; i < len(x); i++ { + nbPoints += len(x[i]) + } + bufPoints := make([]fr.Element, 0, nbPoints-len(x[i])) + for j := 0; j < i; j++ { + bufPoints = append(bufPoints, x[j]...) + } + for j := i + 1; j < len(x); j++ { + bufPoints = append(bufPoints, x[j]...) + } + ztMinusSi := buildVanishingPoly(bufPoints) + return ztMinusSi +} + +// returns πᵢ(X-xᵢ) +func buildVanishingPoly(x []fr.Element) []fr.Element { + res := make([]fr.Element, 1, len(x)+1) + res[0].SetOne() + for i := 0; i < len(x); i++ { + res = multiplyLinearFactor(res, x[i]) + } + return res +} + +// returns f such that f(xᵢ)=yᵢ, x and y are assumed to be of the same size +func interpolate(x, y []fr.Element) []fr.Element { + + res := make([]fr.Element, len(x)) + for i := 0; i < len(x); i++ { + li := buildLagrangeFromDomain(x, i) + li = mulByConstant(li, y[i]) + for j := 0; j < len(x); j++ { + res[j].Add(&res[j], &li[j]) + } + } + + return res +} + +// returns f such that f(xⱼ)=δⁱⱼ +func buildLagrangeFromDomain(x []fr.Element, i int) []fr.Element { + xx := make([]fr.Element, len(x)-1) + copy(xx, x[:i]) + copy(xx[i:], x[i+1:]) + res := buildVanishingPoly(xx) + d := eval(res, x[i]) + d.Inverse(&d) + res = mulByConstant(res, d) + return res +} + +// returns f-g, the memory of f is re used, deg(g) << deg(f) here +func sub(f, g []fr.Element) []fr.Element { + for i := 0; i < len(g); i++ { + f[i].Sub(&f[i], &g[i]) + } + return f +} + +// returns f*g using naive multiplication +// deg(f)>>deg(g), deg(small) =~ 10 max +// 
buf is used as a buffer and should not be f or g +// f and g are not modified +func mul(f, g []fr.Element, res []fr.Element) []fr.Element { + + sizeRes := len(f) + len(g) - 1 + if len(res) < sizeRes { + s := make([]fr.Element, sizeRes-len(res)) + res = append(res, s...) + } + setZero(res) + + var tmp fr.Element + for i := 0; i < len(g); i++ { + for j := 0; j < len(f); j++ { + tmp.Mul(&f[j], &g[i]) + res[j+i].Add(&res[j+i], &tmp) + } + } + return res +} + +// returns f/g (assuming g divides f) +// OK to not use fft if deg(g) is small +// g's leading coefficient is assumed to be 1 +// f memory is re-used for the result, need to pass a copy to not modify it +func div(f, g []fr.Element) []fr.Element { + sizef := len(f) + sizeg := len(g) + stop := sizeg - +1 + var t fr.Element + for i := sizef - 2; i >= stop; i-- { + for j := 0; j < sizeg-1; j++ { + t.Mul(&f[i+1], &g[sizeg-2-j]) + f[i-j].Sub(&f[i-j], &t) + } + } + return f[sizeg-1:] +} diff --git a/internal/generator/shplonk/template/shplonk.test.go.tmpl b/internal/generator/shplonk/template/shplonk.test.go.tmpl new file mode 100644 index 000000000..d8ff5b8ad --- /dev/null +++ b/internal/generator/shplonk/template/shplonk.test.go.tmpl @@ -0,0 +1,331 @@ +import ( + "crypto/sha256" + "math/big" + "testing" + "math/rand" + + "github.com/consensys/gnark-crypto/ecc" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/fr" + "github.com/consensys/gnark-crypto/ecc/{{ .Name }}/kzg" + "github.com/consensys/gnark-crypto/utils/testutils" + "github.com/stretchr/testify/require" +) + +// Test SRS re-used across tests of the KZG scheme +var testSrs *kzg.SRS +var bAlpha *big.Int + + +func init() { + const srsSize = 230 + var frAlpha fr.Element + frAlpha.SetRandom() + bAlpha = big.NewInt(0) + frAlpha.BigInt(bAlpha) + var err error + testSrs, err = kzg.NewSRS(ecc.NextPowerOfTwo(srsSize), bAlpha) + if err!=nil { + panic(err) + } +} + +func TestSerialization(t *testing.T) { + + _, _, 
g, _ := {{ .CurvePackage }}.Generators() + var proof OpeningProof + proof.W.Set(&g) + proof.WPrime.Set(&g) + nbClaimedValues := 10 + proof.ClaimedValues = make([][]fr.Element, nbClaimedValues) + for i := 0; i < nbClaimedValues; i++ { + proof.ClaimedValues[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + proof.ClaimedValues[i][j].SetRandom() + } + } + + t.Run("opening proof round trip", testutils.SerializationRoundTrip(&proof)) +} + +func TestOpening(t *testing.T) { + + assert := require.New(t) + + nbPolys := 2 + sizePoly := make([]int, nbPolys) + for i := 0; i < nbPolys; i++ { + sizePoly[i] = rand.Intn(10) + 2 //nolint: gosec // G404, no strong PRNG needed here + } + polys := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + polys[i] = make([]fr.Element, sizePoly[i]) + for j := 0; j < sizePoly[i]; j++ { + polys[i][j].SetRandom() + } + } + + digests := make([]kzg.Digest, nbPolys) + for i := 0; i < nbPolys; i++ { + digests[i], _ = kzg.Commit(polys[i], testSrs.Pk) + } + + points := make([][]fr.Element, nbPolys) + for i := 0; i < nbPolys; i++ { + points[i] = make([]fr.Element, i+2) + for j := 0; j < i+2; j++ { + points[i][j].SetRandom() + } + } + + hf := sha256.New() + + // correct proof + openingProof, err := BatchOpen(polys, digests, points, hf, testSrs.Pk) + assert.NoError(err) + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.NoError(err) + + // tampered proof + openingProof.ClaimedValues[0][0].SetRandom() + err = BatchVerify(openingProof, digests, points, hf, testSrs.Vk) + assert.Error(err) + +} + +func TestBuildZtMinusSi(t *testing.T) { + + nbSi := 10 + points := make([][]fr.Element, nbSi) + sizeSi := make([]int, nbSi) + nbPoints := 0 + for i := 0; i < nbSi; i++ { + sizeSi[i] = 5 + i + nbPoints += sizeSi[i] + points[i] = make([]fr.Element, sizeSi[i]) + for j := 0; j < sizeSi[i]; j++ { + points[i][j].SetRandom() + } + } + for i := 0; i < nbSi; i++ { + ztMinusSi := buildZtMinusSi(points, i) + if len(ztMinusSi) != 
nbPoints-sizeSi[i]+1 { + t.Fatal("deg(Z_{T-S_{i}}) should be nbPoints-size(S_{i})") + } + for j := 0; j < nbSi; j++ { + if j == i { + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{i}) should not be zero") + } + } + continue + } + for k := 0; k < sizeSi[j]; k++ { + y := eval(ztMinusSi, points[j][k]) + if !y.IsZero() { + t.Fatal("Z_{T-S_{i}}(S_{j}) should be zero") + } + } + } + } + +} + +func TestInterpolate(t *testing.T) { + + nbPoints := 10 + x := make([]fr.Element, nbPoints) + y := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + y[i].SetRandom() + } + f := interpolate(x, y) + for i := 0; i < nbPoints; i++ { + fx := eval(f, x[i]) + if !fx.Equal(&y[i]) { + t.Fatal("f(x_{i})!=y_{i}") + } + } + +} + +func TestBuildLagrangeFromDomain(t *testing.T) { + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + var r fr.Element + for i := 0; i < nbPoints; i++ { + + l := buildLagrangeFromDomain(points, i) + + // check that l(xᵢ)=1 and l(xⱼ)=0 for j!=i + for j := 0; j < nbPoints; j++ { + y := eval(l, points[j]) + if i == j { + if !y.IsOne() { + t.Fatal("l_{i}(x_{i}) should be equal to 1") + } + } else { + if !y.IsZero() { + t.Fatal("l_{i}(x_{j}) where i!=j should be equal to 0") + } + } + } + r.SetRandom() + y := eval(l, r) + if y.IsZero() { + t.Fatal("l_{i}(x) should not be zero if x is random") + } + } + +} + +func TestBuildVanishingPoly(t *testing.T) { + s := 10 + x := make([]fr.Element, s) + for i := 0; i < s; i++ { + x[i].SetRandom() + } + r := buildVanishingPoly(x) + + if len(r) != s+1 { + t.Fatal("error degree r") + } + + // check that r(xᵢ)=0 for all i + for i := 0; i < len(x); i++ { + y := eval(r, x[i]) + if !y.IsZero() { + t.Fatal("πᵢ(X-xᵢ) at xᵢ should be zero") + } + } + + // check that r(y)!=0 for a random point + var a fr.Element + a.SetRandom() + y := eval(r, a) + if y.IsZero() { + 
t.Fatal("πᵢ(X-xᵢ) at r \neq xᵢ should not be zero") + } +} + +func TestMultiplyLinearFactor(t *testing.T) { + + s := 10 + f := make([]fr.Element, s, s+1) + for i := 0; i < 10; i++ { + f[i].SetRandom() + } + + var a, y fr.Element + a.SetRandom() + f = multiplyLinearFactor(f, a) + y = eval(f, a) + if !y.IsZero() { + t.Fatal("(X-a)f(X) should be zero at a") + } + a.SetRandom() + y = eval(f, a) + if y.IsZero() { + t.Fatal("(X-1)f(X) at a random point should not be zero") + } + +} + +func TestNaiveMul(t *testing.T) { + + size := 10 + f := make([]fr.Element, size) + for i := 0; i < size; i++ { + f[i].SetRandom() + } + + nbPoints := 10 + points := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + points[i].SetRandom() + } + + v := buildVanishingPoly(points) + buf := make([]fr.Element, size+nbPoints-1) + g := mul(f, v, buf) + + // check that g(xᵢ) = 0 + for i := 0; i < nbPoints; i++ { + y := eval(g, points[i]) + if !y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at x_{i} should be zero") + } + } + + // check that g(r) != 0 for a random point + var a fr.Element + a.SetRandom() + y := eval(g, a) + if y.IsZero() { + t.Fatal("f(X)(X-x_{1})..(X-x_{n}) at a random point should not be zero") + } + +} + +func TestDiv(t *testing.T) { + + nbPoints := 10 + s := 10 + f := make([]fr.Element, s, s+nbPoints) + for i := 0; i < s; i++ { + f[i].SetRandom() + } + + // backup + g := make([]fr.Element, s) + copy(g, f) + + // successive divions of linear terms + x := make([]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + q := make([][2]fr.Element, nbPoints) + for i := 0; i < nbPoints; i++ { + q[i][1].SetOne() + q[i][0].Neg(&x[i]) + f = div(f, q[i][:]) + } + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + + // division by a degree > 1 polynomial + for 
i := 0; i < nbPoints; i++ { + x[i].SetRandom() + f = multiplyLinearFactor(f, x[i]) + } + r := buildVanishingPoly(x) + f = div(f, r) + + // g should be equal to f + if len(f) != len(g) { + t.Fatal("lengths don't match") + } + for i := 0; i < len(g); i++ { + if !f[i].Equal(&g[i]) { + t.Fatal("f(x)(x-a)/(x-a) should be equal to f(x)") + } + } + +}