From 2858a224c878de47cfd40e66ce4c8963a44b5463 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 10 Oct 2024 11:50:41 -0500 Subject: [PATCH 01/10] remove zombie code: rand in InitPhase1 --- backend/groth16/bn254/mpcsetup/phase1.go | 9 --------- 1 file changed, 9 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index a912a473aa..e09e34b0ce 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -52,15 +52,6 @@ type Phase1 struct { func InitPhase1(power int) (phase1 Phase1) { N := int(math.Pow(2, float64(power))) - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - // First contribution use generators _, _, g1, g2 := curve.Generators() phase1.Parameters.G2.Beta.Set(&g2) From 3477517d49ad686696e3b8d8511fa677870bc11e Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 10 Oct 2024 12:13:12 -0500 Subject: [PATCH 02/10] docs comment genR --- backend/groth16/bn254/mpcsetup/utils.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index e3b47d1121..f455173232 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -156,6 +156,8 @@ func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { } // Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) +// it is to be used as a challenge for generating a proof of knowledge to x +// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) From 2e425fc40bf766f68931393b0b073a2fb06e92fb Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 11 Oct 2024 14:29:30 -0500 Subject: [PATCH 03/10] revert bring init back --- backend/groth16/bn254/mpcsetup/phase1.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index e09e34b0ce..a912a473aa 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -52,6 +52,15 @@ type Phase1 struct { func InitPhase1(power int) (phase1 Phase1) { N := int(math.Pow(2, float64(power))) + // Generate key pairs + var tau, alpha, beta fr.Element + tau.SetOne() + alpha.SetOne() + beta.SetOne() + phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) + phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) + phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) + // First contribution use generators _, _, g1, g2 := curve.Generators() phase1.Parameters.G2.Beta.Set(&g2) From 2719edb918720627d29d27fca45189d478971352 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 11 Oct 2024 15:29:14 -0500 Subject: [PATCH 04/10] refactor: updateValue and verify --- backend/groth16/bn254/mpcsetup/utils.go | 121 +++++++++++++++++++++--- 1 file changed, 110 insertions(+), 11 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index f455173232..e4bd05fa0d 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go 
+++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -18,9 +18,11 @@ package mpcsetup import ( "bytes" + "crypto/rand" "math/big" "math/bits" "runtime" + "time" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" @@ -31,7 +33,7 @@ import ( type PublicKey struct { SG curve.G1Affine SXG curve.G1Affine - XR curve.G2Affine + XR curve.G2Affine // XR = X.R ∈ 𝔾₂ proof of knowledge } func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { @@ -69,7 +71,7 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form +// Returns [1, a, a², ..., aⁿ⁻¹ ] func powers(a fr.Element, n int) []fr.Element { result := make([]fr.Element, n) result[0] = fr.NewElement(1) @@ -79,7 +81,7 @@ func powers(a fr.Element, n int) []fr.Element { return result } -// Returns [aᵢAᵢ, ...] in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -90,7 +92,7 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -101,16 +103,22 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } +/* // Check e(a₁, a₂) = e(b₁, b₂) func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { panic("invalid point not in subgroup") } - var na2 curve.G2Affine - na2.Neg(&a2) + return sameRatioUnsafe(a1, b1, a2, b2) +}*/ + +// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. +func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { + var nd1 curve.G1Affine + nd1.Neg(&d1) res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) + []curve.G1Affine{n1, nd1}, + []curve.G2Affine{d2, n2}) if err != nil { panic(err) } @@ -129,7 +137,7 @@ func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 +// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁∈𝔾₁ func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { nc := runtime.NumCPU() n := len(A) @@ -142,7 +150,7 @@ func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 +// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁∈𝔾₂ func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { nc := runtime.NumCPU() n := len(A) @@ -155,7 +163,7 @@ func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { return } -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) +// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) // it is to be used as a challenge for generating a proof of knowledge to x // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { @@ -170,3 +178,94 @@ func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { } return spG2 } + +type RandomBeacon func(time.Time) []byte + +// func (rb RandomBeacon) GenerateChallenge(...) []byte {} + +type pair struct { + g1 curve.G1Affine + g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. +} + +// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero +func (p *pair) validUpdate() bool { + // if the contribution is 0 the product is doomed to be 0. 
+ // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail + return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) +} + +type valueUpdate struct { + contributionCommitment curve.G1Affine // x or [Xⱼ]₁ + contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ + updatedCommitment pair // [X₁..Xⱼ] +} + +// updateValue produces values associated with contribution to an existing value. +// if prevCommitment contains only a 𝔾₁ value, then so will updatedCommitment +func updateValue(prevCommitment pair, challenge []byte, dst byte) valueUpdate { + var x valueUpdate + contributionValue, err := rand.Int(rand.Reader, fr.Modulus()) + + eraseToxicWaste := func() { + if contributionValue == nil { + return + } + for i := range contributionValue.Bits() { // TODO check that this works + contributionValue.Bits()[i] = 0 + } + } + defer eraseToxicWaste() + + if err != nil { + panic(err) + } + + _, _, g1, _ := curve.Generators() + x.contributionCommitment.ScalarMultiplication(&g1, contributionValue) + x.updatedCommitment.g1.ScalarMultiplication(&prevCommitment.g1, contributionValue) + if prevCommitment.g2 != nil { // TODO make sure this is correct + x.updatedCommitment.g2 = new(curve.G2Affine).ScalarMultiplication(prevCommitment.g2, contributionValue) + } + + // proof of knowledge to commitment. Algorithm 3 from section 3.7 + pokBase := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // r + x.contributionPok.ScalarMultiplication(&pokBase, contributionValue) + + return x +} + +// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 +// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution +// and previous commitment makes the new commitment. +// prevCommitment is assumed to be valid. No subgroup check and the like. +func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) bool { + noG2 := prevCommitment.g2 == nil + if noG2 != (x.updatedCommitment.g2 == nil) { // no erasing or creating g2 values + return false + } + + if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !x.updatedCommitment.validUpdate() { + return false + } + + // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 + r := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // verification challenge in the form of a g2 base + _, _, g1, _ := curve.Generators() + if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π ?= x.r i.e. x/g1 =? π/r + return false + } + + // check that the updated/previous ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
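// (unpacking the sameRatioUnsafe call below: it checks e(updated₁, previous₂) = e(previous₁, updated₂),
// which holds iff updated₁/previous₁ = updated₂/previous₂, i.e. the 𝔾₁ and 𝔾₂ commitments were updated by the same factor)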
+ if !noG2 && !sameRatioUnsafe(x.updatedCommitment.g1, prevCommitment.g1, *x.updatedCommitment.g2, *prevCommitment.g2) { + return false + } + + // now verify that updated₁/previous₁ = x ( = x/g1 = π/r ) + // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values + if !sameRatioUnsafe(x.updatedCommitment.g1, prevCommitment.g1, x.contributionPok, r) { + return false + } + + return true +} From 7c93ec0e415cd8982ebf13db3c375767e02ef833 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 11 Oct 2024 15:43:56 -0500 Subject: [PATCH 05/10] feat new phase struct --- backend/groth16/bn254/mpcsetup/phase1.go | 3 +++ backend/groth16/bn254/mpcsetup/utils.go | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index a912a473aa..1095392b36 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -25,6 +25,9 @@ import ( "math/big" ) +type phase1 struct { +} + // Phase1 represents the Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index e4bd05fa0d..e45ea811a2 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -252,7 +252,7 @@ func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) bo // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() - if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π ?= x.r i.e. x/g1 =? π/r + if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? 
π/r return false } From c35033691005ec06fd4f0f422a5a39117e566feb Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 11 Oct 2024 18:04:06 -0500 Subject: [PATCH 06/10] refactor phase1 --- backend/groth16/bn254/mpcsetup/phase1.go | 204 +++++++++++++++++++---- backend/groth16/bn254/mpcsetup/utils.go | 101 ++++++----- 2 files changed, 231 insertions(+), 74 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 1095392b36..afed0e31a6 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -19,13 +19,89 @@ package mpcsetup import ( "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "math" "math/big" + "runtime" + "sync" ) +// Phase1 represents the Phase1 of the MPC described in +// https://eprint.iacr.org/2017/1050.pdf +// +// Also known as "Powers of Tau" type phase1 struct { + Principal struct { // "main" contributions + Tau, Alpha, Beta valueUpdate + } + G1Derived struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} + } + G2Derived struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} + } + Challenge []byte // Hash of the transcript PRIOR to this participant +} + +func eraseBigInts(i ...*big.Int) { + for _, i := range i { + if i != nil { + for j := range i.Bits() { + i.Bits()[j] = 0 + } + } + } +} + +func eraseFrVectors(v ...[]fr.Element) { + for _, v := range v { + for i := range v { + v[i].SetZero() + } + } +} + +// Contribute contributes randomness to the phase1 object. This mutates phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *phase1) Contribute() { + N := len(p.G2Derived.Tau) + challenge := p.hash() + + // Generate main value updates + var tau, alpha, beta *big.Int + p.Principal.Tau, tau = updateValue(p.Principal.Tau.updatedCommitment, challenge, 1) + p.Principal.Alpha, alpha = updateValue(p.Principal.Alpha.updatedCommitment, challenge, 2) + p.Principal.Beta, beta = updateValue(p.Principal.Beta.updatedCommitment, challenge, 3) + + defer eraseBigInts(tau, alpha, beta) + + // Compute τ, ατ, and βτ + taus := powers(tau, 2*N-1) + alphaTau := make([]fr.Element, N) + betaTau := make([]fr.Element, N) + + defer eraseFrVectors(taus, alphaTau, betaTau) + + alphaTau[0].SetBigInt(alpha) + betaTau[0].SetBigInt(beta) + for i := 1; i < N; i++ { + alphaTau[i].Mul(&taus[i], &alphaTau[0]) + betaTau[i].Mul(&taus[i], &betaTau[0]) + } + + // Update using previous parameters + // TODO @gbotrel working with jacobian points here will help with perf. + scaleG1InPlace(p.G1Derived.Tau, taus) + scaleG2InPlace(p.G2Derived.Tau, taus[0:N]) + scaleG1InPlace(p.G1Derived.AlphaTau, alphaTau) + scaleG1InPlace(p.G1Derived.BetaTau, betaTau) + + p.Challenge = challenge } // Phase1 represents the Phase1 of the MPC described in @@ -88,42 +164,6 @@ func InitPhase1(power int) (phase1 Phase1) { return } -// Contribute contributes randomness to the phase1 object. This mutates phase1. 
-func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() -} - func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { contribs := append([]*Phase1{c0, c1}, c...) for i := 0; i < len(contribs)-1; i++ { @@ -134,6 +174,97 @@ func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { return nil } +// Verify assumes previous is correct +func (p *phase1) Verify(previous *phase1) error { + + if err := p.Principal.Tau.verify(previous.Principal.Tau.updatedCommitment, p.Challenge, 1); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) + } + if err := p.Principal.Alpha.verify(previous.Principal.Alpha.updatedCommitment, p.Challenge, 2); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) + } + if err := p.Principal.Beta.verify(previous.Principal.Beta.updatedCommitment, p.Challenge, 3); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) + } + + if !areInSubGroupG1(p.G1Derived.Tau) || !areInSubGroupG1(p.G1Derived.BetaTau) || !areInSubGroupG1(p.G1Derived.AlphaTau) { + return errors.New("derived values 𝔾₁ subgroup check failed") + } + if !areInSubGroupG2(p.G2Derived.Tau) { + return errors.New("derived values 𝔾₂ subgroup check failed") + } + + _, _, g1, g2 := curve.Generators() + + // for 1 ≤ i ≤ 2N-3 we want to check τⁱ⁺¹/τⁱ = τ + // i.e. e(τⁱ⁺¹,[1]₂) = e(τⁱ,[τ]₂). Due to bi-linearity we can instead check + // e(∑rⁱ⁻¹τⁱ⁺¹,[1]₂) = e(∑rⁱ⁻¹τⁱ,[τ]₂), which is tantamount to the check + // ∑rⁱ⁻¹τⁱ⁺¹ / ∑rⁱ⁻¹τⁱ = τ + r := linearCombCoeffs(len(p.G1Derived.Tau) - 1) // the longest of all lengths + // will be reusing the coefficient TODO @Tabaie make sure that's okay + nc := runtime.NumCPU() + var ( + tauT1, tauS1, alphaTT, alphaTS, betaTT, betaTS curve.G1Affine + tauT2, tauS2 curve.G2Affine + wg sync.WaitGroup + ) + + mulExpG1 := func(v *curve.G1Affine, points []curve.G1Affine, nbTasks int) { + if _, err := v.MultiExp(points, r[:len(points)], ecc.MultiExpConfig{NbTasks: nbTasks}); err != nil { + panic(err) + } + wg.Done() + } + + mulExpG2 := func(v *curve.G2Affine, points []curve.G2Affine, nbTasks int) { + if _, err := v.MultiExp(points, r[:len(points)], ecc.MultiExpConfig{NbTasks: nbTasks}); err != nil { + panic(err) + } + wg.Done() + } + + if nc < 2 { + mulExpG1(&tauT1, truncate(p.G1Derived.Tau), nc) + mulExpG1(&tauS1, p.G1Derived.Tau[1:], nc) + } else { + // larger tasks than the others. 
better get them done together + wg.Add(2) + go mulExpG1(&tauT1, truncate(p.G1Derived.Tau), nc/2) // truncated: smaller powers + mulExpG1(&tauS1, p.G1Derived.Tau[1:], nc - nc/2) // shifted: larger powers + wg.Wait() + } + + if nc < 4 { + mulExpG1(&alphaTT, truncate(p.G1Derived.AlphaTau), nc) + mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc) + mulExpG1(&betaTT, truncate(p.G1Derived.BetaTau), nc) + mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc) + } else { + wg.Add(4) + go mulExpG1(&alphaTT, truncate(p.G1Derived.AlphaTau), nc/4) + go mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc/2 - nc/4) + go mulExpG1(&betaTT, truncate(p.G1Derived.BetaTau), nc/4) + mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc - nc/2 - nc/4) + wg.Wait() + } + + + + if err := tauT1.MultiExp.G1Derived.Tau[:len(p.G1Derived.Tau)-1], r, ecc.MultiExpConfig{NbTasks: nc/2}) + + tauT1, tauS1 := linearCombinationG1(r, p.G1Derived.Tau[1:]) // at this point we should already know that tau[0] = infty and tau[1] = τ. ReadFrom is in charge of ensuring that. + + + if !sameRatioUnsafe(tauS1, tauT1, *p.Principal.Tau.updatedCommitment.g2, g2) { + return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") + } + tauT2, tauS2 := linearCombinationG2(r, p.G2Derived.Tau[1:]) + if !sameRatioUnsafe(p.Principal.Tau.updatedCommitment.g1, g1, tauS2, tauT2) { + return errors.New("couldn't verify 𝔾₂ representations of the τⁱ") + } + +} + // verifyPhase1 checks that a contribution is based on a known previous Phase1 state. func verifyPhase1(current, contribution *Phase1) error { // Compute R for τ, α, β @@ -153,6 +284,7 @@ func verifyPhase1(current, contribution *Phase1) error { } // Check for valid updates using previous parameters + // if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { return errors.New("couldn't verify that [τ]₁ is based on previous contribution") } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index e45ea811a2..499fda08d1 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -19,6 +19,7 @@ package mpcsetup import ( "bytes" "crypto/rand" + "errors" "math/big" "math/bits" "runtime" @@ -71,12 +72,22 @@ func bitReverse[T any](a []T) { } } +func linearCombCoeffs(n int) []fr.Element { + a, err := rand.Int(rand.Reader, fr.Modulus()) + if err != nil { + panic(err) + } + return powers(a, n) +} + // Returns [1, a, a², ..., aⁿ⁻¹ ] -func powers(a fr.Element, n int) []fr.Element { +func powers(a *big.Int, n int) []fr.Element { + var aMont fr.Element + aMont.SetBigInt(a) result := make([]fr.Element, n) result[0] = fr.NewElement(1) for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) + result[i].Mul(&result[i-1], &aMont) } return result } @@ -137,29 +148,23 @@ func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁∈𝔾₁ -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() +// truncated = ∑ rᵢAᵢ, shifted = ∑ rᵢAᵢ₊₁∈𝔾₁ +func linearCombinationG1(r []fr.Element, A []curve.G1Affine, nbTasks int) curve.G1Affine { n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + r = r[:n-1] + var res curve.G1Affine + res.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + shifted.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) 
return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁∈𝔾₂ -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { +// truncated = ∑ rᵢAᵢ, shifted = ∑ rᵢAᵢ₊₁∈𝔾₂ +func linearCombinationG2(r []fr.Element, A []curve.G2Affine) (truncated, shifted curve.G2Affine) { nc := runtime.NumCPU() n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + r = r[:n-1] + truncated.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + shifted.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) return } @@ -203,20 +208,11 @@ type valueUpdate struct { // updateValue produces values associated with contribution to an existing value. // if prevCommitment contains only a 𝔾₁ value, then so will updatedCommitment -func updateValue(prevCommitment pair, challenge []byte, dst byte) valueUpdate { +// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. +func updateValue(prevCommitment pair, challenge []byte, dst byte) (valueUpdate, *big.Int) { var x valueUpdate contributionValue, err := rand.Int(rand.Reader, fr.Modulus()) - eraseToxicWaste := func() { - if contributionValue == nil { - return - } - for i := range contributionValue.Bits() { // TODO check that this works - contributionValue.Bits()[i] = 0 - } - } - defer eraseToxicWaste() - if err != nil { panic(err) } @@ -232,40 +228,69 @@ func updateValue(prevCommitment pair, challenge []byte, dst byte) valueUpdate { pokBase := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // r x.contributionPok.ScalarMultiplication(&pokBase, contributionValue) - return x + return x, contributionValue } // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. // prevCommitment is assumed to be valid. No subgroup check and the like. -func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) bool { +func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) error { noG2 := prevCommitment.g2 == nil - if noG2 != (x.updatedCommitment.g2 == nil) { // no erasing or creating g2 values - return false + if noG2 != (x.updatedCommitment.g2 == nil) { + return errors.New("erasing or creating g2 values") } if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !x.updatedCommitment.validUpdate() { - return false + return errors.New("contribution values subgroup check failed") } // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r - return false + return errors.New("contribution proof of knowledge verification failed") } // check that the updated/previous ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
if !noG2 && !sameRatioUnsafe(x.updatedCommitment.g1, prevCommitment.g1, *x.updatedCommitment.g2, *prevCommitment.g2) { - return false + return errors.New("g2 update inconsistent") } // now verify that updated₁/previous₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values if !sameRatioUnsafe(x.updatedCommitment.g1, prevCommitment.g1, x.contributionPok, r) { - return false + return errors.New("g1 update inconsistent") } + return nil +} + +func toRefs[T any](s []T) []*T { + res := make([]*T, len(s)) + for i := range s { + res[i] = &s[i] + } + return res +} + +func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { + for i := range s { + if !s[i].IsInSubGroup() { + return false + } + } return true } + +func areInSubGroupG1(s []curve.G1Affine) bool { + return areInSubGroup(toRefs(s)) +} + +func areInSubGroupG2(s []curve.G2Affine) bool { + return areInSubGroup(toRefs(s)) +} + +func truncate[T any](s []T) []T { + return s[:len(s)-1] +} From fcde6650dd764c5161e0153f6b9864f55b8caf79 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 14 Oct 2024 16:55:43 -0500 Subject: [PATCH 07/10] feat complete phase1 verification --- backend/groth16/bn254/mpcsetup/phase1.go | 63 ++++++++++-------------- backend/groth16/bn254/mpcsetup/utils.go | 34 +++++++++++++ 2 files changed, 59 insertions(+), 38 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index afed0e31a6..34a157411f 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -202,11 +202,14 @@ func (p *phase1) Verify(previous *phase1) error { // ∑rⁱ⁻¹τⁱ⁺¹ / ∑rⁱ⁻¹τⁱ = τ r := linearCombCoeffs(len(p.G1Derived.Tau) - 1) // the longest of all lengths // will be reusing the coefficient TODO @Tabaie make sure that's okay + + tauT1, tauS1 := linearCombinationsG1(r, p.G1Derived.Tau) + tauT2, tauS2 := linearCombinationsG2(r, p.G2Derived.Tau) + nc := runtime.NumCPU() var ( - tauT1, tauS1, alphaTT, alphaTS, betaTT, betaTS curve.G1Affine - tauT2, tauS2 curve.G2Affine - wg sync.WaitGroup + alphaTS, betaTS curve.G1Affine + wg sync.WaitGroup ) mulExpG1 := func(v *curve.G1Affine, points []curve.G1Affine, nbTasks int) { @@ -216,53 +219,37 @@ func (p *phase1) Verify(previous *phase1) error { wg.Done() } - mulExpG2 := func(v *curve.G2Affine, points []curve.G2Affine, nbTasks int) { - if _, err := v.MultiExp(points, r[:len(points)], ecc.MultiExpConfig{NbTasks: nbTasks}); err != nil { - panic(err) - } - wg.Done() - } - - if nc < 2 { - mulExpG1(&tauT1, truncate(p.G1Derived.Tau), nc) - mulExpG1(&tauS1, p.G1Derived.Tau[1:], nc) - } else { - // larger tasks than the others. 
better get them done together - wg.Add(2) - go mulExpG1(&tauT1, truncate(p.G1Derived.Tau), nc/2) // truncated: smaller powers - mulExpG1(&tauS1, p.G1Derived.Tau[1:], nc - nc/2) // shifted: larger powers + if nc >= 2 { + wg.Add(2) // small tasks over 𝔾₁ + go mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc/2) + mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc-nc/2) wg.Wait() - } - - if nc < 4 { - mulExpG1(&alphaTT, truncate(p.G1Derived.AlphaTau), nc) + } else { mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc) - mulExpG1(&betaTT, truncate(p.G1Derived.BetaTau), nc) mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc) - } else { - wg.Add(4) - go mulExpG1(&alphaTT, truncate(p.G1Derived.AlphaTau), nc/4) - go mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc/2 - nc/4) - go mulExpG1(&betaTT, truncate(p.G1Derived.BetaTau), nc/4) - mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc - nc/2 - nc/4) - wg.Wait() } - - - if err := tauT1.MultiExp.G1Derived.Tau[:len(p.G1Derived.Tau)-1], r, ecc.MultiExpConfig{NbTasks: nc/2}) - - tauT1, tauS1 := linearCombinationG1(r, p.G1Derived.Tau[1:]) // at this point we should already know that tau[0] = infty and tau[1] = τ. ReadFrom is in charge of ensuring that. - - if !sameRatioUnsafe(tauS1, tauT1, *p.Principal.Tau.updatedCommitment.g2, g2) { return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") } - tauT2, tauS2 := linearCombinationG2(r, p.G2Derived.Tau[1:]) + if !sameRatioUnsafe(p.Principal.Tau.updatedCommitment.g1, g1, tauS2, tauT2) { return errors.New("couldn't verify 𝔾₂ representations of the τⁱ") } + // for 1 ≤ i < N we want to check ατⁱ/τⁱ = α + // with a similar bi-linearity argument as above we can do this with a single pairing check + // Note that the check at i = 0 is part of the well-formedness requirement and is not checked here, + // but guaranteed by ReadFrom. + + if !sameRatioUnsafe(alphaTS, tauS1, *p.Principal.Alpha.updatedCommitment.g2, g2) { + return errors.New("couldn't verify the ατⁱ") + } + if !sameRatioUnsafe(betaTS, tauS1, *p.Principal.Beta.updatedCommitment.g2, g2) { + return errors.New("couldn't verify the βτⁱ") + } + + return nil } // verifyPhase1 checks that a contribution is based on a known previous Phase1 state. diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 499fda08d1..6a446e3236 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -168,6 +168,40 @@ func linearCombinationG2(r []fr.Element, A []curve.G2Affine) (truncated, shifted return } +// linearCombinationsG1 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) +func linearCombinationsG1(rPowers []fr.Element, A []curve.G1Affine) (truncated, shifted curve.G1Affine) { + // the common section, 1 to N-2 + var common curve.G1Affine + common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}) // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] + + var c big.Int + rPowers[1].BigInt(&c) + truncated.ScalarMultiplication(&common, &c).Add(&truncated, &A[0]) // A[0] + r.A[1] + r².A[2] + ... 
+ rᴺ⁻².A[N-2] + + rPowers[len(A)-1].BigInt(&c) + shifted.ScalarMultiplication(&A[len(A)-1], &c).Add(&shifted, &common) + + return +} + +// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) +func linearCombinationsG2(rPowers []fr.Element, A []curve.G2Affine) (truncated, shifted curve.G2Affine) { + // the common section, 1 to N-2 + var common curve.G2Affine + common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}) // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] + + var c big.Int + rPowers[1].BigInt(&c) + truncated.ScalarMultiplication(&common, &c).Add(&truncated, &A[0]) // A[0] + r.A[1] + r².A[2] + ... + rᴺ⁻².A[N-2] + + rPowers[len(A)-1].BigInt(&c) + shifted.ScalarMultiplication(&A[len(A)-1], &c).Add(&shifted, &common) + + return +} + // Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) // it is to be used as a challenge for generating a proof of knowledge to x // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) From 6fc1ffe2e25596cd19e0936fbfcb2611c310c7d7 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 14 Oct 2024 17:30:13 -0500 Subject: [PATCH 08/10] fix check without assuming alpha, beta in G2 --- backend/groth16/bn254/mpcsetup/marshal.go | 70 ++++---- backend/groth16/bn254/mpcsetup/phase1.go | 172 ++++--------------- backend/groth16/bn254/mpcsetup/setup_test.go | 16 +- backend/groth16/bn254/mpcsetup/utils.go | 27 +++ 4 files changed, 99 insertions(+), 186 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 08cb2ae3d1..2985022bbd 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -22,31 +22,31 @@ import ( ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) +func (p *Phase1) WriteTo(writer io.Writer) (int64, error) { + n, err := p.writeTo(writer) if err != nil { return n, err } - nBytes, err := writer.Write(phase1.Hash) + nBytes, err := writer.Write(p.Hash) return int64(nBytes) + n, err } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { +func (p *Phase1) writeTo(writer io.Writer) (int64, error) { toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, + &p.PublicKeys.Tau.SG, + &p.PublicKeys.Tau.SXG, + &p.PublicKeys.Tau.XR, + &p.PublicKeys.Alpha.SG, + &p.PublicKeys.Alpha.SXG, + &p.PublicKeys.Alpha.XR, + &p.PublicKeys.Beta.SG, + &p.PublicKeys.Beta.SXG, + &p.PublicKeys.Beta.XR, + p.Parameters.G1.Tau, + p.Parameters.G1.AlphaTau, + p.Parameters.G1.BetaTau, + p.Parameters.G2.Tau, + &p.Parameters.G2.Beta, } enc := curve.NewEncoder(writer) @@ -59,22 +59,22 @@ func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { +func (p *Phase1) ReadFrom(reader io.Reader) (int64, error) { toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - 
&phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, + &p.PublicKeys.Tau.SG, + &p.PublicKeys.Tau.SXG, + &p.PublicKeys.Tau.XR, + &p.PublicKeys.Alpha.SG, + &p.PublicKeys.Alpha.SXG, + &p.PublicKeys.Alpha.XR, + &p.PublicKeys.Beta.SG, + &p.PublicKeys.Beta.SXG, + &p.PublicKeys.Beta.XR, + &p.Parameters.G1.Tau, + &p.Parameters.G1.AlphaTau, + &p.Parameters.G1.BetaTau, + &p.Parameters.G2.Tau, + &p.Parameters.G2.Beta, } dec := curve.NewDecoder(reader) @@ -83,8 +83,8 @@ func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { return dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) + p.Hash = make([]byte, 32) + nBytes, err := reader.Read(p.Hash) return dec.BytesRead() + int64(nBytes), err } diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 34a157411f..1099d727c2 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -17,23 +17,21 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" "fmt" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "math" "math/big" - "runtime" - "sync" ) // Phase1 represents the Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" -type phase1 struct { +type Phase1 struct { Principal struct { // "main" contributions Tau, Alpha, Beta valueUpdate } @@ -48,27 +46,9 @@ type phase1 struct { Challenge []byte // Hash of the transcript PRIOR to this participant } -func eraseBigInts(i ...*big.Int) { - for _, i := range i { - if i != nil { - for j := range i.Bits() { - i.Bits()[j] = 0 - } - } - } -} - -func eraseFrVectors(v ...[]fr.Element) { - for _, v := range v { - for i := range v { - v[i].SetZero() - } - } -} - -// Contribute contributes randomness to the phase1 object. This mutates phase1. +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. // p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. -func (p *phase1) Contribute() { +func (p *Phase1) Contribute() { N := len(p.G2Derived.Tau) challenge := p.hash() @@ -104,33 +84,16 @@ func (p *phase1) Contribute() { p.Challenge = challenge } -// Phase1 represents the Phase1 of the MPC described in -// https://eprint.iacr.org/2017/1050.pdf -// -// Also known as "Powers of Tau" -type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } - } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash -} - // InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before // any randomness contribution is made (see Contribute()). 
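// (Before any contribution these parameters amount to the trivial choice τ = α = β = 1, so every entry is simply a
// generator; the parameters only gain entropy once participants have called Contribute.)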
func InitPhase1(power int) (phase1 Phase1) { N := int(math.Pow(2, float64(power))) + _, _, g1, g2 := curve.Generators() + + phase1.Challenge = []byte{0} + phase1.Principal.Alpha.setEmpty() + // Generate key pairs var tau, alpha, beta fr.Element tau.SetOne() @@ -167,7 +130,7 @@ func InitPhase1(power int) (phase1 Phase1) { func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { contribs := append([]*Phase1{c0, c1}, c...) for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { + if err := contribs[i].Verify(contribs[i+1]); err != nil { return err } } @@ -175,7 +138,14 @@ func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { } // Verify assumes previous is correct -func (p *phase1) Verify(previous *phase1) error { +func (p *Phase1) Verify(previous *Phase1) error { + + if prevHash := previous.hash(); !bytes.Equal(p.Challenge, previous.hash()) { // if chain-verifying contributions, challenge fields are optional as they can be computed as we go + if len(p.Challenge) != 0 { + return errors.New("the challenge does not match the previous phase's hash") + } + p.Challenge = prevHash + } if err := p.Principal.Tau.verify(previous.Principal.Tau.updatedCommitment, p.Challenge, 1); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) @@ -205,29 +175,8 @@ func (p *phase1) Verify(previous *phase1) error { tauT1, tauS1 := linearCombinationsG1(r, p.G1Derived.Tau) tauT2, tauS2 := linearCombinationsG2(r, p.G2Derived.Tau) - - nc := runtime.NumCPU() - var ( - alphaTS, betaTS curve.G1Affine - wg sync.WaitGroup - ) - - mulExpG1 := func(v *curve.G1Affine, points []curve.G1Affine, nbTasks int) { - if _, err := v.MultiExp(points, r[:len(points)], ecc.MultiExpConfig{NbTasks: nbTasks}); err != nil { - panic(err) - } - wg.Done() - } - - if nc >= 2 { - wg.Add(2) // small tasks over 𝔾₁ - go mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc/2) - mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc-nc/2) - wg.Wait() - } else { - mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc) - mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc) - } + alphaTT, alphaTS := linearCombinationsG1(r, p.G1Derived.AlphaTau) + betaTT, betaTS := linearCombinationsG1(r, p.G1Derived.BetaTau) if !sameRatioUnsafe(tauS1, tauT1, *p.Principal.Tau.updatedCommitment.g2, g2) { return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") @@ -237,89 +186,26 @@ func (p *phase1) Verify(previous *phase1) error { return errors.New("couldn't verify 𝔾₂ representations of the τⁱ") } - // for 1 ≤ i < N we want to check ατⁱ/τⁱ = α + // for 0 ≤ i < N we want to check the ατⁱ + // By well-formedness checked by ReadFrom, we assume that ατ⁰ = α + // For 0 < i < N we check that ατⁱ/ατⁱ⁻¹ = τ, since we have a representation of τ in 𝔾₂ // with a similar bi-linearity argument as above we can do this with a single pairing check - // Note that the check at i = 0 is part of the well-formedness requirement and is not checked here, - // but guaranteed by ReadFrom. - if !sameRatioUnsafe(alphaTS, tauS1, *p.Principal.Alpha.updatedCommitment.g2, g2) { + if !sameRatioUnsafe(alphaTS, alphaTT, *p.Principal.Tau.updatedCommitment.g2, g2) { return errors.New("couldn't verify the ατⁱ") } - if !sameRatioUnsafe(betaTS, tauS1, *p.Principal.Beta.updatedCommitment.g2, g2) { + if !sameRatioUnsafe(betaTS, betaTT, *p.Principal.Tau.updatedCommitment.g2, g2) { return errors.New("couldn't verify the βτⁱ") } return nil } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. 
-func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) - - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") - } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") - } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") - } - - // Check for valid updates using previous parameters - // - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") - } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") - } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") - } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") +func (p *Phase1) hash() []byte { + if len(p.Challenge) == 0 { + panic("challenge field missing") } - - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") - } - - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") - } - } - - return nil -} - -func (phase1 *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + p.writeTo(sha) return sha.Sum(nil) 
} diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 63b717cac4..797af8119b 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -168,17 +168,17 @@ func (circuit *Circuit) Define(api frontend.API) error { return nil } -func (phase1 *Phase1) clone() Phase1 { +func (p *Phase1) clone() Phase1 { r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) + r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, p.Parameters.G1.Tau...) + r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, p.Parameters.G1.AlphaTau...) + r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, p.Parameters.G1.BetaTau...) - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta + r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, p.Parameters.G2.Tau...) + r.Parameters.G2.Beta = p.Parameters.G2.Beta - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) + r.PublicKeys = p.PublicKeys + r.Hash = append(r.Hash, p.Hash...) return r } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 6a446e3236..c1a334b739 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -300,6 +300,15 @@ func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) er return nil } +// setEmpty does not provide proofs, only sets the value to [1] +func (x *valueUpdate) setEmpty(g1Only bool) { + _, _, g1, g2 := curve.Generators() + x.updatedCommitment.g1.Set(&g1) + if !g1Only { + x.updatedCommitment.g2 = &g2 + } +} + func toRefs[T any](s []T) []*T { res := make([]*T, len(s)) for i := range s { @@ -328,3 +337,21 @@ func areInSubGroupG2(s []curve.G2Affine) bool { func truncate[T any](s []T) []T { return s[:len(s)-1] } + +func eraseBigInts(i ...*big.Int) { + for _, i := range i { + if i != nil { + for j := range i.Bits() { + i.Bits()[j] = 0 + } + } + } +} + +func eraseFrVectors(v ...[]fr.Element) { + for _, v := range v { + for i := range v { + v[i].SetZero() + } + } +} From 4601ed7be8b5acdeff97deee173ff1bd380bca07 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 14 Oct 2024 17:34:51 -0500 Subject: [PATCH 09/10] refactor initPhase1 --- .../groth16/bn254/mpcsetup/marshal_test.go | 2 +- backend/groth16/bn254/mpcsetup/phase1.go | 49 +++++++------------ backend/groth16/bn254/mpcsetup/setup_test.go | 8 +-- 3 files changed, 22 insertions(+), 37 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal_test.go b/backend/groth16/bn254/mpcsetup/marshal_test.go index 386e3faf66..c8d7a6b004 100644 --- a/backend/groth16/bn254/mpcsetup/marshal_test.go +++ b/backend/groth16/bn254/mpcsetup/marshal_test.go @@ -34,7 +34,7 @@ func TestContributionSerialization(t *testing.T) { assert := require.New(t) // Phase 1 - srs1 := InitPhase1(9) + srs1 := NewPhase1(9) srs1.Contribute() assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 1099d727c2..7eddd73e78 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ 
b/backend/groth16/bn254/mpcsetup/phase1.go @@ -84,46 +84,31 @@ func (p *Phase1) Contribute() { p.Challenge = challenge } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before +// NewPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before // any randomness contribution is made (see Contribute()). -func InitPhase1(power int) (phase1 Phase1) { +func NewPhase1(power int) (phase1 Phase1) { N := int(math.Pow(2, float64(power))) _, _, g1, g2 := curve.Generators() phase1.Challenge = []byte{0} - phase1.Principal.Alpha.setEmpty() - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) + phase1.Principal.Alpha.setEmpty(true) + phase1.Principal.Beta.setEmpty(true) + phase1.Principal.Tau.setEmpty(false) + + phase1.G1Derived.Tau = make([]curve.G1Affine, 2*N-1) + phase1.G2Derived.Tau = make([]curve.G2Affine, N) + phase1.G1Derived.AlphaTau = make([]curve.G1Affine, N) + phase1.G1Derived.BetaTau = make([]curve.G1Affine, N) + for i := range phase1.G1Derived.Tau { + phase1.G1Derived.Tau[i].Set(&g1) + } + for i := range phase1.G2Derived.Tau { + phase1.G2Derived.Tau[i].Set(&g2) + phase1.G1Derived.AlphaTau[i].Set(&g1) + phase1.G1Derived.BetaTau[i].Set(&g1) } - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - return } diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 797af8119b..3689969c69 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -40,7 +40,7 @@ func TestSetupCircuit(t *testing.T) { assert := require.New(t) - srs1 := InitPhase1(power) + srs1 := NewPhase1(power) // Make and verify contributions for phase1 for i := 1; i < nContributionsPhase1; i++ { @@ -104,12 +104,12 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + _ = NewPhase1(power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + srs1 := NewPhase1(power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -120,7 +120,7 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) + srs1 := NewPhase1(power) srs1.Contribute() var myCircuit Circuit From 9b82466305117a832de6fb3ee88981fd9683d24d Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Tue, 15 Oct 2024 15:45:13 -0500 Subject: [PATCH 10/10] feat phase1 marshal --- backend/groth16/bn254/mpcsetup/marshal.go | 98 ++++++++++--------- .../groth16/bn254/mpcsetup/marshal_test.go | 3 +- 
backend/groth16/bn254/mpcsetup/phase1.go | 51 +++++----- backend/groth16/bn254/mpcsetup/utils.go | 24 +---- 4 files changed, 82 insertions(+), 94 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 2985022bbd..34c7b95a23 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -21,36 +21,48 @@ import ( "io" ) -// WriteTo implements io.WriterTo -func (p *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := p.writeTo(writer) - if err != nil { - return n, err +func appendToSlice[T any](s []interface{}, v []T) []interface{} { + for i := range v { + s = append(s, v[i]) } - nBytes, err := writer.Write(p.Hash) - return int64(nBytes) + n, err + return s } -func (p *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &p.PublicKeys.Tau.SG, - &p.PublicKeys.Tau.SXG, - &p.PublicKeys.Tau.XR, - &p.PublicKeys.Alpha.SG, - &p.PublicKeys.Alpha.SXG, - &p.PublicKeys.Alpha.XR, - &p.PublicKeys.Beta.SG, - &p.PublicKeys.Beta.SXG, - &p.PublicKeys.Beta.XR, - p.Parameters.G1.Tau, - p.Parameters.G1.AlphaTau, - p.Parameters.G1.BetaTau, - p.Parameters.G2.Tau, - &p.Parameters.G2.Beta, +func (p *Phase1) toSlice() []interface{} { + N := len(p.G2Derived.Tau) + estimatedNbElems := 5*N + 5 + // size N 1 + // commitment, proof of knowledge, and 𝔾₁ representation for τ, α, and β 9 + // 𝔾₂ representation for τ and β 2 + // [τⁱ]₁ for 2 ≤ i ≤ 2N-2 2N-3 + // [τⁱ]₂ for 2 ≤ i ≤ N-1 N-2 + // [ατⁱ]₁ for 1 ≤ i ≤ N-1 N-1 + // [βτⁱ]₁ for 1 ≤ i ≤ N-1 N-1 + + toEncode := make([]interface{}, 1, estimatedNbElems) + + toEncode[0] = N + toEncode = p.Principal.Tau.appendRefsToSlice(toEncode) + toEncode = p.Principal.Alpha.appendRefsToSlice(toEncode) + toEncode = p.Principal.Beta.appendRefsToSlice(toEncode) + + toEncode = appendToSlice(toEncode, p.G1Derived.Tau[2:]) + toEncode = appendToSlice(toEncode, p.G2Derived.Tau[2:]) + toEncode = appendToSlice(toEncode, p.G1Derived.BetaTau[1:]) + toEncode = appendToSlice(toEncode, p.G1Derived.AlphaTau[1:]) + + if len(toEncode) != estimatedNbElems { + panic("incorrect length estimate") } + return toEncode +} + +// WriteTo implements io.WriterTo +func (p *Phase1) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.toSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } @@ -60,32 +72,21 @@ func (p *Phase1) writeTo(writer io.Writer) (int64, error) { // ReadFrom implements io.ReaderFrom func (p *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &p.PublicKeys.Tau.SG, - &p.PublicKeys.Tau.SXG, - &p.PublicKeys.Tau.XR, - &p.PublicKeys.Alpha.SG, - &p.PublicKeys.Alpha.SXG, - &p.PublicKeys.Alpha.XR, - &p.PublicKeys.Beta.SG, - &p.PublicKeys.Beta.SXG, - &p.PublicKeys.Beta.XR, - &p.Parameters.G1.Tau, - &p.Parameters.G1.AlphaTau, - &p.Parameters.G1.BetaTau, - &p.Parameters.G2.Tau, - &p.Parameters.G2.Beta, + var N uint64 + dec := curve.NewDecoder(reader) + if err := dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - dec := curve.NewDecoder(reader) - for _, v := range toEncode { + p.Initialize(N) + toDecode := p.toSlice() + + for _, v := range toDecode[1:] { // we've already decoded N if err := dec.Decode(v); err != nil { return dec.BytesRead(), err } } - p.Hash = make([]byte, 32) - nBytes, err := reader.Read(p.Hash) - return dec.BytesRead() + int64(nBytes), err + return dec.BytesRead(), nil } // WriteTo implements io.WriterTo 
@@ -179,3 +180,12 @@ func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { return dec.BytesRead(), nil } + +// appendRefsToSlice appends references to values in x to s +func (x *valueUpdate) appendRefsToSlice(s []interface{}) []interface{} { + s = append(s, &x.contributionCommitment, &x.contributionPok, &x.updatedCommitment.g1) + if x.updatedCommitment.g2 != nil { + return append(s, x.updatedCommitment.g2) + } + return s +} diff --git a/backend/groth16/bn254/mpcsetup/marshal_test.go b/backend/groth16/bn254/mpcsetup/marshal_test.go index c8d7a6b004..9b69bb8e91 100644 --- a/backend/groth16/bn254/mpcsetup/marshal_test.go +++ b/backend/groth16/bn254/mpcsetup/marshal_test.go @@ -34,7 +34,8 @@ func TestContributionSerialization(t *testing.T) { assert := require.New(t) // Phase 1 - srs1 := NewPhase1(9) + var srs1 Phase1 + srs1.Initialize(1 << 9) srs1.Contribute() assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 7eddd73e78..ba2c082902 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -23,7 +23,6 @@ import ( "fmt" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" - "math" "math/big" ) @@ -37,8 +36,8 @@ type Phase1 struct { } G1Derived struct { Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} + AlphaTau []curve.G1Affine // {[ατ⁰]₁, [ατ¹]₁, [ατ²]₁, …, [ατⁿ⁻¹]₁} + BetaTau []curve.G1Affine // {[βτ⁰]₁, [βτ¹]₁, [βτ²]₁, …, [βτⁿ⁻¹]₁} } G2Derived struct { Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} @@ -84,29 +83,26 @@ func (p *Phase1) Contribute() { p.Challenge = challenge } -// NewPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func NewPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - +// Initialize an empty object of size N +func (p *Phase1) Initialize(N uint64) { _, _, g1, g2 := curve.Generators() - phase1.Challenge = []byte{0} - phase1.Principal.Alpha.setEmpty(true) - phase1.Principal.Beta.setEmpty(true) - phase1.Principal.Tau.setEmpty(false) + p.Challenge = []byte{0} + p.Principal.Alpha.setEmpty(true) + p.Principal.Beta.setEmpty(false) + p.Principal.Tau.setEmpty(false) - phase1.G1Derived.Tau = make([]curve.G1Affine, 2*N-1) - phase1.G2Derived.Tau = make([]curve.G2Affine, N) - phase1.G1Derived.AlphaTau = make([]curve.G1Affine, N) - phase1.G1Derived.BetaTau = make([]curve.G1Affine, N) - for i := range phase1.G1Derived.Tau { - phase1.G1Derived.Tau[i].Set(&g1) + p.G1Derived.Tau = make([]curve.G1Affine, 2*N-1) + p.G2Derived.Tau = make([]curve.G2Affine, N) + p.G1Derived.AlphaTau = make([]curve.G1Affine, N) + p.G1Derived.BetaTau = make([]curve.G1Affine, N) + for i := range p.G1Derived.Tau { + p.G1Derived.Tau[i].Set(&g1) } - for i := range phase1.G2Derived.Tau { - phase1.G2Derived.Tau[i].Set(&g2) - phase1.G1Derived.AlphaTau[i].Set(&g1) - phase1.G1Derived.BetaTau[i].Set(&g1) + for i := range p.G2Derived.Tau { + p.G2Derived.Tau[i].Set(&g2) + p.G1Derived.AlphaTau[i].Set(&g1) + p.G1Derived.BetaTau[i].Set(&g1) } return @@ -158,10 +154,10 @@ func (p *Phase1) Verify(previous *Phase1) error { r := linearCombCoeffs(len(p.G1Derived.Tau) - 1) // the longest of all lengths // will be reusing the coefficient TODO @Tabaie make sure that's okay - tauT1, tauS1 := linearCombinationsG1(r, p.G1Derived.Tau) - tauT2, tauS2 := linearCombinationsG2(r, p.G2Derived.Tau) - alphaTT, alphaTS := linearCombinationsG1(r, p.G1Derived.AlphaTau) - betaTT, betaTS := linearCombinationsG1(r, p.G1Derived.BetaTau) + tauT1, tauS1 := linearCombinationsG1(p.G1Derived.Tau[1:], r) + tauT2, tauS2 := linearCombinationsG2(p.G2Derived.Tau[1:], r) + alphaTT, alphaTS := linearCombinationsG1(p.G1Derived.AlphaTau, r) + betaTT, betaTS := linearCombinationsG1(p.G1Derived.BetaTau, r) if !sameRatioUnsafe(tauS1, tauT1, *p.Principal.Tau.updatedCommitment.g2, g2) { return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") @@ -191,6 +187,7 @@ func (p *Phase1) hash() []byte { panic("challenge field missing") } sha := sha256.New() - p.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index c1a334b739..f6ba644893 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -148,29 +148,9 @@ func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { return } -// truncated = ∑ rᵢAᵢ, shifted = ∑ rᵢAᵢ₊₁∈𝔾₁ -func linearCombinationG1(r []fr.Element, A []curve.G1Affine, nbTasks int) curve.G1Affine { - n := len(A) - r = r[:n-1] - var res curve.G1Affine - res.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - shifted.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return -} - -// truncated = ∑ rᵢAᵢ, shifted = ∑ rᵢAᵢ₊₁∈𝔾₂ -func linearCombinationG2(r []fr.Element, A []curve.G2Affine) (truncated, shifted curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r = r[:n-1] - truncated.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - shifted.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return -} - // linearCombinationsG1 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i // Also assumed that 3 ≤ N ≔ len(A) ≤ 
len(rPowers) -func linearCombinationsG1(rPowers []fr.Element, A []curve.G1Affine) (truncated, shifted curve.G1Affine) { +func linearCombinationsG1(A []curve.G1Affine, rPowers []fr.Element) (truncated, shifted curve.G1Affine) { // the common section, 1 to N-2 var common curve.G1Affine common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}) // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] @@ -187,7 +167,7 @@ func linearCombinationsG1(rPowers []fr.Element, A []curve.G1Affine) (truncated, // linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i // Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) -func linearCombinationsG2(rPowers []fr.Element, A []curve.G2Affine) (truncated, shifted curve.G2Affine) { +func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { // the common section, 1 to N-2 var common curve.G2Affine common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}) // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2]