From c803e5f247eea4a87ff72e7903e5b5a5aeee497f Mon Sep 17 00:00:00 2001 From: hmiao <739025250@qq.com> Date: Wed, 7 Sep 2022 15:15:22 +0800 Subject: [PATCH 1/4] add zk verifier contract --- contracts/PlonkVerifier3.sol | 1066 ++++++++++++++++++++++++++++++ migrations/4_deploy_verifier3.js | 5 + {tests => test}/.gitkeep | 0 test/test_plonk_verifier3.js | 23 + 4 files changed, 1094 insertions(+) create mode 100644 contracts/PlonkVerifier3.sol create mode 100644 migrations/4_deploy_verifier3.js rename {tests => test}/.gitkeep (100%) create mode 100644 test/test_plonk_verifier3.js diff --git a/contracts/PlonkVerifier3.sol b/contracts/PlonkVerifier3.sol new file mode 100644 index 0000000..a24a1ee --- /dev/null +++ b/contracts/PlonkVerifier3.sol @@ -0,0 +1,1066 @@ +// SPDX-License-Identifier: GPL-3.0 +/* + Copyright 2021 0KIMS association. + + This file is generated with [snarkJS](https://github.com/iden3/snarkjs). + + snarkJS is a free software: you can redistribute it and/or modify it + under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + snarkJS is distributed in the hope that it will be useful, but WITHOUT + ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public + License for more details. + + You should have received a copy of the GNU General Public License + along with snarkJS. If not, see . +*/ + + +pragma solidity >=0.7.0 <0.9.0; + +contract PlonkVerifier3 { + + uint32 constant n = 524288; + uint16 constant nPublic = 8; + uint16 constant nLagrange = 8; + + uint256 constant Qmx = 8049082249034855461553496277350003968359739929829003548402482680003222719279; + uint256 constant Qmy = 11401275141395624835521281526873206564615125476805657905578425445103052605487; + uint256 constant Qlx = 6550213410460724735373151982106783680957637826527568355230010305865534870309; + uint256 constant Qly = 19342011125595154532627509134964990063074216114828922297573698803949071549592; + uint256 constant Qrx = 19255122342085252435721781353311215271243008947042577671628673841412223002457; + uint256 constant Qry = 10511243378088247041067732058201162942043328526192736332691598388749192318689; + uint256 constant Qox = 4846431139822809523664712206555211152797183858747332486860734167262070297843; + uint256 constant Qoy = 4275198844266346854204622782965162900566356498494141661359301267458631737164; + uint256 constant Qcx = 9572888766008340555115385580069270222689917748933972787194553025594815657174; + uint256 constant Qcy = 9601976452066829021812002106705867231634502269823129602683298200174202444823; + uint256 constant S1x = 2797423661869942199997562736488148946408546150114393901232965103945063696287; + uint256 constant S1y = 16569854745607775746431740940568788906469068385683184144689057058143261987873; + uint256 constant S2x = 17055306094270202707714426620367960622854341109275840107014018640526187121145; + uint256 constant S2y = 16419875379329981791092484609275198237714156346112241548394425979586290049156; + uint256 constant S3x = 20516592066437363359580313386871405088407063937187850357194742761782730339371; + uint256 constant S3y = 20178521677799005010535532246956263561178465885281899701458201425597684850469; + uint256 constant k1 = 2; + uint256 constant k2 = 3; + uint256 constant X2x1 = 21831381940315734285607113342023901060522397560371972897001948545212302161822; + uint256 constant X2x2 = 
17231025384763736816414546592865244497437017442647097510447326538965263639101; + uint256 constant X2y1 = 2388026358213174446665280700919698872609886601280537296205114254867301080648; + uint256 constant X2y2 = 11507326595632554467052522095592665270651932854513688777769618397986436103170; + + uint256 constant q = 21888242871839275222246405745257275088548364400416034343698204186575808495617; + uint256 constant qf = 21888242871839275222246405745257275088696311157297823662689037894645226208583; + uint256 constant w1 = 15549849457946371566896172786938980432421851627449396898353380550861104573629; + + uint256 constant G1x = 1; + uint256 constant G1y = 2; + uint256 constant G2x1 = 10857046999023057135944570762232829481370756359578518086990519993285655852781; + uint256 constant G2x2 = 11559732032986387107991004021392285783925812861821192530917403151452391805634; + uint256 constant G2y1 = 8495653923123431417604973247489272438418190587263600148770280649306958101930; + uint256 constant G2y2 = 4082367875863433681332203403145435568316851327593401208105741076214120093531; + uint16 constant pA = 32; + uint16 constant pB = 96; + uint16 constant pC = 160; + uint16 constant pZ = 224; + uint16 constant pT1 = 288; + uint16 constant pT2 = 352; + uint16 constant pT3 = 416; + uint16 constant pWxi = 480; + uint16 constant pWxiw = 544; + uint16 constant pEval_a = 608; + uint16 constant pEval_b = 640; + uint16 constant pEval_c = 672; + uint16 constant pEval_s1 = 704; + uint16 constant pEval_s2 = 736; + uint16 constant pEval_zw = 768; + uint16 constant pEval_r = 800; + + uint16 constant pAlpha = 0; + uint16 constant pBeta = 32; + uint16 constant pGamma = 64; + uint16 constant pXi = 96; + uint16 constant pXin = 128; + uint16 constant pBetaXi = 160; + uint16 constant pV1 = 192; + uint16 constant pV2 = 224; + uint16 constant pV3 = 256; + uint16 constant pV4 = 288; + uint16 constant pV5 = 320; + uint16 constant pV6 = 352; + uint16 constant pU = 384; + uint16 constant pPl = 416; + uint16 constant pEval_t = 448; + uint16 constant pA1 = 480; + uint16 constant pB1 = 544; + uint16 constant pZh = 608; + uint16 constant pZhInv = 640; + + uint16 constant pEval_l1 = 672; + + uint16 constant pEval_l2 = 704; + + uint16 constant pEval_l3 = 736; + + uint16 constant pEval_l4 = 768; + + uint16 constant pEval_l5 = 800; + + uint16 constant pEval_l6 = 832; + + uint16 constant pEval_l7 = 864; + + uint16 constant pEval_l8 = 896; + + + + uint16 constant lastMem = 928; + + function verifyProof(bytes memory proof, uint[] memory pubSignals) public view returns (bool) { + assembly { + ///////// + // Computes the inverse using the extended euclidean algorithm + ///////// + function inverse(a, q) -> inv { + let t := 0 + let newt := 1 + let r := q + let newr := a + let quotient + let aux + + for { } newr { } { + quotient := sdiv(r, newr) + aux := sub(t, mul(quotient, newt)) + t:= newt + newt:= aux + + aux := sub(r,mul(quotient, newr)) + r := newr + newr := aux + } + + if gt(r, 1) { revert(0,0) } + if slt(t, 0) { t:= add(t, q) } + + inv := t + } + + /////// + // Computes the inverse of an array of values + // See https://vitalik.ca/general/2018/07/21/starks_part_3.html in section where explain fields operations + ////// + function inverseArray(pVals, n) { + + let pAux := mload(0x40) // Point to the next free position + let pIn := pVals + let lastPIn := add(pVals, mul(n, 32)) // Read n elemnts + let acc := mload(pIn) // Read the first element + pIn := add(pIn, 32) // Point to the second element + let inv + + + for { } lt(pIn, lastPIn) { + pAux := 
add(pAux, 32) + pIn := add(pIn, 32) + } + { + mstore(pAux, acc) + acc := mulmod(acc, mload(pIn), q) + } + acc := inverse(acc, q) + + // At this point pAux pint to the next free position we substract 1 to point to the last used + pAux := sub(pAux, 32) + // pIn points to the n+1 element, we substract to point to n + pIn := sub(pIn, 32) + lastPIn := pVals // We don't process the first element + for { } gt(pIn, lastPIn) { + pAux := sub(pAux, 32) + pIn := sub(pIn, 32) + } + { + inv := mulmod(acc, mload(pAux), q) + acc := mulmod(acc, mload(pIn), q) + mstore(pIn, inv) + } + // pIn points to first element, we just set it. + mstore(pIn, acc) + } + + function checkField(v) { + if iszero(lt(v, q)) { + mstore(0, 0) + return(0,0x20) + } + } + + function checkInput(pProof) { + if iszero(eq(mload(pProof), 800 )) { + mstore(0, 0) + return(0,0x20) + } + checkField(mload(add(pProof, pEval_a))) + checkField(mload(add(pProof, pEval_b))) + checkField(mload(add(pProof, pEval_c))) + checkField(mload(add(pProof, pEval_s1))) + checkField(mload(add(pProof, pEval_s2))) + checkField(mload(add(pProof, pEval_zw))) + checkField(mload(add(pProof, pEval_r))) + + // Points are checked in the point operations precompiled smart contracts + } + + function calculateChallanges(pProof, pMem, pPublic) { + + let a + let b + + + mstore( add(pMem, 928 ), mload( add( pPublic, 32))) + + mstore( add(pMem, 960 ), mload( add( pPublic, 64))) + + mstore( add(pMem, 992 ), mload( add( pPublic, 96))) + + mstore( add(pMem, 1024 ), mload( add( pPublic, 128))) + + mstore( add(pMem, 1056 ), mload( add( pPublic, 160))) + + mstore( add(pMem, 1088 ), mload( add( pPublic, 192))) + + mstore( add(pMem, 1120 ), mload( add( pPublic, 224))) + + mstore( add(pMem, 1152 ), mload( add( pPublic, 256))) + + mstore( add(pMem, 1184 ), mload( add( pProof, pA))) + mstore( add(pMem, 1216 ), mload( add( pProof, add(pA,32)))) + mstore( add(pMem, 1248 ), mload( add( pProof, add(pA,64)))) + mstore( add(pMem, 1280 ), mload( add( pProof, add(pA,96)))) + mstore( add(pMem, 1312 ), mload( add( pProof, add(pA,128)))) + mstore( add(pMem, 1344 ), mload( add( pProof, add(pA,160)))) + + b := mod(keccak256(add(pMem, lastMem), 448), q) + mstore( add(pMem, pBeta), b) + mstore( add(pMem, pGamma), mod(keccak256(add(pMem, pBeta), 32), q)) + mstore( add(pMem, pAlpha), mod(keccak256(add(pProof, pZ), 64), q)) + + a := mod(keccak256(add(pProof, pT1), 192), q) + mstore( add(pMem, pXi), a) + mstore( add(pMem, pBetaXi), mulmod(b, a, q)) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + a:= mulmod(a, a, q) + + mstore( add(pMem, pXin), a) + a:= mod(add(sub(a, 1),q), q) + mstore( add(pMem, pZh), a) + mstore( add(pMem, pZhInv), a) // We will invert later together with lagrange pols + + let v1 := mod(keccak256(add(pProof, pEval_a), 224), q) + mstore( add(pMem, pV1), v1) + a := mulmod(v1, v1, q) + mstore( add(pMem, pV2), a) + a := mulmod(a, v1, q) + mstore( add(pMem, pV3), a) + a := mulmod(a, v1, q) + mstore( add(pMem, pV4), a) + a := mulmod(a, v1, q) + mstore( add(pMem, pV5), a) + a := mulmod(a, v1, q) + mstore( add(pMem, pV6), a) + + mstore( add(pMem, pU), mod(keccak256(add(pProof, pWxi), 128), q)) + } + + function 
calculateLagrange(pMem) { + + let w := 1 + + mstore( + add(pMem, pEval_l1), + mulmod( + n, + mod( + add( + sub( + mload(add(pMem, pXi)), + w + ), + q + ), + q + ), + q + ) + ) + + w := mulmod(w, w1, q) + + + mstore( + add(pMem, pEval_l2), + mulmod( + n, + mod( + add( + sub( + mload(add(pMem, pXi)), + w + ), + q + ), + q + ), + q + ) + ) + + w := mulmod(w, w1, q) + + + mstore( + add(pMem, pEval_l3), + mulmod( + n, + mod( + add( + sub( + mload(add(pMem, pXi)), + w + ), + q + ), + q + ), + q + ) + ) + + w := mulmod(w, w1, q) + + + mstore( + add(pMem, pEval_l4), + mulmod( + n, + mod( + add( + sub( + mload(add(pMem, pXi)), + w + ), + q + ), + q + ), + q + ) + ) + + w := mulmod(w, w1, q) + + + mstore( + add(pMem, pEval_l5), + mulmod( + n, + mod( + add( + sub( + mload(add(pMem, pXi)), + w + ), + q + ), + q + ), + q + ) + ) + + w := mulmod(w, w1, q) + + + mstore( + add(pMem, pEval_l6), + mulmod( + n, + mod( + add( + sub( + mload(add(pMem, pXi)), + w + ), + q + ), + q + ), + q + ) + ) + + w := mulmod(w, w1, q) + + + mstore( + add(pMem, pEval_l7), + mulmod( + n, + mod( + add( + sub( + mload(add(pMem, pXi)), + w + ), + q + ), + q + ), + q + ) + ) + + w := mulmod(w, w1, q) + + + mstore( + add(pMem, pEval_l8), + mulmod( + n, + mod( + add( + sub( + mload(add(pMem, pXi)), + w + ), + q + ), + q + ), + q + ) + ) + + + + inverseArray(add(pMem, pZhInv), 9 ) + + let zh := mload(add(pMem, pZh)) + w := 1 + + + mstore( + add(pMem, pEval_l1 ), + mulmod( + mload(add(pMem, pEval_l1 )), + zh, + q + ) + ) + + + w := mulmod(w, w1, q) + + + + mstore( + add(pMem, pEval_l2), + mulmod( + w, + mulmod( + mload(add(pMem, pEval_l2)), + zh, + q + ), + q + ) + ) + + + w := mulmod(w, w1, q) + + + + mstore( + add(pMem, pEval_l3), + mulmod( + w, + mulmod( + mload(add(pMem, pEval_l3)), + zh, + q + ), + q + ) + ) + + + w := mulmod(w, w1, q) + + + + mstore( + add(pMem, pEval_l4), + mulmod( + w, + mulmod( + mload(add(pMem, pEval_l4)), + zh, + q + ), + q + ) + ) + + + w := mulmod(w, w1, q) + + + + mstore( + add(pMem, pEval_l5), + mulmod( + w, + mulmod( + mload(add(pMem, pEval_l5)), + zh, + q + ), + q + ) + ) + + + w := mulmod(w, w1, q) + + + + mstore( + add(pMem, pEval_l6), + mulmod( + w, + mulmod( + mload(add(pMem, pEval_l6)), + zh, + q + ), + q + ) + ) + + + w := mulmod(w, w1, q) + + + + mstore( + add(pMem, pEval_l7), + mulmod( + w, + mulmod( + mload(add(pMem, pEval_l7)), + zh, + q + ), + q + ) + ) + + + w := mulmod(w, w1, q) + + + + mstore( + add(pMem, pEval_l8), + mulmod( + w, + mulmod( + mload(add(pMem, pEval_l8)), + zh, + q + ), + q + ) + ) + + + + + + } + + function calculatePl(pMem, pPub) { + let pl := 0 + + + pl := mod( + add( + sub( + pl, + mulmod( + mload(add(pMem, pEval_l1)), + mload(add(pPub, 32)), + q + ) + ), + q + ), + q + ) + + pl := mod( + add( + sub( + pl, + mulmod( + mload(add(pMem, pEval_l2)), + mload(add(pPub, 64)), + q + ) + ), + q + ), + q + ) + + pl := mod( + add( + sub( + pl, + mulmod( + mload(add(pMem, pEval_l3)), + mload(add(pPub, 96)), + q + ) + ), + q + ), + q + ) + + pl := mod( + add( + sub( + pl, + mulmod( + mload(add(pMem, pEval_l4)), + mload(add(pPub, 128)), + q + ) + ), + q + ), + q + ) + + pl := mod( + add( + sub( + pl, + mulmod( + mload(add(pMem, pEval_l5)), + mload(add(pPub, 160)), + q + ) + ), + q + ), + q + ) + + pl := mod( + add( + sub( + pl, + mulmod( + mload(add(pMem, pEval_l6)), + mload(add(pPub, 192)), + q + ) + ), + q + ), + q + ) + + pl := mod( + add( + sub( + pl, + mulmod( + mload(add(pMem, pEval_l7)), + mload(add(pPub, 224)), + q + ) + ), + q + ), + q + ) + + pl := mod( + add( + sub( + 
pl, + mulmod( + mload(add(pMem, pEval_l8)), + mload(add(pPub, 256)), + q + ) + ), + q + ), + q + ) + + + mstore(add(pMem, pPl), pl) + + + } + + function calculateT(pProof, pMem) { + let t + let t1 + let t2 + t := addmod( + mload(add(pProof, pEval_r)), + mload(add(pMem, pPl)), + q + ) + + t1 := mulmod( + mload(add(pProof, pEval_s1)), + mload(add(pMem, pBeta)), + q + ) + + t1 := addmod( + t1, + mload(add(pProof, pEval_a)), + q + ) + + t1 := addmod( + t1, + mload(add(pMem, pGamma)), + q + ) + + t2 := mulmod( + mload(add(pProof, pEval_s2)), + mload(add(pMem, pBeta)), + q + ) + + t2 := addmod( + t2, + mload(add(pProof, pEval_b)), + q + ) + + t2 := addmod( + t2, + mload(add(pMem, pGamma)), + q + ) + + t1 := mulmod(t1, t2, q) + + t2 := addmod( + mload(add(pProof, pEval_c)), + mload(add(pMem, pGamma)), + q + ) + + t1 := mulmod(t1, t2, q) + t1 := mulmod(t1, mload(add(pProof, pEval_zw)), q) + t1 := mulmod(t1, mload(add(pMem, pAlpha)), q) + + t2 := mulmod( + mload(add(pMem, pEval_l1)), + mload(add(pMem, pAlpha)), + q + ) + + t2 := mulmod( + t2, + mload(add(pMem, pAlpha)), + q + ) + + t1 := addmod(t1, t2, q) + + t := mod(sub(add(t, q), t1), q) + t := mulmod(t, mload(add(pMem, pZhInv)), q) + + mstore( add(pMem, pEval_t) , t) + + } + + function g1_set(pR, pP) { + mstore(pR, mload(pP)) + mstore(add(pR, 32), mload(add(pP,32))) + } + + function g1_acc(pR, pP) { + let mIn := mload(0x40) + mstore(mIn, mload(pR)) + mstore(add(mIn,32), mload(add(pR, 32))) + mstore(add(mIn,64), mload(pP)) + mstore(add(mIn,96), mload(add(pP, 32))) + + let success := staticcall(sub(gas(), 2000), 6, mIn, 128, pR, 64) + + if iszero(success) { + mstore(0, 0) + return(0,0x20) + } + } + + function g1_mulAcc(pR, pP, s) { + let success + let mIn := mload(0x40) + mstore(mIn, mload(pP)) + mstore(add(mIn,32), mload(add(pP, 32))) + mstore(add(mIn,64), s) + + success := staticcall(sub(gas(), 2000), 7, mIn, 96, mIn, 64) + + if iszero(success) { + mstore(0, 0) + return(0,0x20) + } + + mstore(add(mIn,64), mload(pR)) + mstore(add(mIn,96), mload(add(pR, 32))) + + success := staticcall(sub(gas(), 2000), 6, mIn, 128, pR, 64) + + if iszero(success) { + mstore(0, 0) + return(0,0x20) + } + + } + + function g1_mulAccC(pR, x, y, s) { + let success + let mIn := mload(0x40) + mstore(mIn, x) + mstore(add(mIn,32), y) + mstore(add(mIn,64), s) + + success := staticcall(sub(gas(), 2000), 7, mIn, 96, mIn, 64) + + if iszero(success) { + mstore(0, 0) + return(0,0x20) + } + + mstore(add(mIn,64), mload(pR)) + mstore(add(mIn,96), mload(add(pR, 32))) + + success := staticcall(sub(gas(), 2000), 6, mIn, 128, pR, 64) + + if iszero(success) { + mstore(0, 0) + return(0,0x20) + } + } + + function g1_mulSetC(pR, x, y, s) { + let success + let mIn := mload(0x40) + mstore(mIn, x) + mstore(add(mIn,32), y) + mstore(add(mIn,64), s) + + success := staticcall(sub(gas(), 2000), 7, mIn, 96, pR, 64) + + if iszero(success) { + mstore(0, 0) + return(0,0x20) + } + } + + + function calculateA1(pProof, pMem) { + let p := add(pMem, pA1) + g1_set(p, add(pProof, pWxi)) + g1_mulAcc(p, add(pProof, pWxiw), mload(add(pMem, pU))) + } + + + function calculateB1(pProof, pMem) { + let s + let s1 + let p := add(pMem, pB1) + + // Calculate D + s := mulmod( mload(add(pProof, pEval_a)), mload(add(pMem, pV1)), q) + g1_mulSetC(p, Qlx, Qly, s) + + s := mulmod( s, mload(add(pProof, pEval_b)), q) + g1_mulAccC(p, Qmx, Qmy, s) + + s := mulmod( mload(add(pProof, pEval_b)), mload(add(pMem, pV1)), q) + g1_mulAccC(p, Qrx, Qry, s) + + s := mulmod( mload(add(pProof, pEval_c)), mload(add(pMem, pV1)), q) + 
g1_mulAccC(p, Qox, Qoy, s) + + s :=mload(add(pMem, pV1)) + g1_mulAccC(p, Qcx, Qcy, s) + + s := addmod(mload(add(pProof, pEval_a)), mload(add(pMem, pBetaXi)), q) + s := addmod(s, mload(add(pMem, pGamma)), q) + s1 := mulmod(k1, mload(add(pMem, pBetaXi)), q) + s1 := addmod(s1, mload(add(pProof, pEval_b)), q) + s1 := addmod(s1, mload(add(pMem, pGamma)), q) + s := mulmod(s, s1, q) + s1 := mulmod(k2, mload(add(pMem, pBetaXi)), q) + s1 := addmod(s1, mload(add(pProof, pEval_c)), q) + s1 := addmod(s1, mload(add(pMem, pGamma)), q) + s := mulmod(s, s1, q) + s := mulmod(s, mload(add(pMem, pAlpha)), q) + s := mulmod(s, mload(add(pMem, pV1)), q) + s1 := mulmod(mload(add(pMem, pEval_l1)), mload(add(pMem, pAlpha)), q) + s1 := mulmod(s1, mload(add(pMem, pAlpha)), q) + s1 := mulmod(s1, mload(add(pMem, pV1)), q) + s := addmod(s, s1, q) + s := addmod(s, mload(add(pMem, pU)), q) + g1_mulAcc(p, add(pProof, pZ), s) + + s := mulmod(mload(add(pMem, pBeta)), mload(add(pProof, pEval_s1)), q) + s := addmod(s, mload(add(pProof, pEval_a)), q) + s := addmod(s, mload(add(pMem, pGamma)), q) + s1 := mulmod(mload(add(pMem, pBeta)), mload(add(pProof, pEval_s2)), q) + s1 := addmod(s1, mload(add(pProof, pEval_b)), q) + s1 := addmod(s1, mload(add(pMem, pGamma)), q) + s := mulmod(s, s1, q) + s := mulmod(s, mload(add(pMem, pAlpha)), q) + s := mulmod(s, mload(add(pMem, pV1)), q) + s := mulmod(s, mload(add(pMem, pBeta)), q) + s := mulmod(s, mload(add(pProof, pEval_zw)), q) + s := mod(sub(q, s), q) + g1_mulAccC(p, S3x, S3y, s) + + + // calculate F + g1_acc(p , add(pProof, pT1)) + + s := mload(add(pMem, pXin)) + g1_mulAcc(p, add(pProof, pT2), s) + + s := mulmod(s, s, q) + g1_mulAcc(p, add(pProof, pT3), s) + + g1_mulAcc(p, add(pProof, pA), mload(add(pMem, pV2))) + g1_mulAcc(p, add(pProof, pB), mload(add(pMem, pV3))) + g1_mulAcc(p, add(pProof, pC), mload(add(pMem, pV4))) + g1_mulAccC(p, S1x, S1y, mload(add(pMem, pV5))) + g1_mulAccC(p, S2x, S2y, mload(add(pMem, pV6))) + + // calculate E + s := mload(add(pMem, pEval_t)) + s := addmod(s, mulmod(mload(add(pProof, pEval_r)), mload(add(pMem, pV1)), q), q) + s := addmod(s, mulmod(mload(add(pProof, pEval_a)), mload(add(pMem, pV2)), q), q) + s := addmod(s, mulmod(mload(add(pProof, pEval_b)), mload(add(pMem, pV3)), q), q) + s := addmod(s, mulmod(mload(add(pProof, pEval_c)), mload(add(pMem, pV4)), q), q) + s := addmod(s, mulmod(mload(add(pProof, pEval_s1)), mload(add(pMem, pV5)), q), q) + s := addmod(s, mulmod(mload(add(pProof, pEval_s2)), mload(add(pMem, pV6)), q), q) + s := addmod(s, mulmod(mload(add(pProof, pEval_zw)), mload(add(pMem, pU)), q), q) + s := mod(sub(q, s), q) + g1_mulAccC(p, G1x, G1y, s) + + + // Last part of B + s := mload(add(pMem, pXi)) + g1_mulAcc(p, add(pProof, pWxi), s) + + s := mulmod(mload(add(pMem, pU)), mload(add(pMem, pXi)), q) + s := mulmod(s, w1, q) + g1_mulAcc(p, add(pProof, pWxiw), s) + + } + + function checkPairing(pMem) -> isOk { + let mIn := mload(0x40) + mstore(mIn, mload(add(pMem, pA1))) + mstore(add(mIn,32), mload(add(add(pMem, pA1), 32))) + mstore(add(mIn,64), X2x2) + mstore(add(mIn,96), X2x1) + mstore(add(mIn,128), X2y2) + mstore(add(mIn,160), X2y1) + mstore(add(mIn,192), mload(add(pMem, pB1))) + let s := mload(add(add(pMem, pB1), 32)) + s := mod(sub(qf, s), qf) + mstore(add(mIn,224), s) + mstore(add(mIn,256), G2x2) + mstore(add(mIn,288), G2x1) + mstore(add(mIn,320), G2y2) + mstore(add(mIn,352), G2y1) + + let success := staticcall(sub(gas(), 2000), 8, mIn, 384, mIn, 0x20) + + isOk := and(success, mload(mIn)) + } + + let pMem := mload(0x40) + mstore(0x40, 
add(pMem, lastMem)) + + checkInput(proof) + calculateChallanges(proof, pMem, pubSignals) + calculateLagrange(pMem) + calculatePl(pMem, pubSignals) + calculateT(proof, pMem) + calculateA1(proof, pMem) + calculateB1(proof, pMem) + let isValid := checkPairing(pMem) + + mstore(0x40, sub(pMem, lastMem)) + mstore(0, isValid) + return(0,0x20) + } + + } +} diff --git a/migrations/4_deploy_verifier3.js b/migrations/4_deploy_verifier3.js new file mode 100644 index 0000000..ab3e3a0 --- /dev/null +++ b/migrations/4_deploy_verifier3.js @@ -0,0 +1,5 @@ +const v = artifacts.require("PlonkVerifier3"); + +module.exports = async function (deployer) { + await deployer.deploy(v); +}; diff --git a/tests/.gitkeep b/test/.gitkeep similarity index 100% rename from tests/.gitkeep rename to test/.gitkeep diff --git a/test/test_plonk_verifier3.js b/test/test_plonk_verifier3.js new file mode 100644 index 0000000..29a7b23 --- /dev/null +++ b/test/test_plonk_verifier3.js @@ -0,0 +1,23 @@ +const verifier = artifacts.require("PlonkVerifier3"); +const BN = require('bn.js'); + +contract("PlonkVerifier3", () => { + it("verify", async () => { + const verifierInstance = await verifier.deployed(); + + const proof = "0x0c2dd39b7ebf8f5150e00c600a7f35df829dd28e7e252a110ea5e974738e41ae162c25df47dde272bb6c37235c4154163a447c893c3b1c06eb72a127678016be2adbe82fac838a6f891270a69e0494e66087e47d04113a34f32c93775be29b1e2763713a978380aeddcbaadc10802a6fcff39a966fea52de7fbedfea67d691631cb28ca0d7a1ab39c2cc1a12a2e718b78ad2a1d9ca0616f9ec7f0801fc4600a6085a41ca8c84ee0d6b31db9144c527f933bc460da0da1af510ee9d954ebb78532f3c544a98bf27afe5e727f2469784ce76fe3c53e1fb5d21d79d309a34d86bf80f6b5bcb73c346c77934b7fd752233219e737e39a2a4ea8fe306349ae78a166d2b4dce68c2a2f58453b3cc19955001ac0ce3ab6c36b360597e8575b8af23276b10e4b9c86f64260a638db06e79d3b06f309500345a0c512d80b7fe8d903389cf0c4b6b89c3ed73e740ac00935da75724acce721018fdc30038f02dad312a138f18dc3019670b83ec001f94ce8b68be83fd6f5f5f41eb0178232f45d248b697e3134b5068f1c5db81809826f67e3bfdf6fa3aaf80b8a27ea371c3c1f43d268c03247960aa7190ebab9328ea72e1d9580cd90bbae291964fce8dbbebba7437f8a601c543201950a21105653ea7b5dc7ce9e3b4996e4b12234122d1c2e478c9b6070f114847b9de9cde54204001ecf1ea7bc3a934518dbd7bc1c2210bb200dfc70c1e6d01bf97f39bd906ab13706b938936fdbbee31ff2396dc71a7b3a9653ae2f20f9a8a73ec5386fecb9a437029ee7ba07df8c6336e9d01b0f799d11dd432796f27958d5ac5c0921f3765d8e463d4989d760a4976336c273d888edcb0693886b32577c916d113e1aae68d56668adfa049002fe30b9d56e57b86b51ac5971c013d1a64efbc301a00896d4fce2df3417ee44f3402ecb98ac21a41a1b83add650e4625a8cb97732aea3aa2cbf9e79ff3f21d440e43f40ff3593f811bb80e642445e52eafe289e98aca89a7bc3f56fcff3d9926dd67f2d2b949824742f17bd7590e310747d790d428798699183368102d00f2f47ccedb1ffacb1e80e54abc3250158f135f317a5a6d0c7e2389af448e32662d31775246d6c0901cc81f1b1e2bcb96c4"; + const pubSignals = [ + "0x30644e72e131a029b85045b68181585d2833e84876b7b965a760444f58bc4001", + "0x000000000000000000000000000000000000000000000000000000000000001d", + "0x30644e72e131a029b85045b68181585d2833e848486b15847c8d5608141f0001", + "0x000000000000000000000000000000000000000000000000000000000000001d", + "0x30644e72e131a029b85045b68181585d2833e8486d1e343c0de10642f1410001", + "0x000000000000000000000000000000000000000000000000000000000000001d", + "0x111da4b536325aca16982ce6fbcb52c06e6708b4976e0b0dbddb022776ff9ffc", + "0x0f6f31d790c79863387ddcbe761748b5dc72837e62035cc85e8eb136d727e8e4"].map((v) => { + return new BN(v.slice(2), 16); + }); + const res = await verifierInstance.verifyProof.call(proof, pubSignals); + 
assert.isTrue(res.valueOf()); + }) +}) \ No newline at end of file From 6b19f559ed639d662eafc1bfee6709c693d91762 Mon Sep 17 00:00:00 2001 From: hmiao <739025250@qq.com> Date: Wed, 7 Sep 2022 15:16:02 +0800 Subject: [PATCH 2/4] use config provider instead of hard code --- src/eth.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/eth.js b/src/eth.js index d552a98..28a7d77 100644 --- a/src/eth.js +++ b/src/eth.js @@ -18,7 +18,7 @@ function Connect() { }; // HTTP://127.0.0.1:7545 // conf.web3ProviderURL - let provider = new Web3.providers.WebsocketProvider("wss://node.delta.yuanben.org", options); + let provider = new Web3.providers.WebsocketProvider(conf.web3ProviderURL, options); provider.on('error', (e) => { console.error(e.reason) }); From e976adf4f24bd42a20ef1c297b317f9e73ed1f65 Mon Sep 17 00:00:00 2001 From: hmiao <739025250@qq.com> Date: Wed, 7 Sep 2022 15:16:31 +0800 Subject: [PATCH 3/4] add datahub contract --- contracts/DataHub.sol | 61 ++++++++++++++++++++++++++++++++++ migrations/5_deploy_datahub.js | 5 +++ test/test_datahub.js | 37 +++++++++++++++++++++ 3 files changed, 103 insertions(+) create mode 100644 contracts/DataHub.sol create mode 100644 migrations/5_deploy_datahub.js create mode 100644 test/test_datahub.js diff --git a/contracts/DataHub.sol b/contracts/DataHub.sol new file mode 100644 index 0000000..da89b0d --- /dev/null +++ b/contracts/DataHub.sol @@ -0,0 +1,61 @@ +// SPDX-License-Identifier: GPL-3.0 + +pragma solidity >=0.7.0 <0.9.0; + +contract DataHub { + address private owner; + event OwnerSet(address indexed oldOwner, address indexed newOwner); + + struct DataRecord { + bytes32 commitment; + uint32 version; + } + mapping(address => mapping(string => DataRecord)) datahub; + + event DataRegistered( + address indexed owner, + string name, + bytes32 commitment, + uint32 version + ); + + constructor() { + owner = msg.sender; + emit OwnerSet(address(0), owner); + } + + function register(string calldata name, bytes32 commitment) public { + datahub[msg.sender][name].commitment = commitment; + datahub[msg.sender][name].version += 1; + + emit DataRegistered( + msg.sender, + name, + commitment, + datahub[msg.sender][name].version + ); + } + + modifier dataExists(address addr, string calldata name) { + require(datahub[addr][name].version > 0); + _; + } + + function getDataCommitment(address addr, string calldata name) + public + view + dataExists(addr, name) + returns (bytes32 commitment) + { + commitment = datahub[addr][name].commitment; + } + + function getDataVersion(address addr, string calldata name) + public + view + dataExists(addr, name) + returns (uint32 version) + { + version = datahub[addr][name].version; + } +} diff --git a/migrations/5_deploy_datahub.js b/migrations/5_deploy_datahub.js new file mode 100644 index 0000000..3331403 --- /dev/null +++ b/migrations/5_deploy_datahub.js @@ -0,0 +1,5 @@ +const datahub = artifacts.require("DataHub"); + +module.exports = async function (deployer) { + await deployer.deploy(datahub); +}; diff --git a/test/test_datahub.js b/test/test_datahub.js new file mode 100644 index 0000000..1bdcc81 --- /dev/null +++ b/test/test_datahub.js @@ -0,0 +1,37 @@ +const datahub = artifacts.require("DataHub"); + +contract("DataHub", (accounts) => { + const name = "mnist"; + const commitment = "0x1230000000000000000000000000000000000000000000000000000000000000"; + it("register", async () => { + const datahubInstance = await datahub.deployed(); + const res = await datahubInstance.register(name, commitment, { from: accounts[0] 
}) + const _owner = res.logs[0].args['0']; + const _name = res.logs[0].args['1']; + const _commitment = res.logs[0].args['2']; + const version = res.logs[0].args['3'].toNumber(); + + assert.strictEqual(_owner, accounts[0]); + assert.strictEqual(_name, name); + assert.strictEqual(_commitment, commitment); + assert.strictEqual(version, 1); + }) + + it("get commitment and version", async () => { + const datahubInstance = await datahub.deployed(); + const _commitment = await datahubInstance.getDataCommitment.call(accounts[0], name) + const _version = (await datahubInstance.getDataVersion.call(accounts[0], name)).toNumber() + assert.strictEqual(_commitment, commitment) + assert.strictEqual(_version, 1); + }) + + const newCommitment = "0x3210000000000000000000000000000000000000000000000000000000000000" + it("update", async () => { + const datahubInstance = await datahub.deployed(); + await datahubInstance.register(name, newCommitment, { from: accounts[0] }) + const _commitment = await datahubInstance.getDataCommitment.call(accounts[0], name) + const _version = (await datahubInstance.getDataVersion.call(accounts[0], name)).toNumber() + assert.strictEqual(_commitment, newCommitment); + assert.strictEqual(_version, 2); + }) +}) \ No newline at end of file From 36df089cb03dc494ac6b1522ec48a2cccb72b6f5 Mon Sep 17 00:00:00 2001 From: hmiao <739025250@qq.com> Date: Thu, 8 Sep 2022 16:03:50 +0800 Subject: [PATCH 4/4] add hlr contract --- contracts/HLR.sol | 922 +++++++++++++++++++++++++++++++++++ contracts/PlonkVerifier3.sol | 2 +- migrations/6_deploy_hlr.js | 9 + 3 files changed, 932 insertions(+), 1 deletion(-) create mode 100644 contracts/HLR.sol create mode 100644 migrations/6_deploy_hlr.js diff --git a/contracts/HLR.sol b/contracts/HLR.sol new file mode 100644 index 0000000..6dbda1e --- /dev/null +++ b/contracts/HLR.sol @@ -0,0 +1,922 @@ +// SPDX-License-Identifier: GPL-3.0 + +pragma solidity >=0.7.0 <0.9.0; + +import "./IdentityContract.sol"; +import "./DataHub.sol"; + +contract Verifier { + uint256 public constant q = 0; + + function verifyProof(bytes memory proof, uint256[] memory pubSignals) + public + view + returns (bool) + {} +} + +/** + * @title HLR Contract + * @dev Contract for delta horizontal logistic regression + */ +contract HLR { + IdentityContract public idContract; + DataHub public dataContract; + + address private owner; + enum RoundStatus { + Started, + Running, + Calculating, + Aggregating, + Finished + } + mapping(bytes32 => Task) createdTasks; + mapping(bytes32 => VerifierState) verifierStates; + mapping(bytes32 => TaskRound[]) taskRounds; + mapping(bytes32 => RoundModelCommitments[]) roundModelCommitments; + uint64 private maxWeightCommitmentLength = 10485760; + uint64 private maxSSComitmentLength = 256; + struct RoundModelCommitments { + bytes32 weightCommitment; + mapping(address => bytes) resultCommitment; + mapping(address => mapping(address => SSData)) ssdata; + } + struct Task { + address creator; + string creatorUrl; + string dataSet; + bytes32 commitment; + string taskType; + uint64 currentRound; + bool finished; + bool enableVerify; + uint256 tolerance; + } + + struct Candidate { + bytes pk1; + bytes pk2; + } + + struct TaskRound { + uint64 currentRound; + uint32 maxSample; + uint32 minSample; + RoundStatus status; + mapping(address => Candidate) candidates; + address[] joinedAddrs; + address[] finishedAddrs; + } + + struct ExtCallTaskRoundStruct { + uint64 currentRound; + uint32 maxSample; + uint32 minSample; + uint8 status; + address[] joinedAddrs; + address[] 
finishedAddrs; + } + + struct SSData { + bytes seedPiece; + bytes seedCommitment; + bytes secretKeyPiece; + bytes secretKeyMaskCommitment; + } + + struct VerifierState { + uint256[] gradients; + uint256 precision; + mapping(address => bool) unfinishedClients; + uint256 unfinishedCount; + bool valid; + } + + // event for EVM logging + event OwnerSet(address indexed oldOwner, address indexed newOwner); + // triggered when task created + event TaskCreated( + address indexed creator, + bytes32 taskId, + string dataSet, + string creatorUrl, + bytes32 commitment, + string taskType, + bool enableVerify, + uint256 tolerance + ); + // triggered when task finished + event TaskFinished(bytes32 taskId); + // triggered when task developer call startRound + event RoundStart(bytes32 taskId, uint64 round); + + // triggered when task developer call startRound + event RoundEnd(bytes32 taskId, uint64 round); + + // triggered when task developer call selectCandidates + event PartnerSelected(bytes32 taskId, uint64 round, address[] addrs); + + // triggered when task developer call startAggregateUpload + event AggregateStarted(bytes32 taskId, uint64 round, address[] addrs); + + // triggered when task developer call startAggregate + event CalculateStarted(bytes32 taskId, uint64 round, address[] addrs); + + // triggered when client call uploadWeightCommitment , uploadSeedCommitment ,uploadSkMaskCommitment + event ContentUploaded( + bytes32 taskId, + uint64 round, + address sender, + address reciver, + string contentType, + bytes content + ); + + // triggered when client call verify method + event TaskMemberVerified(bytes32 taskId, address addr, bool verified); + // triggered when all clients pass the verification or any client is rejected by the verification + event TaskVerified(bytes32 taskId, bool verified); + + // modifier to check if caller is owner + modifier isOwner() { + // If the first argument of 'require' evaluates to 'false', execution terminates and all + // changes to the state and to Ether balances are reverted. + // This used to consume all gas in old EVM versions, but not anymore. + // It is often a good idea to use 'require' to check if functions are called correctly. + // As a second argument, you can also provide an explanation about what went wrong. 
+ require(msg.sender == owner, "Caller is not owner"); + _; + } + + modifier taskExists(bytes32 task_id) { + require(createdTasks[task_id].creator != address(0), "Task not exists"); + _; + } + + modifier roundExists(bytes32 task_id, uint64 round) { + TaskRound[] storage rounds = taskRounds[task_id]; + require( + rounds.length > 1 && rounds.length > round, + "this round does not exist" + ); + _; + } + + modifier roundcmmtExists(bytes32 task_id, uint64 round) { + RoundModelCommitments[] storage cmmts = roundModelCommitments[task_id]; + require(cmmts.length > round, "The Task Round Must exists"); + _; + } + + modifier taskOwner(bytes32 task_id) { + require( + createdTasks[task_id].creator == msg.sender, + "Must called by the task owner" + ); + _; + } + + /** + * @dev Set contract deployer as owner + */ + constructor(IdentityContract idAddr, DataHub dbAddr) { + idContract = idAddr; + dataContract = dbAddr; + owner = msg.sender; // 'msg.sender' is sender of current call, contract deployer for a constructor + emit OwnerSet(address(0), owner); + } + + /** + * @dev get task info data + * @param taskId taskId + */ + function getTaskData(bytes32 taskId) + public + view + taskExists(taskId) + returns (Task memory task) + { + task = createdTasks[taskId]; + } + + /** + * @dev called by task developer, notifying all clients that a new learning task has been published + * @param dataSet data set name (file/folder name of training data) + * @param commitment training code hash (client validation purpose) + * @return taskId taskId + */ + function createTask( + string calldata dataSet, + bytes32 commitment, + string calldata taskType, + bool enableVerify, + uint256 tolerance + ) public payable returns (bytes32 taskId) { + bytes32 task_id = keccak256( + abi.encode(block.number, msg.sender, dataSet, commitment, taskType) + ); + IdentityContract.Node memory node = idContract.getNodeInfo(msg.sender); + createdTasks[task_id] = Task({ + creatorUrl: node.url, + creator: msg.sender, + dataSet: dataSet, + commitment: commitment, + taskType: taskType, + currentRound: 0, + finished: false, + enableVerify: enableVerify, + tolerance: tolerance + }); + taskId = task_id; + TaskRound[] storage rounds = taskRounds[taskId]; + rounds.push(); + emit TaskCreated( + msg.sender, + task_id, + dataSet, + node.url, + commitment, + taskType, + enableVerify, + tolerance + ); + } + + function finishTask(bytes32 taskId) + public + taskExists(taskId) + taskOwner(taskId) + { + Task storage task = createdTasks[taskId]; + task.finished = true; + if (task.enableVerify) { + TaskRound storage finalRound = taskRounds[taskId][ + task.currentRound + ]; + VerifierState storage state = verifierStates[taskId]; + for (uint256 i = 0; i < finalRound.finishedAddrs.length; i++) { + state.unfinishedClients[finalRound.finishedAddrs[i]] = true; + } + state.unfinishedCount = finalRound.finishedAddrs.length; + } + emit TaskFinished(taskId); + } + + function getTask(bytes32 taskId) + public + view + taskExists(taskId) + returns (Task memory task) + { + task = createdTasks[taskId]; + } + + /** + * @dev called by task developer, notifying all clients that a new computing round is started and open for joining + * @param taskId taskId + * @param round the round to start + */ + function startRound( + bytes32 taskId, + uint64 round, + uint32 maxSample, + uint32 minSample, + bytes32 weightCommitment + ) public taskExists(taskId) taskOwner(taskId) { + TaskRound[] storage rounds = taskRounds[taskId]; + require( + rounds.length == round, + "the round has been already 
started or the pre round does not exist" + ); + Task storage task = createdTasks[taskId]; + task.currentRound = round; + while (rounds.length == 0 || rounds.length - 1 < round) { + rounds.push(); + } + rounds[round].currentRound = round; + rounds[round].maxSample = maxSample; + rounds[round].minSample = minSample; + rounds[round].status = RoundStatus.Started; + RoundModelCommitments[] storage cmmts = roundModelCommitments[taskId]; + while (cmmts.length == 0 || cmmts.length - 1 < round) { + cmmts.push(); + } + RoundModelCommitments storage cmmt = cmmts[round]; + cmmt.weightCommitment = weightCommitment; + emit RoundStart(taskId, round); + } + + /** + * @dev called by anyone, get weight commitment of a task round + * @param taskId taskId + * @param round the round to start + */ + function getWeightCommitment(bytes32 taskId, uint64 round) + public + view + taskExists(taskId) + roundExists(taskId, round) + roundcmmtExists(taskId, round) + returns (bytes32) + { + RoundModelCommitments[] storage cmmts = roundModelCommitments[taskId]; + RoundModelCommitments storage cmmt = cmmts[round]; + return cmmt.weightCommitment; + } + + /** + * @dev called by client, join for that round of computation + * @param taskId taskId + * @param round the round to join + * @param pk1 used for secure communication channel establishment + * @param pk2 used for mask generation + */ + function joinRound( + bytes32 taskId, + uint64 round, + bytes calldata pk1, + bytes calldata pk2 + ) public taskExists(taskId) roundExists(taskId, round) returns (bool) { + TaskRound[] storage rounds = taskRounds[taskId]; + TaskRound storage thisRound = rounds[rounds.length - 1]; + require( + rounds.length - 1 == round && + thisRound.status == RoundStatus.Started, + "join phase has passed" + ); + require( + thisRound.candidates[msg.sender].pk1.length == 0, + "Cannot join the same round multiple times" + ); + thisRound.candidates[msg.sender] = Candidate({pk1: pk1, pk2: pk2}); + thisRound.joinedAddrs.push(msg.sender); + return true; + } + + /** + * @dev called by anyone, get Client Pks + * @return candidate (pk1,pk2) + */ + function getClientPublickeys( + bytes32 taskId, + uint64 round, + address[] calldata candidateAddrs + ) public view roundExists(taskId, round) returns (Candidate[] memory) { + Candidate[] memory candidates = new Candidate[](candidateAddrs.length); + for (uint256 i = 0; i < candidateAddrs.length; i++) { + candidates[i] = taskRounds[taskId][round].candidates[ + candidateAddrs[i] + ]; + } + return candidates; + } + + /** + * @dev getting task round infos + * @param taskId taskId + * @param round the round to fetch + * @return taskround the task round infos + */ + function getTaskRound(bytes32 taskId, uint64 round) + public + view + roundExists(taskId, round) + returns (ExtCallTaskRoundStruct memory taskround) + { + TaskRound storage temp = taskRounds[taskId][round]; + taskround = ExtCallTaskRoundStruct({ + currentRound: temp.currentRound, + maxSample: temp.maxSample, + minSample: temp.minSample, + status: (uint8)(temp.status), + joinedAddrs: temp.joinedAddrs, + finishedAddrs: temp.finishedAddrs + }); + } + + /** + * @dev called by task developer, randomly choose candidates to be computation nodes + * @dev clients now should start secret sharing phase + * @param addrs selected client addresses + */ + function selectCandidates( + bytes32 taskId, + uint64 round, + address[] calldata addrs + ) public taskOwner(taskId) roundExists(taskId, round) { + require(addrs.length > 0, "Must provide addresses"); + TaskRound storage curRound 
= taskRounds[taskId][round]; + for (uint256 i = 0; i < addrs.length; i++) { + require( + curRound.candidates[addrs[i]].pk1.length > 0, + "Candidate must exist" + ); + } + curRound.status = RoundStatus.Running; + emit PartnerSelected(taskId, round, addrs); + } + + /** + * @dev called by task developer, get commitments from blockchain + * @dev (Server has to call this method for every clients to get their commiments as the return value couldn't contain mapping type in solidity(damn it)) + * @param taskId taskId + * @param clientaddress the client that publish the commitments + * @param round the round of that commitment + * @return commitment commitment data + */ + function getResultCommitment( + bytes32 taskId, + address clientaddress, + uint64 round + ) + public + view + roundExists(taskId, round) + roundcmmtExists(taskId, round) + returns (bytes memory commitment) + { + RoundModelCommitments[] storage cmmts = roundModelCommitments[taskId]; + require(cmmts.length >= round, "The Task Round Must exists"); + RoundModelCommitments storage cmmt = cmmts[round]; + commitment = cmmt.resultCommitment[clientaddress]; + } + + /** + * @dev called by any participants + */ + function getSecretSharingDatas( + bytes32 taskId, + uint64 round, + address[] calldata senders, + address receiver + ) + public + view + roundExists(taskId, round) + roundcmmtExists(taskId, round) + returns (SSData[] memory) + { + RoundModelCommitments[] storage cmmts = roundModelCommitments[taskId]; + require(cmmts.length >= round, "The Task Round Must exists"); + RoundModelCommitments storage cmmt = cmmts[round]; + SSData[] memory ssdatas = new SSData[](senders.length); + for (uint256 i = 0; i < senders.length; i++) { + ssdatas[i] = (cmmt.ssdata[senders[i]][receiver]); + } + return ssdatas; + } + + /** + * @dev called by task developer, notifying all participants that the ss and gradient transfer phase has finished + * @dev client now should send corresponded ss share pieces to task developer according to the online status given by the task developer + * @param taskId taskId + * @param round the task round + * @param onlineClients clients that has transfered gradient to task developer + */ + function startAggregate( + bytes32 taskId, + uint64 round, + address[] calldata onlineClients + ) public taskOwner(taskId) roundExists(taskId, round) { + TaskRound storage curRound = taskRounds[taskId][round]; + require( + curRound.status == RoundStatus.Calculating, + "Calculating has not started" + ); + curRound.status = RoundStatus.Aggregating; + for (uint256 i = 0; i < onlineClients.length; i++) { + require( + curRound.candidates[onlineClients[i]].pk1.length > 0, + "Candidate must exist" + ); + } + for (uint256 i = 0; i < onlineClients.length; i++) { + curRound.finishedAddrs.push(onlineClients[i]); + } + emit AggregateStarted(taskId, round, onlineClients); + } + + /** + * @dev called by task developer, notifying all participants that the secret sharing phase is finished to transfer masked gradient to task server + * @param taskId taskId + * @param round the task round + */ + function startCalculate( + bytes32 taskId, + uint64 round, + address[] calldata onlineClients + ) public taskOwner(taskId) roundExists(taskId, round) { + TaskRound storage curRound = taskRounds[taskId][round]; + require( + curRound.status == RoundStatus.Running, + "This round is not running now" + ); + curRound.status = RoundStatus.Calculating; + emit CalculateStarted(taskId, round, onlineClients); + } + + /** + * @dev called by task developer, close round + * @param 
taskId taskId + * @param round the task round + */ + function endRound(bytes32 taskId, uint64 round) + public + taskOwner(taskId) + roundExists(taskId, round) + { + TaskRound storage curRound = taskRounds[taskId][round]; + curRound.status = RoundStatus.Finished; + emit RoundEnd(taskId, round); + } + + /** + * @dev called by client, upload weight commitment + * @param taskId taskId + * @param round the task round + * @param resultCommitment masked model incremental commitment + */ + function uploadResultCommitment( + bytes32 taskId, + uint64 round, + bytes calldata resultCommitment + ) public roundExists(taskId, round) { + require( + resultCommitment.length > 0 && + resultCommitment.length <= maxWeightCommitmentLength, + "commitment length exceeds limit or it is empty" + ); + TaskRound storage curRound = taskRounds[taskId][round]; + require( + curRound.status == RoundStatus.Calculating, + "not in uploading phase" + ); + RoundModelCommitments[] storage commitments = roundModelCommitments[ + taskId + ]; + RoundModelCommitments storage commitment = commitments[round]; + require( + commitment.resultCommitment[msg.sender].length == 0, + "cannot upload resultCommitment multiple times" + ); + commitment.resultCommitment[msg.sender] = resultCommitment; + emit ContentUploaded( + taskId, + round, + msg.sender, + address(0), + "WEIGHT", + resultCommitment + ); + } + + /** + * @dev called by client, upload secret sharing seed commitment + * @param taskId taskId + * @param round the task round + * @param receivers the receiver addresses + * @param seedCommitments seedCommitments[i] is the commitment send to receivers[i] + */ + function uploadSeedCommitment( + bytes32 taskId, + uint64 round, + address[] calldata receivers, + bytes[] calldata seedCommitments + ) public roundExists(taskId, round) { + require( + receivers.length == seedCommitments.length, + "receivers length is not equal to seedCommitments length" + ); + for (uint256 i = 0; i < seedCommitments.length; i++) { + require( + seedCommitments[i].length > 0 && + seedCommitments[i].length <= maxSSComitmentLength, + "commitment length exceeds limit or it is empty" + ); + } + TaskRound storage curRound = taskRounds[taskId][round]; + require( + curRound.status == RoundStatus.Running, + "not in secret sharing phase" + ); + RoundModelCommitments[] storage commitments = roundModelCommitments[ + taskId + ]; + RoundModelCommitments storage commitment = commitments[round]; + for (uint256 i = 0; i < seedCommitments.length; i++) { + require( + commitment + .ssdata[msg.sender][receivers[i]] + .seedCommitment + .length == 0, + "cannot upload seed cmmt multiple times" + ); + commitment + .ssdata[msg.sender][receivers[i]].seedCommitment = seedCommitments[ + i + ]; + emit ContentUploaded( + taskId, + round, + msg.sender, + receivers[i], + "SEEDCMMT", + seedCommitments[i] + ); + } + } + + /** + * @dev called by client, upload secret sharing seed commitment + * @param taskId taskId + * @param round the task round + * @param senders senders address + * @param seeds seeds[i] is the seed send by senders[i] + */ + function uploadSeed( + bytes32 taskId, + uint64 round, + address[] calldata senders, + bytes[] calldata seeds + ) public roundExists(taskId, round) { + require( + senders.length == seeds.length, + "senders length is not equal to seeds length" + ); + for (uint256 i = 0; i < seeds.length; i++) { + require( + seeds[i].length > 0 && seeds[i].length <= maxSSComitmentLength, + "commitment length exceeds limit or it is empty" + ); + } + TaskRound storage curRound = 
taskRounds[taskId][round]; + require( + curRound.status == RoundStatus.Aggregating, + "not in upload ss phase" + ); + RoundModelCommitments[] storage commitments = roundModelCommitments[ + taskId + ]; + RoundModelCommitments storage commitment = commitments[round]; + for (uint256 i = 0; i < seeds.length; i++) { + require( + commitment + .ssdata[senders[i]][msg.sender] + .seedCommitment + .length > 0, + "must upload commitment first" + ); + require( + commitment.ssdata[senders[i]][msg.sender].seedPiece.length == 0, + "cannot upload seed multiple times" + ); + commitment.ssdata[senders[i]][msg.sender].seedPiece = seeds[i]; + emit ContentUploaded( + taskId, + round, + senders[i], + msg.sender, + "SEED", + seeds[i] + ); + } + } + + /** + * @dev called by client, upload secret sharing sk commitment + * @param taskId taskId + * @param round the task round + * @param receivers the receiver addresses + * @param secretKeyCommitments secretKeyCommitments[i] is the commitment send to receivers[i] + */ + function uploadSecretKeyCommitment( + bytes32 taskId, + uint64 round, + address[] calldata receivers, + bytes[] calldata secretKeyCommitments + ) public roundExists(taskId, round) { + require( + receivers.length == secretKeyCommitments.length, + "receivers length is not equal to secretKeyCommitments length" + ); + for (uint256 i = 0; i < secretKeyCommitments.length; i++) { + require( + secretKeyCommitments[i].length > 0 && + secretKeyCommitments[i].length <= maxSSComitmentLength, + "commitment length exceeds limit or it is empty" + ); + } + TaskRound storage curRound = taskRounds[taskId][round]; + require( + curRound.status == RoundStatus.Running, + "not in secret sharing phase" + ); + RoundModelCommitments[] storage commitments = roundModelCommitments[ + taskId + ]; + RoundModelCommitments storage commitment = commitments[round]; + for (uint256 i = 0; i < secretKeyCommitments.length; i++) { + require( + commitment + .ssdata[msg.sender][receivers[i]] + .secretKeyMaskCommitment + .length == 0, + "cannot upload seed cmmt multiple times" + ); + commitment + .ssdata[msg.sender][receivers[i]] + .secretKeyMaskCommitment = secretKeyCommitments[i]; + emit ContentUploaded( + taskId, + round, + msg.sender, + receivers[i], + "SKMASKCMMT", + secretKeyCommitments[i] + ); + } + } + + /** + * @dev called by client, upload secret sharing sk commitment + * @param taskId taskId + * @param round the task round + * @param senders senders address + * @param secretkeyMasks secretkeyMasks[i] is the secretKeyMask send by senders[i] + */ + function uploadSecretkeyMask( + bytes32 taskId, + uint64 round, + address[] calldata senders, + bytes[] calldata secretkeyMasks + ) public roundExists(taskId, round) { + require( + senders.length == secretkeyMasks.length, + "senders length is not equal to secretkeyMasks length" + ); + for (uint256 i = 0; i < secretkeyMasks.length; i++) { + require( + secretkeyMasks[i].length > 0 && + secretkeyMasks[i].length <= maxSSComitmentLength, + "commitment length exceeds limit or it is empty" + ); + } + TaskRound storage curRound = taskRounds[taskId][round]; + require( + curRound.status == RoundStatus.Aggregating, + "not in upload ss phase" + ); + RoundModelCommitments[] storage commitments = roundModelCommitments[ + taskId + ]; + RoundModelCommitments storage commitment = commitments[round]; + for (uint256 i = 0; i < secretkeyMasks.length; i++) { + require( + commitment + .ssdata[senders[i]][msg.sender] + .secretKeyMaskCommitment + .length > 0, + "must upload commitment first" + ); + require( + 
commitment + .ssdata[senders[i]][msg.sender] + .secretKeyPiece + .length == 0, + "cannot upload seed multiple times" + ); + commitment + .ssdata[senders[i]][msg.sender].secretKeyPiece = secretkeyMasks[i]; + emit ContentUploaded( + taskId, + round, + senders[i], + msg.sender, + "SKMASK", + secretkeyMasks[i] + ); + } + } + + function verify( + bytes32 taskId, + address verifierAddr, + bytes memory proof, + uint256[] memory pubSignals + ) public taskExists(taskId) returns (bool) { + Verifier v = Verifier(verifierAddr); + bool valid = v.verifyProof(proof, pubSignals); + if (!valid) { + emit TaskMemberVerified(taskId, msg.sender, false); + emit TaskVerified(taskId, false); + return false; + } + + bytes32 weightCommitment = bytes32(pubSignals[pubSignals.length - 2]); + bytes32 dataCommitment = bytes32(pubSignals[pubSignals.length - 1]); + + Task storage task = createdTasks[taskId]; + require(task.finished); + // check gradient norm + VerifierState storage state = verifierStates[taskId]; + require(state.valid); + require(state.unfinishedClients[msg.sender]); + state.unfinishedClients[msg.sender] = true; + state.unfinishedCount--; + + for (uint256 i = 0; i < pubSignals.length - 2; i++) { + if (i % 2 == 0) { + state.gradients.push(pubSignals[i]); + } else { + if (state.precision == 0) { + state.precision = pubSignals[i]; + require(state.precision > task.tolerance); + } else { + require(state.precision == pubSignals[i]); + } + } + } + + if (state.unfinishedCount == 0) { + uint256 minGradient; + for (uint256 i = 0; i < state.gradients.length; i++) { + uint256 abs = v.q() - state.gradients[i]; + if (state.gradients[i] < abs) { + abs = state.gradients[i]; + } + if (abs < minGradient) { + minGradient = abs; + } + } + if (minGradient >= 10**(state.precision - task.tolerance)) { + state.valid = false; + emit TaskMemberVerified(taskId, msg.sender, false); + emit TaskVerified(taskId, false); + return false; + } + } + + // check weight commitment + RoundModelCommitments[] storage cmmts = roundModelCommitments[taskId]; + require(cmmts.length > task.currentRound); + RoundModelCommitments storage cmmt = cmmts[task.currentRound]; + if (cmmt.weightCommitment != weightCommitment) { + state.valid = false; + emit TaskMemberVerified(taskId, msg.sender, false); + emit TaskVerified(taskId, false); + return false; + } + // check data commitment + if ( + dataCommitment != + dataContract.getDataCommitment(msg.sender, task.dataSet) + ) { + state.valid = false; + emit TaskMemberVerified(taskId, msg.sender, false); + emit TaskVerified(taskId, false); + return false; + } + + emit TaskMemberVerified(taskId, msg.sender, true); + if (state.unfinishedCount == 0) { + emit TaskVerified(taskId, true); + } + + return true; + } + + function setMaxWeightCommitmentLength(uint64 maxLength) public isOwner { + maxWeightCommitmentLength = maxLength; + } + + function setMaxSSCommitmentLength(uint64 maxLength) public isOwner { + maxSSComitmentLength = maxLength; + } + + function getMaxCommitmentsLength() + public + view + returns (uint64 sslength, uint64 weightLength) + { + sslength = maxSSComitmentLength; + weightLength = maxWeightCommitmentLength; + } + + /** + * @dev Change owner + * @param newOwner address of new owner + */ + function changeOwner(address newOwner) public isOwner { + emit OwnerSet(owner, newOwner); + owner = newOwner; + } + + /** + * @dev Return owner address + * @return address of owner + */ + function getOwner() external view returns (address) { + return owner; + } +} diff --git a/contracts/PlonkVerifier3.sol 
b/contracts/PlonkVerifier3.sol index a24a1ee..e75a505 100644 --- a/contracts/PlonkVerifier3.sol +++ b/contracts/PlonkVerifier3.sol @@ -50,7 +50,7 @@ contract PlonkVerifier3 { uint256 constant X2y1 = 2388026358213174446665280700919698872609886601280537296205114254867301080648; uint256 constant X2y2 = 11507326595632554467052522095592665270651932854513688777769618397986436103170; - uint256 constant q = 21888242871839275222246405745257275088548364400416034343698204186575808495617; + uint256 public constant q = 21888242871839275222246405745257275088548364400416034343698204186575808495617; uint256 constant qf = 21888242871839275222246405745257275088696311157297823662689037894645226208583; uint256 constant w1 = 15549849457946371566896172786938980432421851627449396898353380550861104573629; diff --git a/migrations/6_deploy_hlr.js b/migrations/6_deploy_hlr.js new file mode 100644 index 0000000..65b36a1 --- /dev/null +++ b/migrations/6_deploy_hlr.js @@ -0,0 +1,9 @@ +const identity = artifacts.require("IdentityContract"); +const datahub = artifacts.require("DataHub"); +const hlr = artifacts.require("HLR"); + +module.exports = async function (deployer) { + const identityInstance = await identity.deployed() + const datahubInstance = await datahub.deployed() + await deployer.deploy(hlr, identityInstance.address, datahubInstance.address); +};
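The one-line change to contracts/PlonkVerifier3.sol in PATCH 4/4 marks `q` as `public`, so the compiler generates a `q()` view getter; HLR.verify relies on it (`v.q()`) when mapping field-encoded gradients back to signed magnitudes. Below is a minimal Truffle sketch exercising that getter, written in the style of test/test_plonk_verifier3.js; the test name and placement are illustrative only, and it assumes migration 4_deploy_verifier3.js has already deployed the verifier.

const verifier = artifacts.require("PlonkVerifier3");

contract("PlonkVerifier3 q getter", () => {
    it("exposes the scalar field modulus", async () => {
        const verifierInstance = await verifier.deployed();
        // `q` is declared `public constant`, so Solidity emits a zero-argument view getter.
        const q = await verifierInstance.q.call();
        assert.strictEqual(
            q.toString(),
            "21888242871839275222246405745257275088548364400416034343698204186575808495617"
        );
    });
});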