From e478a668c7a63ca472ed1bc9f2fc75a4eda28e09 Mon Sep 17 00:00:00 2001 From: Tom Yurek Date: Thu, 12 Sep 2019 21:17:24 -0500 Subject: [PATCH 1/6] Log-sized Polycommits --- .flake8 | 3 + benchmark/test_benchmark_poly_commit_const.py | 28 ++ benchmark/test_benchmark_poly_commit_log.py | 29 ++ honeybadgermpc/betterpairing.py | 341 ++++++++------- honeybadgermpc/poly_commit_log.py | 121 ++++++ honeybadgermpc/proofs.py | 395 ++++++++++++++++++ pairing/src/lib.rs | 78 ++-- tests/test_poly_commit_log.py | 27 ++ tests/test_proofs.py | 63 +++ 9 files changed, 909 insertions(+), 176 deletions(-) create mode 100644 benchmark/test_benchmark_poly_commit_const.py create mode 100644 benchmark/test_benchmark_poly_commit_log.py create mode 100644 honeybadgermpc/poly_commit_log.py create mode 100644 honeybadgermpc/proofs.py create mode 100644 tests/test_poly_commit_log.py create mode 100644 tests/test_proofs.py diff --git a/.flake8 b/.flake8 index 6da8b1dd..c29d279f 100644 --- a/.flake8 +++ b/.flake8 @@ -3,3 +3,6 @@ max_line_length=89 exclude = charm/, .eggs/ +per-file-ignores = + honeybadgermpc/poly_commit_log.py: N806, N803 + honeybadgermpc/proofs.py: N806, N803 diff --git a/benchmark/test_benchmark_poly_commit_const.py b/benchmark/test_benchmark_poly_commit_const.py new file mode 100644 index 00000000..11f75d4b --- /dev/null +++ b/benchmark/test_benchmark_poly_commit_const.py @@ -0,0 +1,28 @@ +from pytest import mark +from honeybadgermpc.betterpairing import G1, ZR +from honeybadgermpc.polynomial import polynomials_over +from honeybadgermpc.poly_commit_const import PolyCommitConst, gen_pc_const_crs + + +@mark.parametrize("t", [3, 10, 20, 33]) +def test_benchmark_commit(benchmark, t): + alpha = ZR.random() + g = G1.rand() + h = G1.rand() + crs = gen_pc_const_crs(t, alpha=alpha, g=g, h=h) + pc = PolyCommitConst(crs) + phi = polynomials_over(ZR).random(t) + benchmark(pc.commit, phi) + + +@mark.parametrize("t", [3, 10, 20, 33]) +def test_benchmark_create_witness(benchmark, t): + alpha = ZR.random() + g = G1.rand() + h = G1.rand() + crs = gen_pc_const_crs(t, alpha=alpha, g=g, h=h) + pc = PolyCommitConst(crs) + phi = polynomials_over(ZR).random(t) + c, phi_hat = pc.commit(phi) + pc.preprocess_prover(10) + benchmark(pc.create_witness, phi, phi_hat, 3) diff --git a/benchmark/test_benchmark_poly_commit_log.py b/benchmark/test_benchmark_poly_commit_log.py new file mode 100644 index 00000000..17fb6c62 --- /dev/null +++ b/benchmark/test_benchmark_poly_commit_log.py @@ -0,0 +1,29 @@ +from pytest import mark +from honeybadgermpc.betterpairing import ZR +from honeybadgermpc.polynomial import polynomials_over +from honeybadgermpc.poly_commit_log import PolyCommitLog + + +@mark.parametrize("t", [3, 10, 20, 33]) +def test_benchmark_commit(benchmark, t): + pc = PolyCommitLog(degree_max=t) + r = ZR.random() + phi = polynomials_over(ZR).random(t) + benchmark(pc.commit, phi, r) + + +@mark.parametrize("t", [3, 10, 20, 33]) +def test_benchmark_create_witness(benchmark, t): + pc = PolyCommitLog(degree_max=t) + r = ZR.random() + phi = polynomials_over(ZR).random(t) + benchmark(pc.create_witness, phi, r, 3) + + +@mark.parametrize("t", [3, 10, 20, 33]) +def test_benchmark_create_batch_witness(benchmark, t): + pc = PolyCommitLog(degree_max=t) + r = ZR.random() + phi = polynomials_over(ZR).random(t) + pc.preprocess_prover() + benchmark(pc.batch_create_witness, phi, r, n=3*t+1) diff --git a/honeybadgermpc/betterpairing.py b/honeybadgermpc/betterpairing.py index e93cfb55..a2bda704 100644 --- a/honeybadgermpc/betterpairing.py +++ 
b/honeybadgermpc/betterpairing.py @@ -2,9 +2,12 @@ import random import re import struct +from hashlib import sha256 # Order of BLS group -bls12_381_r = 52435875175126190479447740508185965837690552500527637822603658699938581184513 # (# noqa: E501) +bls12_381_r = ( + 52435875175126190479447740508185965837690552500527637822603658699938581184513 +) # (# noqa: E501) def pair(g1, g2): @@ -27,7 +30,7 @@ def dupe_pyg2(pyg2): def dupe_pyfr(pyfr): - out = PyFr("1") + out = PyFr(0, 0, 0, 0) out.copy(pyfr) return out @@ -45,10 +48,22 @@ def __init__(self, other=None): elif type(other) is list: assert len(other) == 2 assert len(other[0]) == 6 - x = PyFqRepr(other[0][0], other[0][1], other[0][2], - other[0][3], other[0][4], other[0][5]) - y = PyFqRepr(other[1][0], other[1][1], other[1][2], - other[1][3], other[1][4], other[1][5]) + x = PyFqRepr( + other[0][0], + other[0][1], + other[0][2], + other[0][3], + other[0][4], + other[0][5], + ) + y = PyFqRepr( + other[1][0], + other[1][1], + other[1][2], + other[1][3], + other[1][4], + other[1][5], + ) xq = PyFq() yq = PyFq() xq.from_repr(x) @@ -75,16 +90,16 @@ def __mul__(self, other): return G1(out) else: raise TypeError( - 'Invalid multiplication param. Expected G1. Got ' - + str(type(other))) + "Invalid multiplication param. Expected G1. Got " + str(type(other)) + ) def __imul__(self, other): if type(other) is G1: self.pyg1.add_assign(other.pyg1) return self raise TypeError( - 'Invalid multiplication param. Expected G1. Got ' - + str(type(other))) + "Invalid multiplication param. Expected G1. Got " + str(type(other)) + ) def __truediv__(self, other): if type(other) is G1: @@ -93,27 +108,28 @@ def __truediv__(self, other): return G1(out) else: raise TypeError( - 'Invalid division param. Expected G1. Got ' - + str(type(other))) + "Invalid division param. Expected G1. Got " + str(type(other)) + ) def __idiv__(self, other): if type(other) is G1: self.pyg1.sub_assign(other.pyg1) return self - raise TypeError( - 'Invalid division param. Expected G1. Got ' - + str(type(other))) + raise TypeError("Invalid division param. Expected G1. Got " + str(type(other))) def __pow__(self, other): - if type(other) is int: - exponend = ZR(other) - elif type(other) is ZR: + if type(other) is ZR: exponend = other else: - raise TypeError( - 'Invalid exponentiation param. Expected ZR or int. Got ' - + str(type(other))) - out = G1(dupe_pyg1(self.pyg1)) + try: + intother = int(other) + exponend = ZR(intother) + except ValueError: + raise TypeError( + "Invalid exponentiation param. Expected ZR or int. Got " + + str(type(other)) + ) + out = G1.one() self.pyg1.ppmul(exponend.val, out.pyg1) return out @@ -126,8 +142,9 @@ def __ipow__(self, other): return self else: raise TypeError( - 'Invalid exponentiation param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid exponentiation param. Expected ZR or int. 
Got " + + str(type(other)) + ) def __rmul__(self, other): return self.__mul__(other) @@ -152,10 +169,10 @@ def __getstate__(self): for i in range(6): xlist[i] = int(xlist[i], 16) ylist[i] = int(ylist[i], 16) - return struct.pack('QQQQQQQQQQQQ', *(xlist+ylist)) + return struct.pack("QQQQQQQQQQQQ", *(xlist + ylist)) def __setstate__(self, d): - xylist = struct.unpack('QQQQQQQQQQQQ', d) + xylist = struct.unpack("QQQQQQQQQQQQ", d) self.__init__([xylist[:6], xylist[6:]]) def preprocess(self, level=4): @@ -189,15 +206,30 @@ def rand(seed=None): out = PyG1() if seed is None: seed = [] - for _ in range(4): + for _ in range(8): seed.append(random.SystemRandom().randint(0, 4294967295)) - out.rand(seed[0], seed[1], seed[2], seed[3]) + out.rand(seed) else: assert type(seed) is list - assert len(seed) == 4 - out.rand(seed[0], seed[1], seed[2], seed[3]) + assert len(seed) <= 8 + out.rand(seed) return G1(out) + # length determines how many G1 values to return + @staticmethod + def hash(bytestr, length=None): + assert type(bytestr) is bytes + hashout = sha256(bytestr).hexdigest() + seed = [int(hashout[i: i + 8], 16) for i in range(0, 64, 8)] + if length is None: + return G1.rand(seed) + assert type(length) is int + out = [G1.rand(seed)] + for j in range(0, length - 1): + bytestr += b"x42" + out.append(G1.hash(bytestr)) + return out + class G2: def __init__(self, other=None): @@ -206,14 +238,38 @@ def __init__(self, other=None): if type(other) is list: assert len(other) == 4 assert len(other[0]) == 6 - x1 = PyFqRepr(other[0][0], other[0][1], other[0][2], - other[0][3], other[0][4], other[0][5]) - x2 = PyFqRepr(other[1][0], other[1][1], other[1][2], - other[1][3], other[1][4], other[1][5]) - y1 = PyFqRepr(other[2][0], other[2][1], other[2][2], - other[2][3], other[2][4], other[2][5]) - y2 = PyFqRepr(other[3][0], other[3][1], other[3][2], - other[3][3], other[3][4], other[3][5]) + x1 = PyFqRepr( + other[0][0], + other[0][1], + other[0][2], + other[0][3], + other[0][4], + other[0][5], + ) + x2 = PyFqRepr( + other[1][0], + other[1][1], + other[1][2], + other[1][3], + other[1][4], + other[1][5], + ) + y1 = PyFqRepr( + other[2][0], + other[2][1], + other[2][2], + other[2][3], + other[2][4], + other[2][5], + ) + y2 = PyFqRepr( + other[3][0], + other[3][1], + other[3][2], + other[3][3], + other[3][4], + other[3][5], + ) xq = PyFq2() yq = PyFq2() xq.from_repr(x1, x2) @@ -229,7 +285,9 @@ def __str__(self): x2 = int(out[113:211], 0) y1 = int(out[226:324], 0) y2 = int(out[331:429], 0) - return "(" + str(x1) + " + " + str(x2) + "u, " + str(y1) + " + " + str(y2) + "u)" + return ( + "(" + str(x1) + " + " + str(x2) + "u, " + str(y1) + " + " + str(y2) + "u)" + ) def __repr__(self): return str(self) @@ -252,8 +310,8 @@ def __truediv__(self, other): return G2(out) else: raise TypeError( - 'Invalid division param. Expected G2. Got ' - + str(type(other))) + "Invalid division param. Expected G2. Got " + str(type(other)) + ) def __idiv__(self, other): if type(other) is G2: @@ -267,8 +325,9 @@ def __pow__(self, other): exponend = other else: raise TypeError( - 'Invalid exponentiation param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid exponentiation param. Expected ZR or int. Got " + + str(type(other)) + ) out = G2(dupe_pyg2(self.pyg2)) self.pyg2.ppmul(exponend.val, out.pyg2) return out @@ -288,8 +347,9 @@ def __ipow__(self, other): return self else: raise TypeError( - 'Invalid exponentiation param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid exponentiation param. Expected ZR or int. 
Got " + + str(type(other)) + ) def __rmul__(self, other): return self.__mul__(other) @@ -351,15 +411,30 @@ def rand(seed=None): out = PyG2() if seed is None: seed = [] - for _ in range(4): + for _ in range(8): seed.append(random.SystemRandom().randint(0, 4294967295)) - out.rand(seed[0], seed[1], seed[2], seed[3]) + out.rand(seed) else: assert type(seed) is list - assert len(seed) == 4 - out.rand(seed[0], seed[1], seed[2], seed[3]) + assert len(seed) <= 8 + out.rand(seed) return G2(out) + # length determines how many G2 values to return + @staticmethod + def hash(bytestr, length=None): + assert type(bytestr) is bytes + hashout = sha256(bytestr).hexdigest() + seed = [int(hashout[i: i + 8], 16) for i in range(0, 64, 8)] + if length is None: + return G2.rand(seed) + assert type(length) is int + out = [G2.rand(seed)] + for j in range(0, length - 1): + bytestr += b"x42" + out.append(G2.hash(bytestr)) + return out + class GT: def __init__(self, other=None): @@ -374,11 +449,11 @@ def __init__(self, other=None): self.pyfq12.from_strs(*other) elif type(other) is int: self.pyfq12 = PyFq12() - self.pyfq12.from_strs(str(other), *["0"]*11) + self.pyfq12.from_strs(str(other), *["0"] * 11) elif type(other) is str: - lst = [x.strip() for x in other.split(',')] + lst = [x.strip() for x in other.split(",")] assert len(lst) == 12 - if lst[0][1] == 'x': + if lst[0][1] == "x": for i in range(len(lst)): lst[i] = str(int(lst[i], 0)) self.pyfq12 = PyFq12() @@ -402,8 +477,9 @@ def oldpow(self, other): return out else: raise TypeError( - 'Invalid exponentiation param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid exponentiation param. Expected ZR or int. Got " + + str(type(other)) + ) def __pow__(self, other): if type(other) is int: @@ -412,8 +488,9 @@ def __pow__(self, other): exponend = other else: raise TypeError( - 'Invalid exponentiation param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid exponentiation param. Expected ZR or int. Got " + + str(type(other)) + ) outfq12 = PyFq12() self.pyfq12.pppow(exponend.val, outfq12) return GT(outfq12) @@ -425,8 +502,8 @@ def __mul__(self, other): return GT(out) else: raise TypeError( - 'Invalid multiplication param. Expected GT. Got ' - + str(type(other))) + "Invalid multiplication param. Expected GT. Got " + str(type(other)) + ) def __truediv__(self, other): if type(other) is GT: @@ -437,8 +514,8 @@ def __truediv__(self, other): return GT(out) else: raise TypeError( - 'Invalid division param. Expected GT. Got ' - + str(type(other))) + "Invalid division param. Expected GT. 
Got " + str(type(other)) + ) def __radd__(self, other): return self.__add__(other) @@ -486,12 +563,14 @@ def preprocess(self, level=4): def rand(seed=None): r = bls12_381_r if seed is None: - r = random.SystemRandom().randint(0, r-1) + r = random.SystemRandom().randint(0, r - 1) else: # Generate pseudorandomly based on seed - r = random.Random(seed).randint(0, r-1) + r = random.Random(seed).randint(0, r - 1) exp = ZR(str(r)) - out = GT('0x0158e1808f680056282c178bcba60c5acba8f0475a3c41a71d81f868772583714dc4b3eb5ca8c5d5061996e5c5ef24bcc,0x0b9df4a93419648e1d43121721548f16ed690a5f12c73ce16eba5969fe05995534cb764a7de2439edaa94924a939984d,0x0ad9d36bdee6b0d48b80a486461ec570e7f15393f721aa7631c5b685bb5b1e7b008f25437692e561083cac10c0a0aab0,0x0fb4a6fd9c72613c58e85dee45f293c9ac3df84243b775a80ca855e690f438b6361f82ed31c202709c16f75dd431e962,0x03b22c64e0522668d304ed847a33e02930cdb42f79ffab3aa2c54a7718283cf52fd7532d96e14f749c3e09ce4beabe49,0x01b597b86cbce4fc08a09487ec6d7141e3f4b6e02ec56fa57453b03ee0f2f535f3b2414d7b8366f45687a65475160ed0,0x0989f5f2a47ae4f5095ba9323b07330617f214f3972dc34be643e8ec361e3f04b260b845c46505429c6be9d441e721d1,0x01893a49f8840733e25c408a9fe57f15047da20a0fd498ea168b99977b99da42a32430a4934fd0acb7bc61b5abfb391a,0x0155d79b2f854e71ec012d26bdc0e05e0ffd4f002bfb4139b9e779f9e5fce72f0770f66d4cd475bfa4a6e769210a4e97a,0x016659bfd6c7b703935fd139f5c73653d1dd470435f05e73d2711bd5be4dd36c337a736f242f9c41d1674e18063f0548d,0x03cfeca937de62d23620a0de8d9e04b6318100480e8b10c30c16e33684629c34337ff25742986b90cfcf325fbae99564,0x0cb863283c7d744ddbb00c427295d45aaa3bd7be6181a5369b3bdbb89ffe3179dc58fd2aeca27bf19bf25b99af0cbd23') # (# noqa: E501) + out = GT( + "0x0158e1808f680056282c178bcba60c5acba8f0475a3c41a71d81f868772583714dc4b3eb5ca8c5d5061996e5c5ef24bcc,0x0b9df4a93419648e1d43121721548f16ed690a5f12c73ce16eba5969fe05995534cb764a7de2439edaa94924a939984d,0x0ad9d36bdee6b0d48b80a486461ec570e7f15393f721aa7631c5b685bb5b1e7b008f25437692e561083cac10c0a0aab0,0x0fb4a6fd9c72613c58e85dee45f293c9ac3df84243b775a80ca855e690f438b6361f82ed31c202709c16f75dd431e962,0x03b22c64e0522668d304ed847a33e02930cdb42f79ffab3aa2c54a7718283cf52fd7532d96e14f749c3e09ce4beabe49,0x01b597b86cbce4fc08a09487ec6d7141e3f4b6e02ec56fa57453b03ee0f2f535f3b2414d7b8366f45687a65475160ed0,0x0989f5f2a47ae4f5095ba9323b07330617f214f3972dc34be643e8ec361e3f04b260b845c46505429c6be9d441e721d1,0x01893a49f8840733e25c408a9fe57f15047da20a0fd498ea168b99977b99da42a32430a4934fd0acb7bc61b5abfb391a,0x0155d79b2f854e71ec012d26bdc0e05e0ffd4f002bfb4139b9e779f9e5fce72f0770f66d4cd475bfa4a6e769210a4e97a,0x016659bfd6c7b703935fd139f5c73653d1dd470435f05e73d2711bd5be4dd36c337a736f242f9c41d1674e18063f0548d,0x03cfeca937de62d23620a0de8d9e04b6318100480e8b10c30c16e33684629c34337ff25742986b90cfcf325fbae99564,0x0cb863283c7d744ddbb00c427295d45aaa3bd7be6181a5369b3bdbb89ffe3179dc58fd2aeca27bf19bf25b99af0cbd23" # (# noqa: E501) + ) out **= exp return out @@ -499,16 +578,25 @@ def rand(seed=None): class ZR: def __init__(self, val=None): if val is None: - self.val = PyFr("0") + self.val = PyFr(0, 0, 0, 0) elif type(val) is int: - self.val = PyFr(str(val % (bls12_381_r))) + uint = val % (bls12_381_r) + u1 = uint % 2 ** 64 + u2 = (uint // (2 ** 64)) % 2 ** 64 + u3 = (uint // (2 ** 128)) % 2 ** 64 + u4 = uint // (2 ** 192) + self.val = PyFr(u1, u2, u3, u4) elif type(val) is str: - if val[0:2] == '0x': + if val[0:2] == "0x": intval = int(val, 0) - self.val = PyFr(str(intval % (bls12_381_r))) else: intval = int(val) - self.val = PyFr(str(intval % (bls12_381_r))) + uint = intval % (bls12_381_r) + u1 = 
uint % 2 ** 64 + u2 = (uint // (2 ** 64)) % 2 ** 64 + u3 = (uint // (2 ** 128)) % 2 ** 64 + u4 = uint // (2 ** 192) + self.val = PyFr(u1, u2, u3, u4) elif type(val) is PyFr: self.val = val @@ -530,18 +618,13 @@ def __add__(self, other): return ZR(out) elif type(other) is int: out = dupe_pyfr(self.val) - if other < 0: - other *= -1 - addend = PyFr(str(other)) - addend.negate() - else: - addend = PyFr(str(other)) - out.add_assign(addend) + zrother = ZR(other) + out.add_assign(zrother.val) return ZR(out) else: raise TypeError( - 'Invalid addition param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid addition param. Expected ZR or int. Got " + str(type(other)) + ) def __radd__(self, other): assert type(other) is int @@ -552,18 +635,13 @@ def __iadd__(self, other): self.val.add_assign(other.val) return self elif type(other) is int: - if other < 0: - other *= -1 - addend = PyFr(str(other)) - addend.negate() - else: - addend = PyFr(str(other)) - self.val.add_assign(addend) + zrother = ZR(other) + self.val.add_assign(zrother.val) return self else: raise TypeError( - 'Invalid addition param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid addition param. Expected ZR or int. Got " + str(type(other)) + ) def __sub__(self, other): if type(other) is ZR: @@ -572,18 +650,13 @@ def __sub__(self, other): return ZR(out) elif type(other) is int: out = dupe_pyfr(self.val) - if other < 0: - other *= -1 - subend = PyFr(str(other)) - subend.negate() - else: - subend = PyFr(str(other)) - out.sub_assign(subend) + zrother = ZR(other) + out.sub_assign(zrother.val) return ZR(out) else: raise TypeError( - 'Invalid addition param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid addition param. Expected ZR or int. Got " + str(type(other)) + ) def __rsub__(self, other): assert type(other) is int @@ -594,18 +667,13 @@ def __isub__(self, other): self.val.sub_assign(other.val) return self elif type(other) is int: - if other < 0: - other *= -1 - subend = PyFr(str(other)) - subend.negate() - else: - subend = PyFr(str(other)) - self.val.sub_assign(subend) + zrother = ZR(other) + self.val.sub_assign(zrother.val) return self else: raise TypeError( - 'Invalid addition param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid addition param. Expected ZR or int. Got " + str(type(other)) + ) def __mul__(self, other): if type(other) is ZR: @@ -614,36 +682,28 @@ def __mul__(self, other): return ZR(out) elif type(other) is int: out = dupe_pyfr(self.val) - if other < 0: - other *= -1 - prodend = PyFr(str(other)) - prodend.negate() - else: - prodend = PyFr(str(other)) - out.mul_assign(prodend) + zrother = ZR(other) + out.mul_assign(zrother.val) return ZR(out) else: raise TypeError( - 'Invalid multiplication param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid multiplication param. Expected ZR or int. Got " + + str(type(other)) + ) def __imul__(self, other): if type(other) is ZR: self.val.mul_assign(other.val) return self elif type(other) is int: - if other < 0: - other *= -1 - prodend = PyFr(str(other)) - prodend.negate() - else: - prodend = PyFr(str(other)) - self.val.mul_assign(prodend) + zrother = ZR(other) + self.val.mul_assign(zrother.val) return self else: raise TypeError( - 'Invalid multiplication param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid multiplication param. Expected ZR or int. 
Got " + + str(type(other)) + ) def __rmul__(self, other): assert type(other) is int @@ -658,36 +718,32 @@ def __truediv__(self, other): return ZR(out) elif type(other) is int: out = dupe_pyfr(self.val) - if other < 0: - other *= -1 - prodend = PyFr(str(other)) - prodend.negate() - else: - prodend = PyFr(str(other)) - prodend.inverse() - out.mul_assign(prodend) + zrother = ZR(other) + zrother.val.inverse() + out.mul_assign(zrother.val) return ZR(out) else: raise TypeError( - 'Invalid division param. Expected ZR or int. Got ' - + str(type(other))) + "Invalid division param. Expected ZR or int. Got " + str(type(other)) + ) def __rtruediv__(self, other): return ZR(other).__truediv__(self) def __pow__(self, other): if type(other) is int: - exponend = ZR(other % (bls12_381_r-1)) + exponend = ZR(other % (bls12_381_r - 1)) out = dupe_pyfr(self.val) out.pow_assign(exponend.val) return ZR(out) elif type(other) is ZR: raise TypeError( - 'Invalid multiplication param. Expected int. Got ZR. This is not a bug') + "Invalid multiplication param. Expected int. Got ZR. This is not a bug" + ) else: raise TypeError( - 'Invalid multiplication param. Expected int. Got ' - + str(type(other))) + "Invalid multiplication param. Expected int. Got " + str(type(other)) + ) def __neg__(self): out = dupe_pyfr(self.val) @@ -710,20 +766,25 @@ def __setstate__(self, d): def random(seed=None): r = bls12_381_r if seed is None: - r = random.SystemRandom().randint(0, r-1) + r = random.SystemRandom().randint(0, r - 1) return ZR(str(r)) else: # Generate pseudorandomly based on seed - r = random.Random(seed).randint(0, r-1) + r = random.Random(seed).randint(0, r - 1) return ZR(str(r)) @staticmethod def zero(): - return ZR(PyFr("0")) + return ZR(0) @staticmethod def one(): - return ZR(PyFr("1")) + return ZR(1) + + @staticmethod + def hash(bytestr): + assert type(bytestr) is bytes + return ZR("0x" + sha256(bytestr).hexdigest()) def lagrange_at_x(s, j, x): diff --git a/honeybadgermpc/poly_commit_log.py b/honeybadgermpc/poly_commit_log.py new file mode 100644 index 00000000..1d36ba3c --- /dev/null +++ b/honeybadgermpc/poly_commit_log.py @@ -0,0 +1,121 @@ +from honeybadgermpc.betterpairing import ZR, G1 +from honeybadgermpc.proofs import prove_inner_product_one_known, \ + verify_inner_product_one_known, prove_batch_inner_product_one_known, \ + verify_batch_inner_product_one_known, MerkleTree +import pickle + + +class PolyCommitLog: + def __init__(self, crs=None, degree_max=33): + if crs is None: + n = degree_max + 1 + self.gs = G1.hash(b"honeybadgerg", length=n) + self.h = G1.hash(b"honeybadgerh") + self.u = G1.hash(b"honeybadgeru") + else: + assert len(crs) == 3 + [self.gs, self.hs, self.u] = crs + self.y_vecs = [] + + def commit(self, phi, r): + c = G1.one() + for i in range(len(phi.coeffs)): + c *= self.gs[i] ** phi.coeffs[i] + c *= self.h ** r + return c + + def create_witness(self, phi, r, i): + t = len(phi.coeffs) - 1 + y_vec = [ZR(i) ** j for j in range(t+1)] + s_vec = [ZR.random() for _ in range(t+1)] + sy_prod = ZR(0) + S = G1.one() + for j in range(t+1): + S *= self.gs[j] ** s_vec[j] + sy_prod += s_vec[j] * y_vec[j] + T = self.gs[0] ** sy_prod + rho = ZR.random() + S *= self.h ** rho + # Fiat Shamir + challenge = ZR.hash(pickle.dumps([self.gs, self.h, self.u, S, T])) + d_vec = [phi.coeffs[j] + s_vec[j] * challenge for j in range(t+1)] + D = G1.one() + for j in range(t+1): + D *= self.gs[j] ** d_vec[j] + mu = r + rho*challenge + comm, t_hat, iproof = prove_inner_product_one_known( + d_vec, y_vec, crs=[self.gs, self.u]) + 
return [S, T, D, mu, t_hat, iproof] + + # Create witnesses for points 1 to n. n defaults to 3*degree+1 if unset. + def batch_create_witness(self, phi, r, n=None): + t = len(phi.coeffs) - 1 + if n is None: + n = 3*t + 1 + if len(self.y_vecs) < n: + i = len(self.y_vecs) + while(i < n): + self.y_vecs.append([ZR(i+1) ** j for j in range(t+1)]) + i += 1 + s_vec = [ZR.random() for _ in range(t+1)] + sy_prods = [ZR(0) for _ in range(n)] + S = G1.one() + T_vec = [None] * n + witnesses = [[] for _ in range(n)] + for i in range(t+1): + S *= self.gs[i] ** s_vec[i] + for j in range(n): + for i in range(t+1): + sy_prods[j] += s_vec[i] * self.y_vecs[j][i] + T_vec[j] = self.gs[0] ** sy_prods[j] + rho = ZR.random() + S *= self.h ** rho + # Fiat Shamir + tree = MerkleTree() + for j in range(n): + tree.append(pickle.dumps(T_vec[j])) + roothash = tree.get_root_hash() + for j in range(n): + branch = tree.get_branch(j) + witnesses[j].append(roothash) + witnesses[j].append(branch) + challenge = ZR.hash(pickle.dumps([roothash, self.gs, self.h, self.u, S])) + d_vec = [phi.coeffs[j] + s_vec[j] * challenge for j in range(t+1)] + D = G1.one() + for j in range(t+1): + D *= self.gs[j] ** d_vec[j] + mu = r + rho*challenge + comm, t_hats, iproofs = prove_batch_inner_product_one_known( + d_vec, self.y_vecs, crs=[self.gs, self.u]) + for j in range(len(witnesses)): + witnesses[j] += [S, T_vec[j], D, mu, t_hats[j], iproofs[j]] + return witnesses + + def verify_eval(self, c, i, phi_at_i, witness): + t = witness[-1][0] - 1 + y_vec = [ZR(i) ** j for j in range(t+1)] + if len(witness) == 6: + [S, T, D, mu, t_hat, iproof] = witness + challenge = ZR.hash(pickle.dumps([self.gs, self.h, self.u, S, T])) + else: + [roothash, branch, S, T, D, mu, t_hat, iproof] = witness + print(branch) + if not MerkleTree.verify_membership(pickle.dumps(T), branch, roothash): + return False + challenge = ZR.hash(pickle.dumps([roothash, self.gs, self.h, self.u, S])) + ret = self.gs[0]**t_hat == self.gs[0]**phi_at_i * T ** challenge + print(ret) + ret &= D * self.h**mu == S**challenge * c + print(ret) + if len(iproof[-1]) > 3: + ret &= verify_batch_inner_product_one_known( + D, t_hat, y_vec, iproof, crs=[self.gs, self.u]) + else: + ret &= verify_inner_product_one_known( + D, t_hat, y_vec, iproof, crs=[self.gs, self.u]) + return ret + + def preprocess_prover(self, level=10): + self.u.preprocess(level) + for i in range(len(self.gs)-1): + self.y_vecs.append([ZR(i+1) ** j for j in range(len(self.gs))]) diff --git a/honeybadgermpc/proofs.py b/honeybadgermpc/proofs.py new file mode 100644 index 00000000..dfc81428 --- /dev/null +++ b/honeybadgermpc/proofs.py @@ -0,0 +1,395 @@ +from honeybadgermpc.betterpairing import ZR, G1 +import pickle +import math +import hashlib + + +class MerkleTree: + def __init__(self, leaves=None): + if leaves is None: + self.leaves = [] + else: + assert type(leaves) in [list, tuple] + self.leaves = leaves + self.tree = None + + def build_tree(self): + bottomrow = 2 ** math.ceil(math.log(len(self.leaves), 2)) + self.tree = [b""] * (2 * bottomrow) + for i in range(len(self.leaves)): + self.tree[bottomrow + i] = MerkleTree.hash(self.leaves[i]) + for i in range(bottomrow - 1, 0, -1): + self.tree[i] = MerkleTree.hash(self.tree[i * 2] + self.tree[i * 2 + 1]) + + def append(self, leaf): + assert type(leaf) is bytes + self.leaves.append(leaf) + self.tree = None + + def append_many(self, leaves): + assert type(leaves) in [list, tuple] + for leaf in leaves: + assert type(leaf) is bytes + self.leaves += list(leaves) + self.tree = None + + def 
get_root_hash(self): + if self.tree is None: + self.build_tree() + return self.tree[1] + + def get_branch(self, index): + if self.tree is None: + self.build_tree() + res = [] + t = index + (len(self.tree) >> 1) + while t > 1: + res.append(self.tree[t ^ 1]) # we are picking up the sibling + t //= 2 + return [res, index] + + @staticmethod + def hash(item): + assert type(item) is bytes + return hashlib.sha256(item).digest() + + @staticmethod + def verify_membership(leaf, branch, root_hash): + mbranch, index = branch + assert type(leaf) is bytes + # Index has information on whether we are facing a left or a right sibling + tmp = MerkleTree.hash(leaf) + tindex = index + for br in mbranch: + if tindex % 2 == 1: + tmp = MerkleTree.hash(br + tmp) + else: + tmp = MerkleTree.hash(tmp + br) + tindex >>= 1 + if tmp == root_hash: + return True + return False + + +# Inner product (aka dot product) argument from Bulletproofs paper. Not zero knowledge! +# g and h are vectors of G1 elements, a and b are vectors that form the inner product +def prove_inner_product(a_vec, b_vec, comm=None, crs=None): + def recursive_proof(g_vec, h_vec, u, a_vec, b_vec, n, P, transcript): + if n == 1: + proof = [] + proof.append([a_vec[0], b_vec[0]]) + return proof + proofstep = [] + if n % 2 == 1: + na, nb = -1 * a_vec[-1], -1 * b_vec[-1] + P *= g_vec[-1] ** (na) * h_vec[-1] ** (nb) * u ** (-na * nb) + proofstep.append(na) + proofstep.append(nb) + n_p = n//2 + cl = ZR(0) + cr = ZR(0) + L = G1.one() + R = G1.one() + for i in range(n_p): + cl += a_vec[:n_p][i] * b_vec[n_p:][i] + cr += a_vec[n_p:][i] * b_vec[:n_p][i] + L *= g_vec[n_p:][i]**a_vec[:n_p][i] * h_vec[:n_p][i]**b_vec[n_p:][i] + R *= g_vec[:n_p][i]**a_vec[n_p:][i] * h_vec[n_p:][i]**b_vec[:n_p][i] + L *= u ** cl + R *= u ** cr + # Fiat Shamir L, R, state... + transcript += pickle.dumps([g_vec, h_vec, u, P, L, R]) + x = ZR.hash(transcript) + xi = 1/x + # this part must come after the challenge is generated, which must + # come after L and R are calculated. 
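+        # (the challenge x is hashed from a transcript that already contains L and R,
+        # and the folded g/h/a/b vectors below all depend on x, so the L/R loop has to
+        # finish before the folding loop can start.)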
Don't try to condense the loops + g_vec_p, h_vec_p, a_vec_p, b_vec_p = [], [], [], [] + for i in range(n_p): + g_vec_p.append(g_vec[:n_p][i] ** xi * g_vec[n_p:][i] ** x) + h_vec_p.append(h_vec[:n_p][i] ** x * h_vec[n_p:][i] ** xi) + a_vec_p.append(a_vec[:n_p][i] * x + a_vec[n_p:][i] * xi) + b_vec_p.append(b_vec[:n_p][i] * xi + b_vec[n_p:][i] * x) + P_p = L**(x*x) * P * R**(xi*xi) + proof = recursive_proof( + g_vec_p, h_vec_p, u, a_vec_p, b_vec_p, n_p, P_p, transcript) + proofstep.append(L) + proofstep.append(R) + proof.append(proofstep) + return proof + n = len(a_vec) + assert len(b_vec) == n + if crs is None: + g_vec = G1.hash(b"honeybadgerg", length=n) + h_vec = G1.hash(b"honeybadgerh", length=n) + u = G1.hash(b"honeybadgeru") + else: + [g_vec, h_vec, u] = crs + if comm is not None: + P = comm * G1.one() + else: + comm = G1.one() + for i in range(n): + comm *= g_vec[i] ** a_vec[i] * h_vec[i] ** b_vec[i] + iprod = ZR(0) + for i in range(n): + iprod += a_vec[i] * b_vec[i] + P = comm * u ** iprod + transcript = b'' + return [comm, iprod, [n]+recursive_proof( + g_vec, h_vec, u, a_vec, b_vec, n, P, transcript)] + + +def verify_inner_product(comm, iprod, proof, crs=None): + def recursive_verify(g_vec, h_vec, u, proof, n, P, transcript): + if n == 1: + a, b = proof[0][0], proof[0][1] + return P == g_vec[0]**a * h_vec[0]**b * u ** (a*b) + if n % 2 == 1: + [na, nb, L, R] = proof[-1] + P *= g_vec[-1] ** (na) * h_vec[-1] ** (nb) * u ** (-na * nb) + else: + [L, R] = proof[-1] + transcript += pickle.dumps([g_vec, h_vec, u, P, L, R]) + x = ZR.hash(transcript) + xi = 1/x + n_p = n//2 + g_vec_p = [] + h_vec_p = [] + for i in range(n_p): + g_vec_p.append(g_vec[:n_p][i]**xi * g_vec[n_p:][i]**x) + h_vec_p.append(h_vec[:n_p][i]**x * h_vec[n_p:][i]**xi) + P_p = L**(x*x) * P * R**(xi*xi) + return recursive_verify(g_vec_p, h_vec_p, u, proof[:-1], n_p, P_p, transcript) + n = proof[0] + iproof = proof[1:] + if crs is None: + g_vec = G1.hash(b"honeybadgerg", length=n) + h_vec = G1.hash(b"honeybadgerh", length=n) + u = G1.hash(b"honeybadgeru") + else: + [g_vec, h_vec, u] = crs + P = comm * u ** iprod + transcript = b'' + return recursive_verify(g_vec, h_vec, u, iproof, n, P, transcript) + + +# Inner product argument where one vector (b_vec) is known by both parties +def prove_inner_product_one_known(a_vec, b_vec, comm=None, crs=None): + def recursive_proof(g_vec, a_vec, b_vec, u, n, P, transcript): + if n == 1: + proof = [] + proof.append([a_vec[0]]) + return proof + proofstep = [] + if n % 2 == 1: + na = -1 * a_vec[-1] + P *= g_vec[-1] ** (na) * u ** (na * b_vec[-1]) + proofstep.append(na) + n_p = n//2 + cl = ZR(0) + cr = ZR(0) + L = G1.one() + R = G1.one() + for i in range(n_p): + cl += a_vec[:n_p][i] * b_vec[n_p:][i] + cr += a_vec[n_p:][i] * b_vec[:n_p][i] + L *= g_vec[n_p:][i]**a_vec[:n_p][i] + R *= g_vec[:n_p][i]**a_vec[n_p:][i] + L *= u ** cl + R *= u ** cr + # Fiat Shamir L, R, state... + transcript += pickle.dumps([g_vec, u, P, L, R]) + x = ZR.hash(transcript) + xi = 1/x + # this part must come after the challenge is generated, which must + # come after L and R are calculated. 
Don't try to condense the loops + g_vec_p, a_vec_p, b_vec_p = [], [], [] + for i in range(n_p): + g_vec_p.append(g_vec[:n_p][i] ** xi * g_vec[n_p:][i] ** x) + a_vec_p.append(a_vec[:n_p][i] * x + a_vec[n_p:][i] * xi) + b_vec_p.append(b_vec[:n_p][i] * xi + b_vec[n_p:][i] * x) + P_p = L**(x*x) * P * R**(xi*xi) + proof = recursive_proof(g_vec_p, a_vec_p, b_vec_p, u, n_p, P_p, transcript) + proofstep.append(L) + proofstep.append(R) + proof.append(proofstep) + return proof + n = len(a_vec) + assert len(b_vec) == n + if crs is None: + g_vec = G1.hash(b"honeybadgerg", length=n) + u = G1.hash(b"honeybadgeru") + else: + [g_vec, u] = crs + if comm is not None: + P = comm * G1.one() + else: + comm = G1.one() + for i in range(n): + comm *= g_vec[i] ** a_vec[i] + iprod = ZR(0) + for i in range(n): + iprod += a_vec[i] * b_vec[i] + P = comm * u ** iprod + transcript = b'' + return [comm, iprod, [n]+recursive_proof(g_vec, a_vec, b_vec, u, n, P, transcript)] + + +def verify_inner_product_one_known(comm, iprod, b_vec, proof, crs=None): + def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): + if n == 1: + a, b = proof[0][0], b_vec[0] + return P == g_vec[0]**a * u ** (a*b) + if n % 2 == 1: + [na, L, R] = proof[-1] + P *= g_vec[-1] ** (na) * u ** (na * b_vec[-1]) + else: + [L, R] = proof[-1] + transcript += pickle.dumps([g_vec, u, P, L, R]) + x = ZR.hash(transcript) + xi = 1/x + n_p = n//2 + g_vec_p = [] + b_vec_p = [] + for i in range(n_p): + g_vec_p.append(g_vec[:n_p][i]**xi * g_vec[n_p:][i]**x) + b_vec_p.append(b_vec[:n_p][i] * xi + b_vec[n_p:][i] * x) + P_p = L**(x*x) * P * R**(xi*xi) + return recursive_verify(g_vec_p, b_vec_p, u, proof[:-1], n_p, P_p, transcript) + n = proof[0] + iproof = proof[1:] + if crs is None: + g_vec = G1.hash(b"honeybadgerg", length=n) + u = G1.hash(b"honeybadgeru") + else: + [g_vec, u] = crs + P = comm * u ** iprod + transcript = b'' + return recursive_verify(g_vec, b_vec, u, iproof, n, P, transcript) + + +# Inner product argument where one vector (b_vec) is known by both parties +# Precomputing u is recommended +def prove_batch_inner_product_one_known(a_vec, b_vecs, comm=None, crs=None): + def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): + if n == 1: + proofs = [None]*len(b_vecs) + for j in range(len(proofs)): + proofs[j] = [[a_vec[0]]] + return proofs + proofsteps = [[] for _ in range(len(b_vecs))] + if n % 2 == 1: + na = -1 * a_vec[-1] + for j in range(len(P_vec)): + P_vec[j] *= g_vec[-1] ** (na) * u ** (na * b_vecs[j][-1]) + proofsteps[j].append(na) + n_p = n//2 + cls = [ZR(0) for _ in range(len(b_vecs))] + crs = [ZR(0) for _ in range(len(b_vecs))] + La = G1.one() + Ra = G1.one() + L_vec = [None] * len(b_vecs) + R_vec = [None] * len(b_vecs) + for i in range(n_p): + La *= g_vec[n_p:][i]**a_vec[:n_p][i] + Ra *= g_vec[:n_p][i]**a_vec[n_p:][i] + for j in range(len(b_vecs)): + for i in range(n_p): + cls[j] += a_vec[:n_p][i] * b_vecs[j][n_p:][i] + crs[j] += a_vec[n_p:][i] * b_vecs[j][:n_p][i] + L_vec[j] = La * (u ** cls[j]) + R_vec[j] = Ra * (u ** crs[j]) + # Fiat Shamir + # Make a merkle tree over everything that varies between verifiers + # TODO: na should be in the transcript + tree = MerkleTree() + for j in range(len(b_vecs)): + tree.append(pickle.dumps([b_vecs[j], P_vec[j], L_vec[j], R_vec[j]])) + roothash = tree.get_root_hash() + for j in range(len(b_vecs)): + branch = tree.get_branch(j) + proofsteps[j].append(roothash) + proofsteps[j].append(branch) + transcript += pickle.dumps([g_vec, roothash]) + x = ZR.hash(transcript) + xi = 1/x + # this 
part must come after the challenge is generated, which must + # come after L and R are calculated. Don't try to condense the loops + g_vec_p, a_vec_p = [], [] + b_vecs_p = [[] for _ in range(len(b_vecs))] + for i in range(n_p): + g_vec_p.append(g_vec[:n_p][i] ** xi * g_vec[n_p:][i] ** x) + a_vec_p.append(a_vec[:n_p][i] * x + a_vec[n_p:][i] * xi) + for j in range(len(b_vecs)): + b_vecs_p[j].append(b_vecs[j][:n_p][i] * xi + b_vecs[j][n_p:][i] * x) + x2, xi2 = x*x, xi*xi + Lax2Raxi2 = La ** x2 * Ra ** xi2 + for j in range(len(P_vec)): + # Instead of doing L_vec[j]**(x2)*P_vec[j]*R_vec[j]**(xi2), save computation + P_vec[j] *= Lax2Raxi2 * u ** (x2 * cls[j] + xi2 * crs[j]) + proofs = recursive_proofs(g_vec_p, a_vec_p, b_vecs_p, u, n_p, P_vec, transcript) + for j in range(len(proofs)): + proofsteps[j].append(L_vec[j]) + proofsteps[j].append(R_vec[j]) + proofs[j].append(proofsteps[j]) + return proofs + n = len(a_vec) + if crs is None: + g_vec = G1.hash(b"honeybadgerg", length=n) + u = G1.hash(b"honeybadgeru") + else: + [g_vec, u] = crs + if comm is None: + comm = G1.one() + for i in range(n): + comm *= g_vec[i] ** a_vec[i] + iprods = [ZR(0) for _ in range(len(b_vecs))] + P_vec = [None] * len(b_vecs) + for j in range(len(b_vecs)): + for i in range(n): + iprods[j] += a_vec[i] * b_vecs[j][i] + P_vec[j] = comm * u ** iprods[j] + transcript = pickle.dumps(u) + proofs = recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript) + for j in range(len(proofs)): + proofs[j].insert(0, n) + return [comm, iprods, proofs] + + +# Verify an inner product argument (with one vector known) that was generated in a batch +# Not to be confused with a function that does multiple verifications at once +def verify_batch_inner_product_one_known(comm, iprod, b_vec, proof, crs=None): + def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): + if n == 1: + a, b = proof[0][0], b_vec[0] + return P == g_vec[0]**a * u ** (a*b) + if n % 2 == 1: + [na, roothash, branch, L, R] = proof[-1] + P *= g_vec[-1] ** (na) * u ** (na * b_vec[-1]) + else: + [roothash, branch, L, R] = proof[-1] + # TODO: find a way to make the protocol abort nicely if this fails + assert MerkleTree.verify_membership( + pickle.dumps([b_vec, P, L, R]), branch, roothash) + transcript += pickle.dumps([g_vec, roothash]) + x = ZR.hash(transcript) + xi = 1/x + print(x) + n_p = n//2 + g_vec_p = [] + b_vec_p = [] + for i in range(n_p): + g_vec_p.append(g_vec[:n_p][i] ** xi * g_vec[n_p:][i] ** x) + b_vec_p.append(b_vec[:n_p][i] * xi + b_vec[n_p:][i] * x) + P_p = L**(x*x) * P * R**(xi*xi) + return recursive_verify(g_vec_p, b_vec_p, u, proof[:-1], n_p, P_p, transcript) + n = proof[0] + iproof = proof[1:] + if crs is None: + g_vec = G1.hash(b"honeybadgerg", length=n) + u = G1.hash(b"honeybadgeru") + else: + [g_vec, u] = crs + P = comm * u ** iprod + transcript = pickle.dumps(u) + return recursive_verify(g_vec, b_vec, u, iproof, n, P, transcript) diff --git a/pairing/src/lib.rs b/pairing/src/lib.rs index 2fd08ef2..48be222f 100644 --- a/pairing/src/lib.rs +++ b/pairing/src/lib.rs @@ -35,7 +35,7 @@ use ff::{Field, PrimeField, PrimeFieldDecodingError, PrimeFieldRepr, ScalarEngi use std::error::Error; use std::fmt; use std::io::{self, Write}; -use rand::{Rand, Rng, SeedableRng, XorShiftRng}; +use rand::{Rand, Rng, SeedableRng, XorShiftRng, ChaChaRng}; fn hex_to_bin (hexstr: &String) -> String { @@ -170,8 +170,6 @@ impl PyG1 { #[new] fn __new__(obj: &PyRawObject) -> PyResult<()>{ - //let mut rng = XorShiftRng::from_seed([0,0,0,1]); - //let g = G1::rand(&mut rng); 
let g = G1::one(); obj.init(|t| PyG1{ g1: g, @@ -180,8 +178,15 @@ impl PyG1 { }) } - fn rand(&mut self, s1: u32, s2: u32, s3: u32, s4: u32) -> PyResult<()>{ - let mut rng = XorShiftRng::from_seed([s1,s2,s3,s4]); + fn rand(&mut self, a: Vec) -> PyResult<()>{ + let mut seed: [u32;8] = [0,0,0,0,0,0,0,0]; + let mut i = 0; + for item in a.iter(){ + let myu32: &u32 = item; + seed[i] = *myu32; + i = i + 1; + } + let mut rng = ChaChaRng::from_seed(&seed); let g = G1::rand(&mut rng); self.g1 = g; if self.pplevel != 0 { @@ -286,6 +291,7 @@ impl PyG1 { Ok(()) } + //Keeping previous code for multithreading in case it comes in handy //fn mul_assign(&mut self, py: Python, other:&PyFr) -> PyResult<()> { fn mul_assign(&mut self, other:&PyFr) -> PyResult<()>{ //py.allow_threads(move || self.g1.mul_assign(other.fr)); @@ -394,8 +400,6 @@ impl PyG2 { #[new] fn __new__(obj: &PyRawObject) -> PyResult<()>{ - //let mut rng = XorShiftRng::from_seed([0,0,0,1]); - //let g = G2::rand(&mut rng); let g = G2::one(); obj.init(|t| PyG2{ g2: g, @@ -404,8 +408,15 @@ impl PyG2 { }) } - fn rand(&mut self, s1: u32, s2: u32, s3: u32, s4: u32) -> PyResult<()>{ - let mut rng = XorShiftRng::from_seed([s1,s2,s3,s4]); + fn rand(&mut self, a: Vec) -> PyResult<()>{ + let mut seed: [u32;8] = [0,0,0,0,0,0,0,0]; + let mut i = 0; + for item in a.iter(){ + let myu32: &u32 = item; + seed[i] = *myu32; + i = i + 1; + } + let mut rng = ChaChaRng::from_seed(&seed); let g = G2::rand(&mut rng); self.g2 = g; if self.pplevel != 0 { @@ -603,21 +614,13 @@ struct PyFr { impl PyFr { #[new] - //fn __new__(obj: &PyRawObject, s1: u32, s2: u32, s3: u32, s4: u32) -> PyResult<()>{ - // let mut rng = XorShiftRng::from_seed([s1,s2,s3,s4]); - // let f = Fr::rand(&mut rng); - // obj.init(|t| PyFr{ - // fr: f, - // }) - //} - //fn __new__(obj: &PyRawObject, s1: u32, s2: u32, s3: u32, s4: u32) -> PyResult<()>{ - fn __new__(obj: &PyRawObject, s: &str) -> PyResult<()>{ - let f = Fr::from_str(s).unwrap(); + fn __new__(obj: &PyRawObject, s1: u64, s2: u64, s3: u64, s4: u64) -> PyResult<()>{ + let f = Fr::from_repr(FrRepr([s1,s2,s3,s4])).unwrap(); obj.init(|t| PyFr{ fr: f, }) } - + fn one(&mut self) -> PyResult<()> { self.fr = Fr::one(); Ok(()) @@ -697,7 +700,6 @@ struct PyFq { #[pymethods] impl PyFq { #[new] - //fn __new__(obj: &PyRawObject, s1: u32, s2: u32, s3: u32, s4: u32) -> PyResult<()>{ fn __new__(obj: &PyRawObject) -> PyResult<()>{ let f = Fq::zero(); obj.init(|t| PyFq{ @@ -718,7 +720,6 @@ struct PyFq2 { #[pymethods] impl PyFq2 { #[new] - //fn __new__(obj: &PyRawObject, s1: u32, s2: u32, s3: u32, s4: u32) -> PyResult<()>{ fn __new__(obj: &PyRawObject) -> PyResult<()>{ let f = Fq2::zero(); obj.init(|t| PyFq2{ @@ -741,7 +742,6 @@ struct PyFq6 { #[pymethods] impl PyFq6 { #[new] - //fn __new__(obj: &PyRawObject, s1: u32, s2: u32, s3: u32, s4: u32) -> PyResult<()>{ fn __new__(obj: &PyRawObject) -> PyResult<()>{ let f = Fq6::zero(); obj.init(|t| PyFq6{ @@ -782,8 +782,26 @@ impl PyFq12 { pp: Vec::new(), pplevel : 0 }) - //Ok(()) } + + fn rand(&mut self, a: Vec) -> PyResult<()>{ + let mut seed: [u32;8] = [0,0,0,0,0,0,0,0]; + let mut i = 0; + for item in a.iter(){ + let myu32: &u32 = item; + seed[i] = *myu32; + i = i + 1; + } + let mut rng = ChaChaRng::from_seed(&seed); + let g = Fq12::rand(&mut rng); + self.fq12 = g; + if self.pplevel != 0 { + self.pp = Vec::new(); + self.pplevel = 0; + } + Ok(()) + } + fn from_strs(&mut self, s1: &str, s2: &str, s3: &str, s4: &str, s5: &str, s6: &str, s7: &str, s8: &str, s9: &str, s10: &str, s11: &str, s12: &str) -> PyResult<()> { let 
c0 = Fq6 { c0: Fq2 { @@ -830,18 +848,6 @@ impl PyFq12 { Ok(format!("({} + {} * w)",self.fq12.c0, self.fq12.c1 )) } - fn rand(&mut self, s1: u32, s2: u32, s3: u32, s4: u32) -> PyResult<()> { - let mut rng = XorShiftRng::from_seed([s1,s2,s3,s4]); - self.fq12 = Fq12::rand(&mut rng); - //self.fq12.c0 = rng.gen(); - //self.fq12.c1 = rng.gen(); - if self.pplevel != 0 { - self.pp = Vec::new(); - self.pplevel = 0; - } - Ok(()) - } - fn add_assign(&mut self, other: &Self) -> PyResult<()> { self.fq12.add_assign(&other.fq12); if self.pplevel != 0 { diff --git a/tests/test_poly_commit_log.py b/tests/test_poly_commit_log.py new file mode 100644 index 00000000..803521ae --- /dev/null +++ b/tests/test_poly_commit_log.py @@ -0,0 +1,27 @@ +from honeybadgermpc.betterpairing import ZR, G1 +from honeybadgermpc.polynomial import polynomials_over +from honeybadgermpc.poly_commit_log import PolyCommitLog + + +def test_pc_log(): + t = 3 + pc = PolyCommitLog() + phi = polynomials_over(ZR).random(t) + # ToDo: see if other polycommits return the commit randomness + # rather than expecting it as arg + r = ZR.random() + c = pc.commit(phi, r) + witness = pc.create_witness(phi, r, 3) + assert pc.verify_eval(c, 3, phi(3), witness) + assert not pc.verify_eval(c, 4, phi(3), witness) + assert not pc.verify_eval(G1.rand(), 3, phi(3), witness) + + +def test_pc_log_batch(): + t = 10 + pc = PolyCommitLog() + phi = polynomials_over(ZR).random(t) + r = ZR.random() + c = pc.commit(phi, r) + witnesses = pc.batch_create_witness(phi, r) + assert pc.verify_eval(c, 5, phi(5), witnesses[4]) diff --git a/tests/test_proofs.py b/tests/test_proofs.py new file mode 100644 index 00000000..00a84a3d --- /dev/null +++ b/tests/test_proofs.py @@ -0,0 +1,63 @@ +from honeybadgermpc.proofs import prove_inner_product, verify_inner_product, \ + prove_inner_product_one_known, verify_inner_product_one_known, \ + prove_batch_inner_product_one_known, verify_batch_inner_product_one_known, \ + MerkleTree +from honeybadgermpc.betterpairing import ZR, G1 + + +def test_inner_product_proof(): + n = 10 + a = [ZR.random() for i in range(n)] + b = [ZR.random() for i in range(n)] + iprod = ZR(0) + for i in range(n): + iprod += a[i]*b[i] + comm, iprod, proof = prove_inner_product(a, b) + assert verify_inner_product(comm, iprod, proof) + comm, iprod, proof2 = prove_inner_product(a, b, comm=comm) + assert verify_inner_product(comm, iprod, proof2) + comm, iprod, badproof = prove_inner_product(a, b, comm=G1.rand()) + assert not verify_inner_product(comm, iprod, badproof) + + +def test_inner_product_proof_one_known(): + n = 10 + a = [ZR.random() for i in range(n)] + b = [ZR.random() for i in range(n)] + iprod = ZR(0) + for i in range(n): + iprod += a[i]*b[i] + comm, iprod, proof = prove_inner_product_one_known(a, b) + assert verify_inner_product_one_known(comm, iprod, b, proof) + comm, iprod, badproof = prove_inner_product_one_known(a, b, comm=G1.rand()) + assert not verify_inner_product_one_known(comm, iprod, b, badproof) + + +def test_batch_inner_product_proof_one_known(): + n = 10 + a = [ZR.random() for i in range(n)] + bs = [[ZR.random() for j in range(n)] for i in range(3*n)] + comm, iprods, proofs = prove_batch_inner_product_one_known(a, bs) + assert verify_batch_inner_product_one_known(comm, iprods[2], bs[2], proofs[2]) + comm, iprods, badproofs = prove_batch_inner_product_one_known(a, bs, comm=G1.rand()) + assert not verify_batch_inner_product_one_known(comm, iprods[2], bs[2], badproofs[2]) + + +def test_merkle_tree(): + import pickle + leaves = [b"Cravings", 
b"is", b"best", b"restaurant"] + t = MerkleTree(leaves) + rh = t.get_root_hash() + br = t.get_branch(0) + assert MerkleTree.verify_membership(b"Cravings", br, rh) + assert not MerkleTree.verify_membership(b"Chipotle", br, rh) + t2 = MerkleTree() + vec = [pickle.dumps(G1.rand()) for _ in range(12)] + t2.append(vec[0]) + t2.append_many(vec[1:]) + rh2 = t2.get_root_hash() + br2 = t2.get_branch(7) + assert MerkleTree.verify_membership(vec[7], br2, rh2) + # If this fails, buy a lottery ticket... or check that G1.rand() is actually random + assert not MerkleTree.verify_membership(pickle.dumps(G1.rand()), br2, rh2) + assert not MerkleTree.verify_membership(vec[6], br2, rh) From 55615abe8040769a8e0c50da3e8c7a95a278046b Mon Sep 17 00:00:00 2001 From: Tom Yurek Date: Fri, 13 Sep 2019 15:56:30 -0500 Subject: [PATCH 2/6] fix odd-t error --- honeybadgermpc/poly_commit_log.py | 5 +---- honeybadgermpc/proofs.py | 19 +++++++++++-------- tests/test_poly_commit_log.py | 13 ++++++------- tests/test_proofs.py | 4 ++-- 4 files changed, 20 insertions(+), 21 deletions(-) diff --git a/honeybadgermpc/poly_commit_log.py b/honeybadgermpc/poly_commit_log.py index 1d36ba3c..6a1f80d7 100644 --- a/honeybadgermpc/poly_commit_log.py +++ b/honeybadgermpc/poly_commit_log.py @@ -14,7 +14,7 @@ def __init__(self, crs=None, degree_max=33): self.u = G1.hash(b"honeybadgeru") else: assert len(crs) == 3 - [self.gs, self.hs, self.u] = crs + [self.gs, self.h, self.u] = crs self.y_vecs = [] def commit(self, phi, r): @@ -99,14 +99,11 @@ def verify_eval(self, c, i, phi_at_i, witness): challenge = ZR.hash(pickle.dumps([self.gs, self.h, self.u, S, T])) else: [roothash, branch, S, T, D, mu, t_hat, iproof] = witness - print(branch) if not MerkleTree.verify_membership(pickle.dumps(T), branch, roothash): return False challenge = ZR.hash(pickle.dumps([roothash, self.gs, self.h, self.u, S])) ret = self.gs[0]**t_hat == self.gs[0]**phi_at_i * T ** challenge - print(ret) ret &= D * self.h**mu == S**challenge * c - print(ret) if len(iproof[-1]) > 3: ret &= verify_batch_inner_product_one_known( D, t_hat, y_vec, iproof, crs=[self.gs, self.u]) diff --git a/honeybadgermpc/proofs.py b/honeybadgermpc/proofs.py index dfc81428..a572cb03 100644 --- a/honeybadgermpc/proofs.py +++ b/honeybadgermpc/proofs.py @@ -221,6 +221,7 @@ def recursive_proof(g_vec, a_vec, b_vec, u, n, P, transcript): u = G1.hash(b"honeybadgeru") else: [g_vec, u] = crs + g_vec = g_vec[:n] if comm is not None: P = comm * G1.one() else: @@ -263,6 +264,7 @@ def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): u = G1.hash(b"honeybadgeru") else: [g_vec, u] = crs + g_vec = g_vec[:n] P = comm * u ** iprod transcript = b'' return recursive_verify(g_vec, b_vec, u, iproof, n, P, transcript) @@ -284,8 +286,8 @@ def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): P_vec[j] *= g_vec[-1] ** (na) * u ** (na * b_vecs[j][-1]) proofsteps[j].append(na) n_p = n//2 - cls = [ZR(0) for _ in range(len(b_vecs))] - crs = [ZR(0) for _ in range(len(b_vecs))] + cl_vec = [ZR(0) for _ in range(len(b_vecs))] + cr_vec = [ZR(0) for _ in range(len(b_vecs))] La = G1.one() Ra = G1.one() L_vec = [None] * len(b_vecs) @@ -295,10 +297,10 @@ def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): Ra *= g_vec[:n_p][i]**a_vec[n_p:][i] for j in range(len(b_vecs)): for i in range(n_p): - cls[j] += a_vec[:n_p][i] * b_vecs[j][n_p:][i] - crs[j] += a_vec[n_p:][i] * b_vecs[j][:n_p][i] - L_vec[j] = La * (u ** cls[j]) - R_vec[j] = Ra * (u ** crs[j]) + cl_vec[j] += a_vec[:n_p][i] * b_vecs[j][n_p:][i] + 
cr_vec[j] += a_vec[n_p:][i] * b_vecs[j][:n_p][i] + L_vec[j] = La * (u ** cl_vec[j]) + R_vec[j] = Ra * (u ** cr_vec[j]) # Fiat Shamir # Make a merkle tree over everything that varies between verifiers # TODO: na should be in the transcript @@ -326,7 +328,7 @@ def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): Lax2Raxi2 = La ** x2 * Ra ** xi2 for j in range(len(P_vec)): # Instead of doing L_vec[j]**(x2)*P_vec[j]*R_vec[j]**(xi2), save computation - P_vec[j] *= Lax2Raxi2 * u ** (x2 * cls[j] + xi2 * crs[j]) + P_vec[j] *= Lax2Raxi2 * u ** (x2 * cl_vec[j] + xi2 * cr_vec[j]) proofs = recursive_proofs(g_vec_p, a_vec_p, b_vecs_p, u, n_p, P_vec, transcript) for j in range(len(proofs)): proofsteps[j].append(L_vec[j]) @@ -339,6 +341,7 @@ def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): u = G1.hash(b"honeybadgeru") else: [g_vec, u] = crs + g_vec = g_vec[:n] if comm is None: comm = G1.one() for i in range(n): @@ -374,7 +377,6 @@ def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): transcript += pickle.dumps([g_vec, roothash]) x = ZR.hash(transcript) xi = 1/x - print(x) n_p = n//2 g_vec_p = [] b_vec_p = [] @@ -390,6 +392,7 @@ def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): u = G1.hash(b"honeybadgeru") else: [g_vec, u] = crs + g_vec = g_vec[:n] P = comm * u ** iprod transcript = pickle.dumps(u) return recursive_verify(g_vec, b_vec, u, iproof, n, P, transcript) diff --git a/tests/test_poly_commit_log.py b/tests/test_poly_commit_log.py index 803521ae..f9b0e5fb 100644 --- a/tests/test_poly_commit_log.py +++ b/tests/test_poly_commit_log.py @@ -1,10 +1,10 @@ +from pytest import mark from honeybadgermpc.betterpairing import ZR, G1 from honeybadgermpc.polynomial import polynomials_over from honeybadgermpc.poly_commit_log import PolyCommitLog - -def test_pc_log(): - t = 3 +@mark.parametrize("t", [3, 6, 10]) +def test_pc_log(t): pc = PolyCommitLog() phi = polynomials_over(ZR).random(t) # ToDo: see if other polycommits return the commit randomness @@ -16,12 +16,11 @@ def test_pc_log(): assert not pc.verify_eval(c, 4, phi(3), witness) assert not pc.verify_eval(G1.rand(), 3, phi(3), witness) - -def test_pc_log_batch(): - t = 10 +@mark.parametrize("t", [3, 6, 10]) +def test_pc_log_batch(t): pc = PolyCommitLog() phi = polynomials_over(ZR).random(t) r = ZR.random() c = pc.commit(phi, r) witnesses = pc.batch_create_witness(phi, r) - assert pc.verify_eval(c, 5, phi(5), witnesses[4]) + assert pc.verify_eval(c, 4, phi(4), witnesses[3]) diff --git a/tests/test_proofs.py b/tests/test_proofs.py index 00a84a3d..cc1cab16 100644 --- a/tests/test_proofs.py +++ b/tests/test_proofs.py @@ -21,7 +21,7 @@ def test_inner_product_proof(): def test_inner_product_proof_one_known(): - n = 10 + n = 15 a = [ZR.random() for i in range(n)] b = [ZR.random() for i in range(n)] iprod = ZR(0) @@ -34,7 +34,7 @@ def test_inner_product_proof_one_known(): def test_batch_inner_product_proof_one_known(): - n = 10 + n = 13 a = [ZR.random() for i in range(n)] bs = [[ZR.random() for j in range(n)] for i in range(3*n)] comm, iprods, proofs = prove_batch_inner_product_one_known(a, bs) From 155ee3e950defc3db5fa702c0966bed98b59079c Mon Sep 17 00:00:00 2001 From: Tom Yurek Date: Fri, 13 Sep 2019 15:58:10 -0500 Subject: [PATCH 3/6] more thorough --- honeybadgermpc/proofs.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/honeybadgermpc/proofs.py b/honeybadgermpc/proofs.py index a572cb03..a2164df9 100644 --- a/honeybadgermpc/proofs.py +++ b/honeybadgermpc/proofs.py @@ -124,6 +124,8 @@ def 
recursive_proof(g_vec, h_vec, u, a_vec, b_vec, n, P, transcript): u = G1.hash(b"honeybadgeru") else: [g_vec, h_vec, u] = crs + g_vec = g_vec[:n] + h_vec = h_vec[:n] if comm is not None: P = comm * G1.one() else: From 15f799459e40852a2698d0e30945bc04a8083c7c Mon Sep 17 00:00:00 2001 From: tyurek Date: Thu, 10 Oct 2019 15:07:00 -0500 Subject: [PATCH 4/6] Update .flake8 --- .flake8 | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.flake8 b/.flake8 index 46be0d34..5298af31 100644 --- a/.flake8 +++ b/.flake8 @@ -5,3 +5,6 @@ exclude = .eggs/, apps/tutorial/ ignore = E203, E266, E501, W503 +per-file-ignores = + honeybadgermpc/poly_commit_log.py: N806, N803 + honeybadgermpc/proofs.py: N806, N803 From 50b559f1c9f1a7790a3a67f751338cc8397e28ef Mon Sep 17 00:00:00 2001 From: tyurek Date: Thu, 24 Oct 2019 17:35:59 -0500 Subject: [PATCH 5/6] black --- benchmark/test_benchmark_poly_commit_const.py | 3 +- benchmark/test_benchmark_poly_commit_lin.py | 23 +++++ docker-compose.yml | 1 + honeybadgermpc/betterpairing.py | 4 +- honeybadgermpc/poly_commit_log.py | 62 ++++++------ honeybadgermpc/proofs.py | 95 +++++++++++-------- tests/test_poly_commit_log.py | 2 + tests/test_proofs.py | 24 +++-- 8 files changed, 136 insertions(+), 78 deletions(-) create mode 100644 benchmark/test_benchmark_poly_commit_lin.py diff --git a/benchmark/test_benchmark_poly_commit_const.py b/benchmark/test_benchmark_poly_commit_const.py index 11f75d4b..4302a3f3 100644 --- a/benchmark/test_benchmark_poly_commit_const.py +++ b/benchmark/test_benchmark_poly_commit_const.py @@ -25,4 +25,5 @@ def test_benchmark_create_witness(benchmark, t): phi = polynomials_over(ZR).random(t) c, phi_hat = pc.commit(phi) pc.preprocess_prover(10) - benchmark(pc.create_witness, phi, phi_hat, 3) + i = ZR.random() + benchmark(pc.create_witness, phi, phi_hat, i) diff --git a/benchmark/test_benchmark_poly_commit_lin.py b/benchmark/test_benchmark_poly_commit_lin.py new file mode 100644 index 00000000..ef7f0400 --- /dev/null +++ b/benchmark/test_benchmark_poly_commit_lin.py @@ -0,0 +1,23 @@ +from pytest import mark +from honeybadgermpc.betterpairing import G1, ZR +from honeybadgermpc.polynomial import polynomials_over +from honeybadgermpc.poly_commit_lin import PolyCommitLin + + +@mark.parametrize("t", [3, 10, 20, 33]) +def test_benchmark_commit(benchmark, t): + g = G1.rand() + h = G1.rand() + pc = PolyCommitLin([g, h]) + phi = polynomials_over(ZR).random(t) + benchmark(pc.commit, phi) + + +@mark.parametrize("t", [3, 10, 20, 33]) +def test_benchmark_create_witness(benchmark, t): + g = G1.rand() + h = G1.rand() + pc = PolyCommitLin([g, h]) + phi_hat = polynomials_over(ZR).random(t) + i = ZR.random() + benchmark(pc.create_witness, phi_hat, i) diff --git a/docker-compose.yml b/docker-compose.yml index 3852d0eb..cb75588d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,6 +11,7 @@ services: - ./apps:/usr/src/HoneyBadgerMPC/apps - ./progs:/usr/src/HoneyBadgerMPC/progs - ./benchmark:/usr/src/HoneyBadgerMPC/benchmark + - ./.benchmarks:/usr/src/HoneyBadgerMPC/.benchmarks - ./aws:/usr/src/HoneyBadgerMPC/aws - ./conf:/usr/src/HoneyBadgerMPC/conf - ./docs:/usr/src/HoneyBadgerMPC/docs diff --git a/honeybadgermpc/betterpairing.py b/honeybadgermpc/betterpairing.py index 33cc06c9..899fe29b 100644 --- a/honeybadgermpc/betterpairing.py +++ b/honeybadgermpc/betterpairing.py @@ -220,7 +220,7 @@ def rand(seed=None): def hash(bytestr, length=None): assert type(bytestr) is bytes hashout = sha256(bytestr).hexdigest() - seed = [int(hashout[i: i + 8], 16) for i in 
range(0, 64, 8)] + seed = [int(hashout[i : i + 8], 16) for i in range(0, 64, 8)] if length is None: return G1.rand(seed) assert type(length) is int @@ -425,7 +425,7 @@ def rand(seed=None): def hash(bytestr, length=None): assert type(bytestr) is bytes hashout = sha256(bytestr).hexdigest() - seed = [int(hashout[i: i + 8], 16) for i in range(0, 64, 8)] + seed = [int(hashout[i : i + 8], 16) for i in range(0, 64, 8)] if length is None: return G2.rand(seed) assert type(length) is int diff --git a/honeybadgermpc/poly_commit_log.py b/honeybadgermpc/poly_commit_log.py index 6a1f80d7..e8332099 100644 --- a/honeybadgermpc/poly_commit_log.py +++ b/honeybadgermpc/poly_commit_log.py @@ -1,7 +1,11 @@ from honeybadgermpc.betterpairing import ZR, G1 -from honeybadgermpc.proofs import prove_inner_product_one_known, \ - verify_inner_product_one_known, prove_batch_inner_product_one_known, \ - verify_batch_inner_product_one_known, MerkleTree +from honeybadgermpc.proofs import ( + prove_inner_product_one_known, + verify_inner_product_one_known, + prove_batch_inner_product_one_known, + verify_batch_inner_product_one_known, + MerkleTree, +) import pickle @@ -26,11 +30,11 @@ def commit(self, phi, r): def create_witness(self, phi, r, i): t = len(phi.coeffs) - 1 - y_vec = [ZR(i) ** j for j in range(t+1)] - s_vec = [ZR.random() for _ in range(t+1)] + y_vec = [ZR(i) ** j for j in range(t + 1)] + s_vec = [ZR.random() for _ in range(t + 1)] sy_prod = ZR(0) S = G1.one() - for j in range(t+1): + for j in range(t + 1): S *= self.gs[j] ** s_vec[j] sy_prod += s_vec[j] * y_vec[j] T = self.gs[0] ** sy_prod @@ -38,34 +42,35 @@ def create_witness(self, phi, r, i): S *= self.h ** rho # Fiat Shamir challenge = ZR.hash(pickle.dumps([self.gs, self.h, self.u, S, T])) - d_vec = [phi.coeffs[j] + s_vec[j] * challenge for j in range(t+1)] + d_vec = [phi.coeffs[j] + s_vec[j] * challenge for j in range(t + 1)] D = G1.one() - for j in range(t+1): + for j in range(t + 1): D *= self.gs[j] ** d_vec[j] - mu = r + rho*challenge + mu = r + rho * challenge comm, t_hat, iproof = prove_inner_product_one_known( - d_vec, y_vec, crs=[self.gs, self.u]) + d_vec, y_vec, crs=[self.gs, self.u] + ) return [S, T, D, mu, t_hat, iproof] # Create witnesses for points 1 to n. n defaults to 3*degree+1 if unset. 
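    # All n witnesses share one masking vector s_vec and one blinder rho, so S, D,
    # and mu are common to every evaluation point; what varies per point j is
    # T_j = gs[0] ** <s_vec, y_vecs[j]>, its Merkle branch, the claimed inner
    # product t_hats[j], and the per-point inner-product proof, all of which are
    # generated in one batched pass below.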
def batch_create_witness(self, phi, r, n=None): t = len(phi.coeffs) - 1 if n is None: - n = 3*t + 1 + n = 3 * t + 1 if len(self.y_vecs) < n: i = len(self.y_vecs) - while(i < n): - self.y_vecs.append([ZR(i+1) ** j for j in range(t+1)]) + while i < n: + self.y_vecs.append([ZR(i + 1) ** j for j in range(t + 1)]) i += 1 - s_vec = [ZR.random() for _ in range(t+1)] + s_vec = [ZR.random() for _ in range(t + 1)] sy_prods = [ZR(0) for _ in range(n)] S = G1.one() T_vec = [None] * n witnesses = [[] for _ in range(n)] - for i in range(t+1): + for i in range(t + 1): S *= self.gs[i] ** s_vec[i] for j in range(n): - for i in range(t+1): + for i in range(t + 1): sy_prods[j] += s_vec[i] * self.y_vecs[j][i] T_vec[j] = self.gs[0] ** sy_prods[j] rho = ZR.random() @@ -80,20 +85,21 @@ def batch_create_witness(self, phi, r, n=None): witnesses[j].append(roothash) witnesses[j].append(branch) challenge = ZR.hash(pickle.dumps([roothash, self.gs, self.h, self.u, S])) - d_vec = [phi.coeffs[j] + s_vec[j] * challenge for j in range(t+1)] + d_vec = [phi.coeffs[j] + s_vec[j] * challenge for j in range(t + 1)] D = G1.one() - for j in range(t+1): + for j in range(t + 1): D *= self.gs[j] ** d_vec[j] - mu = r + rho*challenge + mu = r + rho * challenge comm, t_hats, iproofs = prove_batch_inner_product_one_known( - d_vec, self.y_vecs, crs=[self.gs, self.u]) + d_vec, self.y_vecs, crs=[self.gs, self.u] + ) for j in range(len(witnesses)): witnesses[j] += [S, T_vec[j], D, mu, t_hats[j], iproofs[j]] return witnesses def verify_eval(self, c, i, phi_at_i, witness): t = witness[-1][0] - 1 - y_vec = [ZR(i) ** j for j in range(t+1)] + y_vec = [ZR(i) ** j for j in range(t + 1)] if len(witness) == 6: [S, T, D, mu, t_hat, iproof] = witness challenge = ZR.hash(pickle.dumps([self.gs, self.h, self.u, S, T])) @@ -102,17 +108,19 @@ def verify_eval(self, c, i, phi_at_i, witness): if not MerkleTree.verify_membership(pickle.dumps(T), branch, roothash): return False challenge = ZR.hash(pickle.dumps([roothash, self.gs, self.h, self.u, S])) - ret = self.gs[0]**t_hat == self.gs[0]**phi_at_i * T ** challenge - ret &= D * self.h**mu == S**challenge * c + ret = self.gs[0] ** t_hat == self.gs[0] ** phi_at_i * T ** challenge + ret &= D * self.h ** mu == S ** challenge * c if len(iproof[-1]) > 3: ret &= verify_batch_inner_product_one_known( - D, t_hat, y_vec, iproof, crs=[self.gs, self.u]) + D, t_hat, y_vec, iproof, crs=[self.gs, self.u] + ) else: ret &= verify_inner_product_one_known( - D, t_hat, y_vec, iproof, crs=[self.gs, self.u]) + D, t_hat, y_vec, iproof, crs=[self.gs, self.u] + ) return ret def preprocess_prover(self, level=10): self.u.preprocess(level) - for i in range(len(self.gs)-1): - self.y_vecs.append([ZR(i+1) ** j for j in range(len(self.gs))]) + for i in range(len(self.gs) - 1): + self.y_vecs.append([ZR(i + 1) ** j for j in range(len(self.gs))]) diff --git a/honeybadgermpc/proofs.py b/honeybadgermpc/proofs.py index a2164df9..78a80aea 100644 --- a/honeybadgermpc/proofs.py +++ b/honeybadgermpc/proofs.py @@ -85,7 +85,7 @@ def recursive_proof(g_vec, h_vec, u, a_vec, b_vec, n, P, transcript): P *= g_vec[-1] ** (na) * h_vec[-1] ** (nb) * u ** (-na * nb) proofstep.append(na) proofstep.append(nb) - n_p = n//2 + n_p = n // 2 cl = ZR(0) cr = ZR(0) L = G1.one() @@ -93,14 +93,14 @@ def recursive_proof(g_vec, h_vec, u, a_vec, b_vec, n, P, transcript): for i in range(n_p): cl += a_vec[:n_p][i] * b_vec[n_p:][i] cr += a_vec[n_p:][i] * b_vec[:n_p][i] - L *= g_vec[n_p:][i]**a_vec[:n_p][i] * h_vec[:n_p][i]**b_vec[n_p:][i] - R *= g_vec[:n_p][i]**a_vec[n_p:][i] * 
h_vec[n_p:][i]**b_vec[:n_p][i] + L *= g_vec[n_p:][i] ** a_vec[:n_p][i] * h_vec[:n_p][i] ** b_vec[n_p:][i] + R *= g_vec[:n_p][i] ** a_vec[n_p:][i] * h_vec[n_p:][i] ** b_vec[:n_p][i] L *= u ** cl R *= u ** cr # Fiat Shamir L, R, state... transcript += pickle.dumps([g_vec, h_vec, u, P, L, R]) x = ZR.hash(transcript) - xi = 1/x + xi = 1 / x # this part must come after the challenge is generated, which must # come after L and R are calculated. Don't try to condense the loops g_vec_p, h_vec_p, a_vec_p, b_vec_p = [], [], [], [] @@ -109,13 +109,15 @@ def recursive_proof(g_vec, h_vec, u, a_vec, b_vec, n, P, transcript): h_vec_p.append(h_vec[:n_p][i] ** x * h_vec[n_p:][i] ** xi) a_vec_p.append(a_vec[:n_p][i] * x + a_vec[n_p:][i] * xi) b_vec_p.append(b_vec[:n_p][i] * xi + b_vec[n_p:][i] * x) - P_p = L**(x*x) * P * R**(xi*xi) + P_p = L ** (x * x) * P * R ** (xi * xi) proof = recursive_proof( - g_vec_p, h_vec_p, u, a_vec_p, b_vec_p, n_p, P_p, transcript) + g_vec_p, h_vec_p, u, a_vec_p, b_vec_p, n_p, P_p, transcript + ) proofstep.append(L) proofstep.append(R) proof.append(proofstep) return proof + n = len(a_vec) assert len(b_vec) == n if crs is None: @@ -136,16 +138,19 @@ def recursive_proof(g_vec, h_vec, u, a_vec, b_vec, n, P, transcript): for i in range(n): iprod += a_vec[i] * b_vec[i] P = comm * u ** iprod - transcript = b'' - return [comm, iprod, [n]+recursive_proof( - g_vec, h_vec, u, a_vec, b_vec, n, P, transcript)] + transcript = b"" + return [ + comm, + iprod, + [n] + recursive_proof(g_vec, h_vec, u, a_vec, b_vec, n, P, transcript), + ] def verify_inner_product(comm, iprod, proof, crs=None): def recursive_verify(g_vec, h_vec, u, proof, n, P, transcript): if n == 1: a, b = proof[0][0], proof[0][1] - return P == g_vec[0]**a * h_vec[0]**b * u ** (a*b) + return P == g_vec[0] ** a * h_vec[0] ** b * u ** (a * b) if n % 2 == 1: [na, nb, L, R] = proof[-1] P *= g_vec[-1] ** (na) * h_vec[-1] ** (nb) * u ** (-na * nb) @@ -153,15 +158,16 @@ def recursive_verify(g_vec, h_vec, u, proof, n, P, transcript): [L, R] = proof[-1] transcript += pickle.dumps([g_vec, h_vec, u, P, L, R]) x = ZR.hash(transcript) - xi = 1/x - n_p = n//2 + xi = 1 / x + n_p = n // 2 g_vec_p = [] h_vec_p = [] for i in range(n_p): - g_vec_p.append(g_vec[:n_p][i]**xi * g_vec[n_p:][i]**x) - h_vec_p.append(h_vec[:n_p][i]**x * h_vec[n_p:][i]**xi) - P_p = L**(x*x) * P * R**(xi*xi) + g_vec_p.append(g_vec[:n_p][i] ** xi * g_vec[n_p:][i] ** x) + h_vec_p.append(h_vec[:n_p][i] ** x * h_vec[n_p:][i] ** xi) + P_p = L ** (x * x) * P * R ** (xi * xi) return recursive_verify(g_vec_p, h_vec_p, u, proof[:-1], n_p, P_p, transcript) + n = proof[0] iproof = proof[1:] if crs is None: @@ -171,7 +177,7 @@ def recursive_verify(g_vec, h_vec, u, proof, n, P, transcript): else: [g_vec, h_vec, u] = crs P = comm * u ** iprod - transcript = b'' + transcript = b"" return recursive_verify(g_vec, h_vec, u, iproof, n, P, transcript) @@ -187,7 +193,7 @@ def recursive_proof(g_vec, a_vec, b_vec, u, n, P, transcript): na = -1 * a_vec[-1] P *= g_vec[-1] ** (na) * u ** (na * b_vec[-1]) proofstep.append(na) - n_p = n//2 + n_p = n // 2 cl = ZR(0) cr = ZR(0) L = G1.one() @@ -195,14 +201,14 @@ def recursive_proof(g_vec, a_vec, b_vec, u, n, P, transcript): for i in range(n_p): cl += a_vec[:n_p][i] * b_vec[n_p:][i] cr += a_vec[n_p:][i] * b_vec[:n_p][i] - L *= g_vec[n_p:][i]**a_vec[:n_p][i] - R *= g_vec[:n_p][i]**a_vec[n_p:][i] + L *= g_vec[n_p:][i] ** a_vec[:n_p][i] + R *= g_vec[:n_p][i] ** a_vec[n_p:][i] L *= u ** cl R *= u ** cr # Fiat Shamir L, R, state... 
transcript += pickle.dumps([g_vec, u, P, L, R]) x = ZR.hash(transcript) - xi = 1/x + xi = 1 / x # this part must come after the challenge is generated, which must # come after L and R are calculated. Don't try to condense the loops g_vec_p, a_vec_p, b_vec_p = [], [], [] @@ -210,12 +216,13 @@ def recursive_proof(g_vec, a_vec, b_vec, u, n, P, transcript): g_vec_p.append(g_vec[:n_p][i] ** xi * g_vec[n_p:][i] ** x) a_vec_p.append(a_vec[:n_p][i] * x + a_vec[n_p:][i] * xi) b_vec_p.append(b_vec[:n_p][i] * xi + b_vec[n_p:][i] * x) - P_p = L**(x*x) * P * R**(xi*xi) + P_p = L ** (x * x) * P * R ** (xi * xi) proof = recursive_proof(g_vec_p, a_vec_p, b_vec_p, u, n_p, P_p, transcript) proofstep.append(L) proofstep.append(R) proof.append(proofstep) return proof + n = len(a_vec) assert len(b_vec) == n if crs is None: @@ -234,15 +241,19 @@ def recursive_proof(g_vec, a_vec, b_vec, u, n, P, transcript): for i in range(n): iprod += a_vec[i] * b_vec[i] P = comm * u ** iprod - transcript = b'' - return [comm, iprod, [n]+recursive_proof(g_vec, a_vec, b_vec, u, n, P, transcript)] + transcript = b"" + return [ + comm, + iprod, + [n] + recursive_proof(g_vec, a_vec, b_vec, u, n, P, transcript), + ] def verify_inner_product_one_known(comm, iprod, b_vec, proof, crs=None): def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): if n == 1: a, b = proof[0][0], b_vec[0] - return P == g_vec[0]**a * u ** (a*b) + return P == g_vec[0] ** a * u ** (a * b) if n % 2 == 1: [na, L, R] = proof[-1] P *= g_vec[-1] ** (na) * u ** (na * b_vec[-1]) @@ -250,15 +261,16 @@ def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): [L, R] = proof[-1] transcript += pickle.dumps([g_vec, u, P, L, R]) x = ZR.hash(transcript) - xi = 1/x - n_p = n//2 + xi = 1 / x + n_p = n // 2 g_vec_p = [] b_vec_p = [] for i in range(n_p): - g_vec_p.append(g_vec[:n_p][i]**xi * g_vec[n_p:][i]**x) + g_vec_p.append(g_vec[:n_p][i] ** xi * g_vec[n_p:][i] ** x) b_vec_p.append(b_vec[:n_p][i] * xi + b_vec[n_p:][i] * x) - P_p = L**(x*x) * P * R**(xi*xi) + P_p = L ** (x * x) * P * R ** (xi * xi) return recursive_verify(g_vec_p, b_vec_p, u, proof[:-1], n_p, P_p, transcript) + n = proof[0] iproof = proof[1:] if crs is None: @@ -268,7 +280,7 @@ def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): [g_vec, u] = crs g_vec = g_vec[:n] P = comm * u ** iprod - transcript = b'' + transcript = b"" return recursive_verify(g_vec, b_vec, u, iproof, n, P, transcript) @@ -277,7 +289,7 @@ def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): def prove_batch_inner_product_one_known(a_vec, b_vecs, comm=None, crs=None): def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): if n == 1: - proofs = [None]*len(b_vecs) + proofs = [None] * len(b_vecs) for j in range(len(proofs)): proofs[j] = [[a_vec[0]]] return proofs @@ -287,7 +299,7 @@ def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): for j in range(len(P_vec)): P_vec[j] *= g_vec[-1] ** (na) * u ** (na * b_vecs[j][-1]) proofsteps[j].append(na) - n_p = n//2 + n_p = n // 2 cl_vec = [ZR(0) for _ in range(len(b_vecs))] cr_vec = [ZR(0) for _ in range(len(b_vecs))] La = G1.one() @@ -295,8 +307,8 @@ def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): L_vec = [None] * len(b_vecs) R_vec = [None] * len(b_vecs) for i in range(n_p): - La *= g_vec[n_p:][i]**a_vec[:n_p][i] - Ra *= g_vec[:n_p][i]**a_vec[n_p:][i] + La *= g_vec[n_p:][i] ** a_vec[:n_p][i] + Ra *= g_vec[:n_p][i] ** a_vec[n_p:][i] for j in range(len(b_vecs)): for i in range(n_p): cl_vec[j] += a_vec[:n_p][i] * 
b_vecs[j][n_p:][i] @@ -316,7 +328,7 @@ def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): proofsteps[j].append(branch) transcript += pickle.dumps([g_vec, roothash]) x = ZR.hash(transcript) - xi = 1/x + xi = 1 / x # this part must come after the challenge is generated, which must # come after L and R are calculated. Don't try to condense the loops g_vec_p, a_vec_p = [], [] @@ -326,7 +338,7 @@ def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): a_vec_p.append(a_vec[:n_p][i] * x + a_vec[n_p:][i] * xi) for j in range(len(b_vecs)): b_vecs_p[j].append(b_vecs[j][:n_p][i] * xi + b_vecs[j][n_p:][i] * x) - x2, xi2 = x*x, xi*xi + x2, xi2 = x * x, xi * xi Lax2Raxi2 = La ** x2 * Ra ** xi2 for j in range(len(P_vec)): # Instead of doing L_vec[j]**(x2)*P_vec[j]*R_vec[j]**(xi2), save computation @@ -337,6 +349,7 @@ def recursive_proofs(g_vec, a_vec, b_vecs, u, n, P_vec, transcript): proofsteps[j].append(R_vec[j]) proofs[j].append(proofsteps[j]) return proofs + n = len(a_vec) if crs is None: g_vec = G1.hash(b"honeybadgerg", length=n) @@ -367,7 +380,7 @@ def verify_batch_inner_product_one_known(comm, iprod, b_vec, proof, crs=None): def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): if n == 1: a, b = proof[0][0], b_vec[0] - return P == g_vec[0]**a * u ** (a*b) + return P == g_vec[0] ** a * u ** (a * b) if n % 2 == 1: [na, roothash, branch, L, R] = proof[-1] P *= g_vec[-1] ** (na) * u ** (na * b_vec[-1]) @@ -375,18 +388,20 @@ def recursive_verify(g_vec, b_vec, u, proof, n, P, transcript): [roothash, branch, L, R] = proof[-1] # TODO: find a way to make the protocol abort nicely if this fails assert MerkleTree.verify_membership( - pickle.dumps([b_vec, P, L, R]), branch, roothash) + pickle.dumps([b_vec, P, L, R]), branch, roothash + ) transcript += pickle.dumps([g_vec, roothash]) x = ZR.hash(transcript) - xi = 1/x - n_p = n//2 + xi = 1 / x + n_p = n // 2 g_vec_p = [] b_vec_p = [] for i in range(n_p): g_vec_p.append(g_vec[:n_p][i] ** xi * g_vec[n_p:][i] ** x) b_vec_p.append(b_vec[:n_p][i] * xi + b_vec[n_p:][i] * x) - P_p = L**(x*x) * P * R**(xi*xi) + P_p = L ** (x * x) * P * R ** (xi * xi) return recursive_verify(g_vec_p, b_vec_p, u, proof[:-1], n_p, P_p, transcript) + n = proof[0] iproof = proof[1:] if crs is None: diff --git a/tests/test_poly_commit_log.py b/tests/test_poly_commit_log.py index f9b0e5fb..8e745140 100644 --- a/tests/test_poly_commit_log.py +++ b/tests/test_poly_commit_log.py @@ -3,6 +3,7 @@ from honeybadgermpc.polynomial import polynomials_over from honeybadgermpc.poly_commit_log import PolyCommitLog + @mark.parametrize("t", [3, 6, 10]) def test_pc_log(t): pc = PolyCommitLog() @@ -16,6 +17,7 @@ def test_pc_log(t): assert not pc.verify_eval(c, 4, phi(3), witness) assert not pc.verify_eval(G1.rand(), 3, phi(3), witness) + @mark.parametrize("t", [3, 6, 10]) def test_pc_log_batch(t): pc = PolyCommitLog() diff --git a/tests/test_proofs.py b/tests/test_proofs.py index cc1cab16..d42541fb 100644 --- a/tests/test_proofs.py +++ b/tests/test_proofs.py @@ -1,7 +1,12 @@ -from honeybadgermpc.proofs import prove_inner_product, verify_inner_product, \ - prove_inner_product_one_known, verify_inner_product_one_known, \ - prove_batch_inner_product_one_known, verify_batch_inner_product_one_known, \ - MerkleTree +from honeybadgermpc.proofs import ( + prove_inner_product, + verify_inner_product, + prove_inner_product_one_known, + verify_inner_product_one_known, + prove_batch_inner_product_one_known, + verify_batch_inner_product_one_known, + MerkleTree, +) from 
honeybadgermpc.betterpairing import ZR, G1 @@ -11,7 +16,7 @@ def test_inner_product_proof(): b = [ZR.random() for i in range(n)] iprod = ZR(0) for i in range(n): - iprod += a[i]*b[i] + iprod += a[i] * b[i] comm, iprod, proof = prove_inner_product(a, b) assert verify_inner_product(comm, iprod, proof) comm, iprod, proof2 = prove_inner_product(a, b, comm=comm) @@ -26,7 +31,7 @@ def test_inner_product_proof_one_known(): b = [ZR.random() for i in range(n)] iprod = ZR(0) for i in range(n): - iprod += a[i]*b[i] + iprod += a[i] * b[i] comm, iprod, proof = prove_inner_product_one_known(a, b) assert verify_inner_product_one_known(comm, iprod, b, proof) comm, iprod, badproof = prove_inner_product_one_known(a, b, comm=G1.rand()) @@ -36,15 +41,18 @@ def test_inner_product_proof_one_known(): def test_batch_inner_product_proof_one_known(): n = 13 a = [ZR.random() for i in range(n)] - bs = [[ZR.random() for j in range(n)] for i in range(3*n)] + bs = [[ZR.random() for j in range(n)] for i in range(3 * n)] comm, iprods, proofs = prove_batch_inner_product_one_known(a, bs) assert verify_batch_inner_product_one_known(comm, iprods[2], bs[2], proofs[2]) comm, iprods, badproofs = prove_batch_inner_product_one_known(a, bs, comm=G1.rand()) - assert not verify_batch_inner_product_one_known(comm, iprods[2], bs[2], badproofs[2]) + assert not verify_batch_inner_product_one_known( + comm, iprods[2], bs[2], badproofs[2] + ) def test_merkle_tree(): import pickle + leaves = [b"Cravings", b"is", b"best", b"restaurant"] t = MerkleTree(leaves) rh = t.get_root_hash() From 558407d9b98f3f5ba904c945b98e32fab8d3b31d Mon Sep 17 00:00:00 2001 From: tyurek Date: Thu, 24 Oct 2019 17:39:51 -0500 Subject: [PATCH 6/6] forgot one --- benchmark/test_benchmark_poly_commit_log.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmark/test_benchmark_poly_commit_log.py b/benchmark/test_benchmark_poly_commit_log.py index 17fb6c62..90d618a6 100644 --- a/benchmark/test_benchmark_poly_commit_log.py +++ b/benchmark/test_benchmark_poly_commit_log.py @@ -26,4 +26,4 @@ def test_benchmark_create_batch_witness(benchmark, t): r = ZR.random() phi = polynomials_over(ZR).random(t) pc.preprocess_prover() - benchmark(pc.batch_create_witness, phi, r, n=3*t+1) + benchmark(pc.batch_create_witness, phi, r, n=3 * t + 1)
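
A minimal usage sketch of the PolyCommitLog API added in this series, following the calls exercised in tests/test_poly_commit_log.py and the benchmarks above. The degree (t = 3), the evaluation point, and n are illustrative choices only; per the in-code comment, batch witness j is assumed to correspond to evaluation point j + 1.

    from honeybadgermpc.betterpairing import ZR
    from honeybadgermpc.polynomial import polynomials_over
    from honeybadgermpc.poly_commit_log import PolyCommitLog

    t = 3
    pc = PolyCommitLog(degree_max=t)
    phi = polynomials_over(ZR).random(t)  # degree-t polynomial to be committed
    r = ZR.random()                       # blinding randomness for the commitment
    c = pc.commit(phi, r)

    # Single witness: prove and verify the evaluation phi(3).
    witness = pc.create_witness(phi, r, 3)
    assert pc.verify_eval(c, 3, phi(3), witness)

    # Batched witnesses for points 1..n (n defaults to 3*t + 1 if unset).
    pc.preprocess_prover()
    witnesses = pc.batch_create_witness(phi, r, n=3 * t + 1)
    assert pc.verify_eval(c, 1, phi(1), witnesses[0])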
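
These witnesses bottom out in the recursive inner-product arguments in honeybadgermpc/proofs.py. Below is a sketch of calling the one-known-vector variant directly, mirroring tests/test_proofs.py; the vector length n = 10 is an arbitrary choice.

    from honeybadgermpc.betterpairing import ZR
    from honeybadgermpc.proofs import (
        prove_inner_product_one_known,
        verify_inner_product_one_known,
        prove_batch_inner_product_one_known,
        verify_batch_inner_product_one_known,
    )

    n = 10
    a = [ZR.random() for _ in range(n)]  # vector known only to the prover
    b = [ZR.random() for _ in range(n)]  # vector also known to the verifier
    comm, iprod, proof = prove_inner_product_one_known(a, b)
    assert verify_inner_product_one_known(comm, iprod, b, proof)

    # Batched form: one secret vector against many public vectors, reusing the
    # expensive group-element work and tying the proofs together with a Merkle
    # root in the Fiat-Shamir transcript.
    bs = [[ZR.random() for _ in range(n)] for _ in range(3 * n)]
    comm, iprods, proofs = prove_batch_inner_product_one_known(a, bs)
    assert verify_batch_inner_product_one_known(comm, iprods[0], bs[0], proofs[0])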