Merge pull request #40 from pluto/feat/new-hash-based-circuits
feat: hash-based HTTP circuits
Showing 8 changed files with 407 additions and 59 deletions.
@@ -0,0 +1,77 @@
pragma circom 2.1.9;

include "../parser/machine.circom";
include "../interpreter.circom";
include "../../utils/bytes.circom";

template HttpNIVC(DATA_BYTES, MAX_NUMBER_OF_HEADERS) {
    signal input step_in[1];
    signal output step_out[1];

    // Authenticate the plaintext we are passing in
    signal input data[DATA_BYTES];
    signal data_hash <== DataHasher(DATA_BYTES)(data);
    data_hash === step_in[0];
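    // NOTE: DataHasher appears to be the repo's digest over the fixed-size byte
    // array; equating it with step_in[0] chains this fold to the previous step's
    // output, so the prover cannot substitute different plaintext between folds.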

    signal input start_line_hash;
    signal input header_hashes[MAX_NUMBER_OF_HEADERS];
    signal input body_hash;

    // TODO: could just have a parser template and reduce code here
    component State[DATA_BYTES];
    State[0] = HttpStateUpdate();
    State[0].byte <== data[0];
    State[0].parsing_start <== 1;
    State[0].parsing_header <== 0;
    State[0].parsing_field_name <== 0;
    State[0].parsing_field_value <== 0;
    State[0].parsing_body <== 0;
    State[0].line_status <== 0;

    for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) {
        State[data_idx] = HttpStateUpdate();
        State[data_idx].byte <== data[data_idx];
        State[data_idx].parsing_start <== State[data_idx - 1].next_parsing_start;
        State[data_idx].parsing_header <== State[data_idx - 1].next_parsing_header;
        State[data_idx].parsing_field_name <== State[data_idx - 1].next_parsing_field_name;
        State[data_idx].parsing_field_value <== State[data_idx - 1].next_parsing_field_value;
        State[data_idx].parsing_body <== State[data_idx - 1].next_parsing_body;
        State[data_idx].line_status <== State[data_idx - 1].next_line_status;
    }

    // Extract the start line
    signal start_line[DATA_BYTES];
    signal not_start_line_mask[DATA_BYTES];
    for(var i = 0 ; i < DATA_BYTES ; i++) {
        not_start_line_mask[i] <== IsZero()(State[i].parsing_start);
        start_line[i] <== data[i] * (1 - not_start_line_mask[i]);
    }
    signal inner_start_line_hash <== DataHasher(DATA_BYTES)(start_line);
    inner_start_line_hash === start_line_hash;
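    // e.g. for a response starting "HTTP/1.1 200 OK\r\n", parsing_start is nonzero
    // only while that first line is being read, so start_line keeps those bytes
    // (zeros elsewhere) and its digest must equal the committed start_line_hash.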

    // Extract the headers
    signal header[MAX_NUMBER_OF_HEADERS][DATA_BYTES];
    signal header_masks[MAX_NUMBER_OF_HEADERS][DATA_BYTES];
    for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS ; i++) {
        for(var j = 0 ; j < DATA_BYTES ; j++) {
            header_masks[i][j] <== IsEqual()([State[j].parsing_header, i + 1]);
            header[i][j] <== data[j] * header_masks[i][j];
        }
    }
    signal inner_header_hashes[MAX_NUMBER_OF_HEADERS];
    signal header_is_unused[MAX_NUMBER_OF_HEADERS]; // a header hash of 0 marks an unused slot (there is no known preimage of 0)
    for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS ; i++) {
        header_is_unused[i] <== IsZero()(header_hashes[i]);
        inner_header_hashes[i] <== DataHasher(DATA_BYTES)(header[i]);
        (1 - header_is_unused[i]) * inner_header_hashes[i] === header_hashes[i];
    }
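    // e.g. with headers "Host: example.com" then "Accept: */*", parsing_header is 1
    // over the first header's bytes and 2 over the second, so header[0] and header[1]
    // isolate them (zero-padded to DATA_BYTES) before hashing. An unused slot commits
    // to 0, which the IsZero branch above exempts from the hash check.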

    // Extract the body
    signal body[DATA_BYTES];
    for(var i = 0 ; i < DATA_BYTES ; i++) {
        body[i] <== data[i] * State[i].parsing_body;
    }
    signal inner_body_hash <== DataHasher(DATA_BYTES)(body);
    inner_body_hash === body_hash;
    step_out[0] <== inner_body_hash;
}
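For reference, a build of this template might be instantiated as follows (a sketch only; the parameter values are illustrative, not the repo's actual configuration):

// Hypothetical instantiation: 1024 bytes of padded plaintext, up to 16 headers.
component main { public [step_in] } = HttpNIVC(1024, 16);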
@@ -0,0 +1,5 @@
# JSON Notes

We can keep a hash chain up the stack, storing at most MAX_STACK_HEIGHT hash values, and write into this array at the current depth each time we get a new value. We should also hash the stack indicator (array vs. object).

We then just have to assert that we get an `ArrayEqual` match with our given input at some point. We also assert that the hash of the value itself is correct (possibly this happens uniformly, by treating the value as an object itself; some details remain).
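A minimal sketch of the stack hash chain, assuming circomlib's `Poseidon` is available for the chaining hash; the template name and shape here are illustrative, not an existing API:

```circom
pragma circom 2.1.9;

include "circomlib/circuits/poseidon.circom";

// Fold each stack entry (value, array/object indicator) into a running hash,
// so the whole stack is committed to by a single field element.
template StackHashChain(MAX_STACK_HEIGHT) {
    signal input stack[MAX_STACK_HEIGHT][2];
    signal output digest;

    signal chain[MAX_STACK_HEIGHT + 1];
    chain[0] <== 0;
    for (var i = 0; i < MAX_STACK_HEIGHT; i++) {
        chain[i + 1] <== Poseidon(3)([chain[i], stack[i][0], stack[i][1]]);
    }
    digest <== chain[MAX_STACK_HEIGHT];
}
```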
@@ -0,0 +1,56 @@
pragma circom 2.1.9;

include "../interpreter.circom";

template JsonMaskObjectNIVC(DATA_BYTES, MAX_STACK_HEIGHT, MAX_KEY_LENGTH, MAX_KEY_SIZE) {
    signal input step_in[1];
    signal output step_out[1];

    // Input a hash of the value we are extracting
    signal input value_hash[1];

    // Use a codified version of the keys to traverse the JSON
    signal input keys[MAX_STACK_HEIGHT][MAX_KEY_LENGTH][1];
    // dimensions: [max keys we could ever use][max key length][key_type],
    // where key_type is: 0 for null, 1 for string, 2 for array index, 3 for value

    /*
    [["data",1], ["items",1], [0,2], ["profile",1], ["name",1], ["Taylor Swift",3], [0,0]]
    This is the branch of the JSON "tree" that we want to prove exists.
    */

    // Authenticate the (potentially further masked) plaintext we are passing in
    signal input data[DATA_BYTES];
    signal data_hash <== DataHasher(DATA_BYTES)(data);
    data_hash === step_in[0];

    // Run the JSON parser
    component State[DATA_BYTES - MAX_KEY_LENGTH];
    State[0] = StateUpdate(MAX_STACK_HEIGHT);
    State[0].byte <== data[0];
    for(var i = 0; i < MAX_STACK_HEIGHT; i++) {
        State[0].stack[i] <== [0,0];
    }
    State[0].parsing_string <== 0;
    State[0].parsing_number <== 0;


    for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) {
        if(data_idx < DATA_BYTES - MAX_KEY_LENGTH) {
            State[data_idx] = StateUpdate(MAX_STACK_HEIGHT);
            State[data_idx].byte <== data[data_idx];
            State[data_idx].stack <== State[data_idx - 1].next_stack;
            State[data_idx].parsing_string <== State[data_idx - 1].next_parsing_string;
            State[data_idx].parsing_number <== State[data_idx - 1].next_parsing_number;
        }

        // -------------------------------------------------------------------------------------- //
        // The key-matching and masking constraints belong here.
        // -------------------------------------------------------------------------------------- //
    }

    // TODO (autoparallel): the masking itself is not implemented yet; pass the data
    // through unchanged as a placeholder so the template compiles.
    signal masked[DATA_BYTES];
    for(var i = 0; i < DATA_BYTES; i++) {
        masked[i] <== data[i];
    }
    step_out[0] <== DataHasher(DATA_BYTES)(masked);
}

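The notes above call for an `ArrayEqual`-style check once the key-matching logic lands; below is a self-contained sketch of that comparison, assuming only circomlib's IsEqual (already used by the HTTP circuit). The template name is hypothetical, not part of this repo.

// Outputs 1 iff candidate and key agree in every slot.
template KeyMatchSketch(MAX_KEY_LENGTH) {
    signal input candidate[MAX_KEY_LENGTH];
    signal input key[MAX_KEY_LENGTH];
    signal output is_match;

    signal eq[MAX_KEY_LENGTH];
    signal acc[MAX_KEY_LENGTH + 1];
    acc[0] <== 1;
    for(var i = 0; i < MAX_KEY_LENGTH; i++) {
        eq[i] <== IsEqual()([candidate[i], key[i]]);
        acc[i + 1] <== acc[i] * eq[i];
    }
    is_match <== acc[MAX_KEY_LENGTH];
}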