diff --git a/CHANGELOG.md b/CHANGELOG.md index 7ce1a66..27f895c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,8 +5,37 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). +## [0.7.0] - 2024-12-03 + +### Added +### Changed +#### Circuit Builds +#### Artifacts +- **Circuit sizes:** + - `plaintext_authentication_1024b`: + - non-linear constraints: `383,102` + - R1CS file: `123.4MB` + - Graph file: `19.9MB` + - `http_verification_1024b`: + - non-linear constraints: `121,376` + - R1CS file: `80.7MB` + - Graph file: `4.4MB` + - **WARNING:** Extremely slow build with `--O2` flag. Need to investigate. + - `json_extraction_1024b`: + - non-linear constraints: `452,683` + - R1CS file: `90.3MB` + - Graph file: `13.2MB` + - **Total size:** `243.7MB` +- **Circuit param file sizes (SNARK):** + - `aux_params`: `62.2MB` + - `prover_key`: `50.3MB` + - `verifier_key`: `415.3MB` + +### Notes + +--- -## [UNRELEASED] [0.6.0] - 2024-12-3 +## [0.6.0] - 2024-12-03 ### Added @@ -21,9 +50,6 @@ and this project adheres to [Semantic Versioning](http://semver.org/). - Runs with `512b` per fold - `http_nivc` replaced with more suitable name: `http_verification` -### Fixed -- TODO - ### Notes - **Total circuits:** 5 - **Circuit sizes:** diff --git a/README.md b/README.md index 1c96613..ce212e8 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,10 @@ ## Overview -`web-prover-circuits` is a project focused on implementing parsers and extractors/selective-disclosure for various data formats inside zero-knowledge circuits. +`web-prover-circuits` is a project focused on implementing parsers and extractors/selective-disclosure for various data formats inside zero-knowledge circuits. +Specifically, these are designed to be used in an NIVC folding scheme. +Currently, our program layout looks like this: +![v0.7.0](docs/images/v0.7.0.png) ## Repository Structure @@ -94,12 +97,7 @@ npx circomkit compile plaintext_authentication_1024b ``` which implicitly checks the `circuits.json` for an object that points to the circuit's code itself. -If you are having trouble with `circomkit`, consider: - -##### SNARKJS -Likewise, `snarkjs` is used to handle proofs and verification under the hood. -There is [documentation](https://docs.circom.io/getting-started/compiling-circuits/) on Circom's usage to work with this. -We suggest starting at that link and carrying through to "Proving circuits with ZK". +If you are having trouble with `circomkit`, consider consulting its documentation. ##### Mocha `mocha` will also be installed from before.
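For concreteness, the `circuits.json` lookup mentioned above resolves each build name to a `file`/`template`/`params` triple, and the mocha suite exercises the same circuits through `circomkit`'s `WitnessTester`. The sketch below is illustrative only and is not part of this diff: it assumes the shared `circomkit` instance exported from `circuits/test/common` (as the existing tests use) and circomkit's `getConstraintCount()` helper, and it targets the renamed `plaintext_authentication_1024b` build.

```typescript
import { WitnessTester } from "circomkit";
// Shared Circomkit instance used by the existing test suite (assumed relative path).
import { circomkit } from "../common";

describe("plaintext_authentication_1024b (illustrative)", () => {
  // Signal names follow the PlaintextAuthentication template introduced in this diff.
  let circuit: WitnessTester<["key", "nonce", "counter", "plaintext", "step_in"], ["step_out"]>;

  it("compiles from its file/template/params triple", async () => {
    circuit = await circomkit.WitnessTester(`PlaintextAuthentication`, {
      file: "chacha20/authentication",    // the values a circuits.json entry would carry
      template: "PlaintextAuthentication",
      params: [1024],                     // DATA_BYTES for the 1024b build
    });
    // Log the constraint count, comparable to the circuit sizes reported in the changelog above.
    console.log("constraints:", await circuit.getConstraintCount());
  });
});
```

The `512b` builds follow the same pattern with `params: [512]`.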
diff --git a/builds/target_1024b/json_extract_value_1024b.circom b/builds/target_1024b/json_extract_value_1024b.circom deleted file mode 100644 index fffbc10..0000000 --- a/builds/target_1024b/json_extract_value_1024b.circom +++ /dev/null @@ -1,5 +0,0 @@ -pragma circom 2.1.9; - -include "../../circuits/json/nivc/extractor.circom"; - -component main { public [step_in] } = MaskExtractFinal(1024, 50); \ No newline at end of file diff --git a/builds/target_1024b/json_extraction_1024b.circom b/builds/target_1024b/json_extraction_1024b.circom new file mode 100644 index 0000000..890a096 --- /dev/null +++ b/builds/target_1024b/json_extraction_1024b.circom @@ -0,0 +1,5 @@ +pragma circom 2.1.9; + +include "../../circuits/json/extraction.circom"; + +component main { public [step_in] } = JSONExtraction(1024, 10); \ No newline at end of file diff --git a/builds/target_1024b/json_mask_array_index_1024b.circom b/builds/target_1024b/json_mask_array_index_1024b.circom deleted file mode 100644 index d2793c8..0000000 --- a/builds/target_1024b/json_mask_array_index_1024b.circom +++ /dev/null @@ -1,5 +0,0 @@ -pragma circom 2.1.9; - -include "../../circuits/json/nivc/masker.circom"; - -component main { public [step_in] } = JsonMaskArrayIndexNIVC(1024, 10); \ No newline at end of file diff --git a/builds/target_1024b/json_mask_object_1024b.circom b/builds/target_1024b/json_mask_object_1024b.circom deleted file mode 100644 index e91d812..0000000 --- a/builds/target_1024b/json_mask_object_1024b.circom +++ /dev/null @@ -1,5 +0,0 @@ -pragma circom 2.1.9; - -include "../../circuits/json/nivc/masker.circom"; - -component main { public [step_in] } = JsonMaskObjectNIVC(1024, 10, 10); diff --git a/builds/target_1024b/plaintext_authentication_1024b.circom b/builds/target_1024b/plaintext_authentication_1024b.circom index 5b82c7d..89fde4e 100644 --- a/builds/target_1024b/plaintext_authentication_1024b.circom +++ b/builds/target_1024b/plaintext_authentication_1024b.circom @@ -1,5 +1,5 @@ pragma circom 2.1.9; -include "../../circuits/chacha20/nivc/chacha20_nivc.circom"; +include "../../circuits/chacha20/authentication.circom"; -component main { public [step_in] } = ChaCha20_NIVC(1024); \ No newline at end of file +component main { public [step_in] } = PlaintextAuthentication(1024); \ No newline at end of file diff --git a/builds/target_512b/json_extract_value_512b.circom b/builds/target_512b/json_extract_value_512b.circom deleted file mode 100644 index 600cc0a..0000000 --- a/builds/target_512b/json_extract_value_512b.circom +++ /dev/null @@ -1,5 +0,0 @@ -pragma circom 2.1.9; - -include "../../circuits/json/nivc/extractor.circom"; - -component main { public [step_in] } = MaskExtractFinal(512, 50); \ No newline at end of file diff --git a/builds/target_512b/json_extraction_512b.circom b/builds/target_512b/json_extraction_512b.circom new file mode 100644 index 0000000..52b216a --- /dev/null +++ b/builds/target_512b/json_extraction_512b.circom @@ -0,0 +1,5 @@ +pragma circom 2.1.9; + +include "../../circuits/json/extraction.circom"; + +component main { public [step_in] } = JSONExtraction(512, 10); \ No newline at end of file diff --git a/builds/target_512b/json_mask_array_index_512b.circom b/builds/target_512b/json_mask_array_index_512b.circom deleted file mode 100644 index ec72dc7..0000000 --- a/builds/target_512b/json_mask_array_index_512b.circom +++ /dev/null @@ -1,5 +0,0 @@ -pragma circom 2.1.9; - -include "../../circuits/json/nivc/masker.circom"; - -component main { public [step_in] } = JsonMaskArrayIndexNIVC(512, 10); \ No 
newline at end of file diff --git a/builds/target_512b/json_mask_object_512b.circom b/builds/target_512b/json_mask_object_512b.circom deleted file mode 100644 index 3bd0e31..0000000 --- a/builds/target_512b/json_mask_object_512b.circom +++ /dev/null @@ -1,5 +0,0 @@ -pragma circom 2.1.9; - -include "../../circuits/json/nivc/masker.circom"; - -component main { public [step_in] } = JsonMaskObjectNIVC(512, 10, 10); diff --git a/builds/target_512b/plaintext_authentication_512b.circom b/builds/target_512b/plaintext_authentication_512b.circom index f0c99ff..30388e9 100644 --- a/builds/target_512b/plaintext_authentication_512b.circom +++ b/builds/target_512b/plaintext_authentication_512b.circom @@ -1,5 +1,5 @@ pragma circom 2.1.9; -include "../../circuits/chacha20/nivc/chacha20_nivc.circom"; +include "../../circuits/chacha20/authentication.circom"; -component main { public [step_in] } = ChaCha20_NIVC(512); \ No newline at end of file +component main { public [step_in] } = PlaintextAuthentication(512); \ No newline at end of file diff --git a/circuits.json b/circuits.json index 8ed4b53..06e5fd9 100644 --- a/circuits.json +++ b/circuits.json @@ -14,29 +14,12 @@ 25 ] }, - "json_mask_object_1024b": { - "file": "json/nivc/masker", - "template": "JsonMaskObjectNIVC", + "json_extraction_1024b": { + "file": "json/extraction", + "template": "JSONExtraction", "params": [ 1024, - 10, 10 ] - }, - "json_mask_array_index_1024b": { - "file": "json/nivc/masker", - "template": "JsonMaskArrayIndexNIVC", - "params": [ - 1024, - 10 - ] - }, - "json_extract_value_1024b": { - "file": "json/nivc/extractor", - "template": "MaskExtractFinal", - "params": [ - 1024, - 50 - ] } } \ No newline at end of file diff --git a/circuits/chacha20/nivc/chacha20_nivc.circom b/circuits/chacha20/authentication.circom similarity index 75% rename from circuits/chacha20/nivc/chacha20_nivc.circom rename to circuits/chacha20/authentication.circom index e969b9d..973feef 100644 --- a/circuits/chacha20/nivc/chacha20_nivc.circom +++ b/circuits/chacha20/authentication.circom @@ -2,11 +2,12 @@ // modified for our needs pragma circom 2.1.9; -include "../chacha-round.circom"; -include "../chacha-qr.circom"; -include "../../utils/bits.circom"; -include "../../utils/hash.circom"; -include "../../utils/array.circom"; +include "chacha-round.circom"; +include "chacha-qr.circom"; +include "../utils/bits.circom"; +include "../utils/hash.circom"; +include "../utils/array.circom"; +include "circomlib/circuits/poseidon.circom"; /** ChaCha20 in counter mode */ @@ -23,7 +24,7 @@ include "../../utils/array.circom"; // | # | N | N | N | // +---+---+---+---+ // paramaterized by `DATA_BYTES` which is the plaintext length in bytes -template ChaCha20_NIVC(DATA_BYTES) { +template PlaintextAuthentication(DATA_BYTES) { // key => 8 32-bit words = 32 bytes signal input key[8][32]; // nonce => 3 32-bit words = 12 bytes @@ -33,22 +34,22 @@ template ChaCha20_NIVC(DATA_BYTES) { // the below can be both ciphertext or plaintext depending on the direction // in => N 32-bit words => N 4 byte words - signal input plainText[DATA_BYTES]; + signal input plaintext[DATA_BYTES]; - // step_in should be the ciphertext digest + // step_in should be the ciphertext digest + the HTTP digests + JSON seq digest signal input step_in[1]; // step_out should be the plaintext digest signal output step_out[1]; - signal isPadding[DATA_BYTES]; + signal isPadding[DATA_BYTES]; // == 1 in the case we hit padding number signal plaintextBits[DATA_BYTES / 4][32]; component toBits[DATA_BYTES / 4]; for (var i = 
0 ; i < DATA_BYTES / 4 ; i++) { toBits[i] = fromWords32ToLittleEndian(); for (var j = 0 ; j < 4 ; j++) { - isPadding[i * 4 + j] <== IsEqual()([plainText[i * 4 + j], -1]); - toBits[i].words[j] <== (1 - isPadding[i * 4 + j]) * plainText[i*4 + j]; + isPadding[i * 4 + j] <== IsEqual()([plaintext[i * 4 + j], -1]); + toBits[i].words[j] <== (1 - isPadding[i * 4 + j]) * plaintext[i*4 + j]; } plaintextBits[i] <== toBits[i].data; } @@ -130,7 +131,7 @@ template ChaCha20_NIVC(DATA_BYTES) { component toCiphertextBytes[DATA_BYTES / 4]; signal bigEndianCiphertext[DATA_BYTES]; - + for (var i = 0 ; i < DATA_BYTES / 4 ; i++) { toCiphertextBytes[i] = fromLittleEndianToWords32(); for (var j = 0 ; j < 32 ; j++) { @@ -141,9 +142,15 @@ template ChaCha20_NIVC(DATA_BYTES) { } } - signal ciphertext_hash <== DataHasher(DATA_BYTES)(bigEndianCiphertext); - step_in[0] === ciphertext_hash; + signal ciphertext_digest <== DataHasher(DATA_BYTES)(bigEndianCiphertext); - signal plaintext_hash <== DataHasher(DATA_BYTES)(plainText); - step_out[0] <== plaintext_hash; + signal zeroed_plaintext[DATA_BYTES]; + for(var i = 0 ; i < DATA_BYTES ; i++) { + // Sets any padding bytes to zero (which are presumably at the end) so they don't accum into the poly hash + zeroed_plaintext[i] <== (1 - isPadding[i]) * plaintext[i]; + } + signal plaintext_digest <== PolynomialDigest(DATA_BYTES)(zeroed_plaintext, ciphertext_digest); + signal plaintext_digest_hashed <== Poseidon(1)([plaintext_digest]); + // TODO: I'm not sure we need to subtract the CT digest + step_out[0] <== step_in[0] - ciphertext_digest + plaintext_digest_hashed; } \ No newline at end of file diff --git a/circuits/http/verification.circom b/circuits/http/verification.circom index a88b480..6c53dc4 100644 --- a/circuits/http/verification.circom +++ b/circuits/http/verification.circom @@ -1,22 +1,32 @@ pragma circom 2.1.9; include "machine.circom"; +// TODO: we don't need this if we do a poly digest of the plaintext in authentication circuit include "../utils/hash.circom"; template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS) { signal input step_in[1]; signal output step_out[1]; - // Authenticate the plaintext we are passing in + signal input ciphertext_digest; + signal input data[DATA_BYTES]; - signal data_hash <== DataHasher(DATA_BYTES)(data); - data_hash === step_in[0]; + signal isPadding[DATA_BYTES]; // == 1 in the case we hit padding number + signal zeroed_data[DATA_BYTES]; + for (var i = 0 ; i < DATA_BYTES ; i++) { + isPadding[i] <== IsEqual()([data[i], -1]); + zeroed_data[i] <== (1 - isPadding[i]) * data[i]; + } + signal data_digest <== PolynomialDigest(DATA_BYTES)(zeroed_data, ciphertext_digest); - signal input start_line_hash; - signal input header_hashes[MAX_NUMBER_OF_HEADERS]; - signal input body_hash; + signal input main_digests[MAX_NUMBER_OF_HEADERS + 1]; // Contains digests of start line and all intended headers (up to `MAX_NUMBER_OF_HEADERS`) + signal not_contained[MAX_NUMBER_OF_HEADERS + 1]; + var num_to_match = MAX_NUMBER_OF_HEADERS + 1; + for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS + 1 ; i++) { + not_contained[i] <== IsZero()(main_digests[i]); + num_to_match -= not_contained[i]; + } - // TODO: could just have a parser template and reduce code here component State[DATA_BYTES]; State[0] = HttpStateUpdate(); State[0].byte <== data[0]; @@ -38,46 +48,55 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS) { State[data_idx].line_status <== State[data_idx - 1].next_line_status; } - // Get the start line shit - signal start_line[DATA_BYTES]; - signal 
not_start_line_mask[DATA_BYTES]; - for(var i = 0 ; i < DATA_BYTES ; i++) { - not_start_line_mask[i] <== IsZero()(State[i].parsing_start); - start_line[i] <== data[i] * (1 - not_start_line_mask[i]); - } - signal inner_start_line_hash <== DataHasher(DATA_BYTES)(start_line); - signal start_line_hash_equal_check <== IsEqual()([inner_start_line_hash, start_line_hash]); - start_line_hash_equal_check === 1; + signal main_monomials[DATA_BYTES]; + main_monomials[0] <== 1; - // Get the header shit - signal header[MAX_NUMBER_OF_HEADERS][DATA_BYTES]; - signal header_masks[MAX_NUMBER_OF_HEADERS][DATA_BYTES]; - for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS ; i++) { - for(var j = 0 ; j < DATA_BYTES ; j++) { - header_masks[i][j] <== IsEqual()([State[j].parsing_header, i + 1]); - header[i][j] <== data[j] * header_masks[i][j]; - } - } - signal inner_header_hashes[MAX_NUMBER_OF_HEADERS]; - signal header_is_unused[MAX_NUMBER_OF_HEADERS]; // If a header hash is passed in as 0, it is not used (no way to compute preimage of 0) - signal header_hashes_equal_check[MAX_NUMBER_OF_HEADERS]; - for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS ; i++) { - header_is_unused[i] <== IsZero()(header_hashes[i]); - inner_header_hashes[i] <== DataHasher(DATA_BYTES)(header[i]); - header_hashes_equal_check[i] <== IsEqual()([(1 - header_is_unused[i]) * inner_header_hashes[i], header_hashes[i]]); - header_hashes_equal_check[i] === 1; + signal is_line_change[DATA_BYTES-1]; + signal was_cleared[DATA_BYTES-1]; + signal not_body_and_not_line_change[DATA_BYTES-1]; + + signal rescaled_or_was_cleared[DATA_BYTES-1]; + for(var i = 0 ; i < DATA_BYTES - 1 ; i++) { + is_line_change[i] <== Contains(2)(data[i + 1], [10, 13]); // capture if we hit an end line sequence + was_cleared[i] <== IsZero()(main_monomials[i]); + not_body_and_not_line_change[i] <== (1 - State[i + 1].parsing_body) * (1 - is_line_change[i]); + rescaled_or_was_cleared[i] <== (main_monomials[i] * ciphertext_digest + was_cleared[i]); + main_monomials[i + 1] <== not_body_and_not_line_change[i] * rescaled_or_was_cleared[i]; } - // Get the body shit - signal body[DATA_BYTES]; + signal is_match[DATA_BYTES]; + signal contains[DATA_BYTES]; + signal is_zero[DATA_BYTES]; + signal monomial_is_zero[DATA_BYTES]; + signal accum_prev[DATA_BYTES]; + var num_matched = 0; + signal inner_main_digest[DATA_BYTES + 1]; + inner_main_digest[0] <== 0; for(var i = 0 ; i < DATA_BYTES ; i++) { - body[i] <== data[i] * State[i].parsing_body; + monomial_is_zero[i] <== IsZero()(main_monomials[i]); + accum_prev[i] <== (1 - monomial_is_zero[i]) * inner_main_digest[i]; + inner_main_digest[i+1] <== accum_prev[i] + data[i] * main_monomials[i]; + is_zero[i] <== IsZero()(inner_main_digest[i+1]); + contains[i] <== Contains(MAX_NUMBER_OF_HEADERS + 1)(inner_main_digest[i+1], main_digests); + is_match[i] <== (1 - is_zero[i]) * contains[i]; + num_matched += is_match[i]; } - signal inner_body_hash <== DataHasher(DATA_BYTES)(body); - signal body_hash_equal_check <== IsEqual()([inner_body_hash, body_hash]); - body_hash_equal_check === 1; + num_matched === num_to_match; - step_out[0] <== inner_body_hash; + // BODY + signal body_monomials[DATA_BYTES]; + signal body_accum[DATA_BYTES]; + signal body_switch[DATA_BYTES -1]; + signal body_digest[DATA_BYTES]; + body_monomials[0] <== 0; + body_accum[0] <== 0; + body_digest[0] <== 0; + for(var i = 0 ; i < DATA_BYTES - 1 ; i++) { + body_accum[i + 1] <== body_accum[i] + State[i + 1].parsing_body; + body_switch[i] <== IsEqual()([body_accum[i + 1], 1]); + body_monomials[i + 1] <== body_monomials[i] 
* ciphertext_digest + body_switch[i]; + body_digest[i + 1] <== body_digest[i] + body_monomials[i + 1] * zeroed_data[i + 1]; + } // Verify machine ends in a valid state State[DATA_BYTES - 1].next_parsing_start === 0; @@ -86,4 +105,18 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS) { State[DATA_BYTES - 1].next_parsing_field_value === 0; State[DATA_BYTES - 1].next_parsing_body === 1; State[DATA_BYTES - 1].next_line_status === 0; + + // TODO: Need to subtract all the header digests here and also wrap them in poseidon. We can use the ones from the input to make this cheaper since they're verified in this circuit! + signal body_digest_hashed <== Poseidon(1)([body_digest[DATA_BYTES - 1]]); + signal data_digest_hashed <== Poseidon(1)([data_digest]); + signal option_hash[MAX_NUMBER_OF_HEADERS + 1]; + signal main_digests_hashed[MAX_NUMBER_OF_HEADERS + 1]; + var accumulated_main_digests_hashed = 0; + for(var i = 0 ; i < MAX_NUMBER_OF_HEADERS + 1 ; i++) { + option_hash[i] <== Poseidon(1)([main_digests[i]]); + main_digests_hashed[i] <== (1 - not_contained[i]) * option_hash[i]; + accumulated_main_digests_hashed += main_digests_hashed[i]; + } + + step_out[0] <== step_in[0] + body_digest_hashed - accumulated_main_digests_hashed - data_digest_hashed; // TODO: data_digest is really plaintext_digest from before, consider changing names } diff --git a/circuits/json/extraction.circom b/circuits/json/extraction.circom new file mode 100644 index 0000000..e5185c9 --- /dev/null +++ b/circuits/json/extraction.circom @@ -0,0 +1,127 @@ +pragma circom 2.1.9; + +include "../utils/bits.circom"; +include "hash_machine.circom"; + +template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT) { + signal input data[DATA_BYTES]; + signal input ciphertext_digest; + signal input sequence_digest; + signal input value_digest; + + signal input step_in[1]; + signal output step_out[1]; + + //--------------------------------------------------------------------------------------------// + component State[DATA_BYTES]; + + // Set up monomials for stack/tree digesting + signal monomials[3 * MAX_STACK_HEIGHT]; + monomials[0] <== 1; + for(var i = 1 ; i < 3 * MAX_STACK_HEIGHT ; i++) { + monomials[i] <== monomials[i - 1] * ciphertext_digest; + } + signal intermediate_digest[DATA_BYTES][3 * MAX_STACK_HEIGHT]; + signal state_digest[DATA_BYTES]; + + // Debugging + // for(var i = 0; i 44` - - out <== isNextPair*isComma ; -} - -/// Returns whether next key-value pair starts. -/// Applies following checks: -/// - get top of stack value and check whether parsing key: `[1, 0]` -/// - current byte = `,` -/// - current stack height is less than the key to be matched (it means that new key has started) -/// -/// # Arguments -/// - `n`: maximum stack depth -/// - `depth`: depth of matched key-value pair -/// -/// # Inputs -/// - `stack`: current stack state -/// - `curr_byte`: current parsed byte -/// -/// # Output -/// - `out`: Returns `1` for next key-value pair at specified depth. 
-template NextKVPairAtDepth(n) { - signal input stack[n][2]; - signal input currByte; - signal input depth; - signal output out; - - var logMaxDepth = log2Ceil(n+1); - - component topOfStack = GetTopOfStack(n); - topOfStack.stack <== stack; - signal currentVal[2] <== topOfStack.value; - signal pointer <== topOfStack.pointer; - - signal isNextPair <== IsEqualArray(2)([currentVal, [1, 0]]); - - // `,` -> 44 - signal isComma <== IsEqual()([currByte, 44]); - // pointer <= depth - // TODO: `LessThan` circuit warning - signal atLessDepth <== LessEqThan(logMaxDepth)([pointer-1, depth]); - // current depth is less than key depth - signal isCommaAtDepthLessThanCurrent <== isComma * atLessDepth; - - out <== isNextPair * isCommaAtDepthLessThanCurrent; -} - -/// Matches a JSON key at an `index` using Substring Matching -/// -/// # Arguments -/// - `dataLen`: parsed data length -/// - `keyLen`: key length -/// -/// # Inputs -/// - `data`: data bytes -/// - `key`: key bytes -/// - `r`: random number for substring matching. **Need to be chosen carefully.** -/// - `index`: data index to match from -/// - `parsing_key`: if current byte is inside a key -/// -/// # Output -/// - `out`: Returns `1` if `key` matches `data` at `index` -template KeyMatch(dataLen, keyLen) { - signal input data[dataLen]; - signal input key[keyLen]; - signal input index; - signal input parsing_key; - - // `"` -> 34 - signal end_of_key <== IndexSelector(dataLen)(data, index + keyLen); - signal is_end_of_key_equal_to_quote <== IsEqual()([end_of_key, 34]); - - signal start_of_key <== IndexSelector(dataLen)(data, index - 1); - signal is_start_of_key_equal_to_quote <== IsEqual()([start_of_key, 34]); - - signal substring_match <== SubstringMatchWithIndex(dataLen, keyLen)(data, key, index); - - signal is_key_between_quotes <== is_start_of_key_equal_to_quote * is_end_of_key_equal_to_quote; - signal is_parsing_correct_key <== is_key_between_quotes * parsing_key; - - signal output out <== substring_match * is_parsing_correct_key; -} - -/// Matches a JSON key at an `index` using Substring Matching at specified depth -/// -/// # Arguments -/// - `dataLen`: parsed data length -/// - `n`: maximum stack height -/// - `keyLen`: key length -/// - `depth`: depth of key to be matched -/// -/// # Inputs -/// - `data`: data bytes -/// - `key`: key bytes -/// - `r`: random number for substring matching. 
**Need to be chosen carefully.** -/// - `index`: data index to match from -/// - `parsing_key`: if current byte is inside a key -/// - `stack`: parser stack output -/// -/// # Output -/// - `out`: Returns `1` if `key` matches `data` at `index` -template KeyMatchAtDepth(dataLen, n, keyLen, depth) { - signal input data[dataLen]; - signal input key[keyLen]; - signal input index; - signal input parsing_key; - signal input stack[n][2]; - - component topOfStack = GetTopOfStack(n); - topOfStack.stack <== stack; - signal pointer <== topOfStack.pointer; - _ <== topOfStack.value; - - // `"` -> 34 - - // end of key equals `"` - signal end_of_key <== IndexSelector(dataLen)(data, index + keyLen); - signal is_end_of_key_equal_to_quote <== IsEqual()([end_of_key, 34]); - - // start of key equals `"` - signal start_of_key <== IndexSelector(dataLen)(data, index - 1); - signal is_start_of_key_equal_to_quote <== IsEqual()([start_of_key, 34]); - - // key matches - signal substring_match <== SubstringMatchWithIndex(dataLen, keyLen)(data, key, index); - - // key should be a string - signal is_key_between_quotes <== is_start_of_key_equal_to_quote * is_end_of_key_equal_to_quote; - - // is the index given correct? - signal is_parsing_correct_key <== is_key_between_quotes * parsing_key; - // is the key given by index at correct depth? - signal is_key_at_depth <== IsEqual()([pointer-1, depth]); - - signal is_parsing_correct_key_at_depth <== is_parsing_correct_key * is_key_at_depth; - - signal output out <== substring_match * is_parsing_correct_key_at_depth; -} - -template MatchPaddedKey(n) { - signal input in[2][n]; - signal input keyLen; - signal output out; - - var accum = 0; - component equalComponent[n]; - component isPaddedElement[n]; - - signal isEndOfKey[n]; - signal isQuote[n]; - signal endOfKeyAccum[n+1]; - endOfKeyAccum[0] <== 0; - - for(var i = 0; i < n; i++) { - isEndOfKey[i] <== IsEqual()([i, keyLen]); - isQuote[i] <== IsEqual()([in[1][i], 34]); - endOfKeyAccum[i+1] <== endOfKeyAccum[i] + isEndOfKey[i] * isQuote[i]; - - // TODO: might not be right to check for zero, instead check for -1? 
- isPaddedElement[i] = IsZero(); - isPaddedElement[i].in <== in[0][i]; - - equalComponent[i] = IsEqual(); - equalComponent[i].in[0] <== in[0][i]; - equalComponent[i].in[1] <== in[1][i] * (1-isPaddedElement[i].out); - accum += equalComponent[i].out; - } - - signal isEndOfKeyEqualToQuote <== IsEqual()([endOfKeyAccum[n], 1]); - - component totalEqual = IsEqual(); - totalEqual.in[0] <== n; - totalEqual.in[1] <== accum; - out <== totalEqual.out * isEndOfKeyEqualToQuote; -} - -/// Matches a JSON key at an `index` using Substring Matching at specified depth -/// -/// # Arguments -/// - `dataLen`: parsed data length -/// - `maxKeyLen`: maximum possible key length -/// - `index`: index of key in `data` -/// -/// # Inputs -/// - `data`: data bytes -/// - `key`: key bytes -/// - `parsing_key`: if current byte is inside a key -/// -/// # Output -/// - `out`: Returns `1` if `key` matches `data` at `index` -template KeyMatchAtIndex(dataLen, maxKeyLen, index) { - signal input data[dataLen]; - signal input key[maxKeyLen]; - signal input keyLen; - signal input parsing_key; - - signal paddedKey[maxKeyLen + 1]; - for (var i = 0 ; i < maxKeyLen ; i++) { - paddedKey[i] <== key[i]; - } - paddedKey[maxKeyLen] <== 0; - // `"` -> 34 - - // start of key equal to quote - signal startOfKeyEqualToQuote <== IsEqual()([data[index - 1], 34]); - signal isParsingCorrectKey <== parsing_key * startOfKeyEqualToQuote; - - // key matches - component isSubstringMatch = MatchPaddedKey(maxKeyLen+1); - isSubstringMatch.in[0] <== paddedKey; - isSubstringMatch.keyLen <== keyLen; - for(var matcher_idx = 0; matcher_idx <= maxKeyLen; matcher_idx++) { - isSubstringMatch.in[1][matcher_idx] <== data[index + matcher_idx]; - } - _ <== data; - - signal output out <== isSubstringMatch.out * isParsingCorrectKey; -} \ No newline at end of file diff --git a/circuits/json/parser/language.circom b/circuits/json/language.circom similarity index 100% rename from circuits/json/parser/language.circom rename to circuits/json/language.circom diff --git a/circuits/json/parser/machine.circom b/circuits/json/machine.circom similarity index 99% rename from circuits/json/parser/machine.circom rename to circuits/json/machine.circom index ff82988..5fa147a 100644 --- a/circuits/json/parser/machine.circom +++ b/circuits/json/machine.circom @@ -23,8 +23,8 @@ Tests for this module are located in the files: `circuits/test/parser/*.test.ts pragma circom 2.1.9; -include "../../utils/array.circom"; -include "../../utils/operators.circom"; +include "../utils/array.circom"; +include "../utils/operators.circom"; include "language.circom"; /* diff --git a/circuits/json/nivc/extractor.circom b/circuits/json/nivc/extractor.circom deleted file mode 100644 index f2de60f..0000000 --- a/circuits/json/nivc/extractor.circom +++ /dev/null @@ -1,32 +0,0 @@ -pragma circom 2.1.9; - -include "circomlib/circuits/gates.circom"; -include "@zk-email/circuits/utils/array.circom"; -include "../../utils/hash.circom"; - -template MaskExtractFinal(DATA_BYTES, MAX_VALUE_LENGTH) { - signal input step_in[1]; - signal input data[DATA_BYTES]; - - signal output step_out[1]; - - signal is_zero_mask[DATA_BYTES]; - signal is_prev_starting_index[DATA_BYTES]; - signal value_starting_index[DATA_BYTES]; - - signal data_hash <== DataHasher(DATA_BYTES)(data); - data_hash === step_in[0]; - - value_starting_index[0] <== 0; - is_prev_starting_index[0] <== 0; - is_zero_mask[0] <== IsZero()(data[0]); - for (var i=1 ; i < DATA_BYTES ; i++) { - is_zero_mask[i] <== IsZero()(data[i]); - is_prev_starting_index[i] <== 
IsZero()(value_starting_index[i-1]); - value_starting_index[i] <== value_starting_index[i-1] + i * (1-is_zero_mask[i]) * is_prev_starting_index[i]; - } - - signal value[MAX_VALUE_LENGTH] <== SelectSubArray(DATA_BYTES, MAX_VALUE_LENGTH)(data, value_starting_index[DATA_BYTES-1], MAX_VALUE_LENGTH); - - step_out[0] <== DataHasher(MAX_VALUE_LENGTH)(value); -} \ No newline at end of file diff --git a/circuits/json/nivc/masker.circom b/circuits/json/nivc/masker.circom deleted file mode 100644 index 97fcf8d..0000000 --- a/circuits/json/nivc/masker.circom +++ /dev/null @@ -1,136 +0,0 @@ -pragma circom 2.1.9; - -include "../interpreter.circom"; -include "../../utils/hash.circom"; - -template JsonMaskObjectNIVC(DATA_BYTES, MAX_STACK_HEIGHT, MAX_KEY_LENGTH) { - signal input step_in[1]; - signal input key[MAX_KEY_LENGTH]; - signal input keyLen; - - signal output step_out[1]; - - // Authenticate the (potentially further masked) plaintext we are passing in - signal input data[DATA_BYTES]; - signal data_hash <== DataHasher(DATA_BYTES)(data); - data_hash === step_in[0]; - - // flag determining whether this byte is matched value - signal is_value_match[DATA_BYTES - MAX_KEY_LENGTH]; - - component State[DATA_BYTES - MAX_KEY_LENGTH]; - State[0] = StateUpdate(MAX_STACK_HEIGHT); - State[0].byte <== data[0]; - for(var i = 0; i < MAX_STACK_HEIGHT; i++) { - State[0].stack[i] <== [0,0]; - } - State[0].parsing_string <== 0; - State[0].parsing_number <== 0; - - signal parsing_key[DATA_BYTES - MAX_KEY_LENGTH]; - signal parsing_value[DATA_BYTES - MAX_KEY_LENGTH]; - signal is_key_match[DATA_BYTES - MAX_KEY_LENGTH]; - signal is_key_match_for_value[DATA_BYTES+1 - MAX_KEY_LENGTH]; - is_key_match_for_value[0] <== 0; - signal is_next_pair_at_depth[DATA_BYTES - MAX_KEY_LENGTH]; - signal or[DATA_BYTES - MAX_KEY_LENGTH - 1]; - - // initialise first iteration - - // check inside key or value - parsing_key[0] <== InsideKey()(State[0].next_stack[0], State[0].next_parsing_string, State[0].next_parsing_number); - parsing_value[0] <== InsideValueObject()(State[0].next_stack[0], State[0].next_stack[1], State[0].next_parsing_string, State[0].next_parsing_number); - - is_key_match[0] <== 0; - is_next_pair_at_depth[0] <== NextKVPairAtDepth(MAX_STACK_HEIGHT)(State[0].next_stack, data[0], 0); - is_key_match_for_value[1] <== Mux1()([is_key_match_for_value[0] * (1-is_next_pair_at_depth[0]), is_key_match[0] * (1-is_next_pair_at_depth[0])], is_key_match[0]); - is_value_match[0] <== parsing_value[0] * is_key_match_for_value[1]; - - signal masked[DATA_BYTES]; - masked[0] <== data[0] * is_value_match[0]; - - for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) { - if(data_idx < DATA_BYTES - MAX_KEY_LENGTH) { - State[data_idx] = StateUpdate(MAX_STACK_HEIGHT); - State[data_idx].byte <== data[data_idx]; - State[data_idx].stack <== State[data_idx - 1].next_stack; - State[data_idx].parsing_string <== State[data_idx - 1].next_parsing_string; - State[data_idx].parsing_number <== State[data_idx - 1].next_parsing_number; - - // - parsing key - // - parsing value (different for string/numbers and array) - // - key match (key 1, key 2) - // - is next pair - // - is key match for value - // - value_mask - // - mask - - // check if inside key or not - parsing_key[data_idx] <== InsideKey()(State[data_idx].next_stack[0], State[data_idx].next_parsing_string, State[data_idx].next_parsing_number); - // check if inside value - parsing_value[data_idx] <== InsideValueObject()(State[data_idx].next_stack[0], State[data_idx].next_stack[1], 
State[data_idx].next_parsing_string, State[data_idx].next_parsing_number); - - // to get correct value, check: - // - key matches at current index and depth of key is as specified - // - whether next KV pair starts - // - whether key matched for a value (propogate key match until new KV pair of lower depth starts) - - // TODO (autoparallel): this can be optimized i'm sure of it, running without it saves 110k constraints on 1024b (553k with it) - is_key_match[data_idx] <== KeyMatchAtIndex(DATA_BYTES, MAX_KEY_LENGTH, data_idx)(data, key, keyLen, parsing_key[data_idx]); - - // TODO (autoparallel): this could also likely be optimized, costs like 140k constraints itself - is_next_pair_at_depth[data_idx] <== NextKVPairAtDepth(MAX_STACK_HEIGHT)(State[data_idx].next_stack, data[data_idx], 0); - is_key_match_for_value[data_idx+1] <== Mux1()([is_key_match_for_value[data_idx] * (1-is_next_pair_at_depth[data_idx]), is_key_match[data_idx] * (1-is_next_pair_at_depth[data_idx])], is_key_match[data_idx]); - is_value_match[data_idx] <== is_key_match_for_value[data_idx+1] * parsing_value[data_idx]; - - or[data_idx - 1] <== OR()(is_value_match[data_idx], is_value_match[data_idx - 1]); - - // mask = currently parsing value and all subsequent keys matched - masked[data_idx] <== data[data_idx] * or[data_idx - 1]; // TODO here - } else { - masked[data_idx] <== 0; - } - } - step_out[0] <== DataHasher(DATA_BYTES)(masked); -} - -template JsonMaskArrayIndexNIVC(DATA_BYTES, MAX_STACK_HEIGHT) { - signal input step_in[1]; - signal input index; - - signal output step_out[1]; - - // Authenticate the (potentially further masked) plaintext we are passing in - signal input data[DATA_BYTES]; - signal data_hash <== DataHasher(DATA_BYTES)(data); - data_hash === step_in[0]; - - component State[DATA_BYTES]; - State[0] = StateUpdate(MAX_STACK_HEIGHT); - State[0].byte <== data[0]; - for(var i = 0; i < MAX_STACK_HEIGHT; i++) { - State[0].stack[i] <== [0,0]; - } - State[0].parsing_string <== 0; - State[0].parsing_number <== 0; - - signal parsing_array[DATA_BYTES]; - signal or[DATA_BYTES - 1]; - - parsing_array[0] <== InsideArrayIndexObject()(State[0].next_stack[0], State[0].next_stack[1], State[0].next_parsing_string, State[0].next_parsing_number, index); - signal masked[DATA_BYTES]; - masked[0] <== data[0] * parsing_array[0]; - for(var data_idx = 1; data_idx < DATA_BYTES; data_idx++) { - State[data_idx] = StateUpdate(MAX_STACK_HEIGHT); - State[data_idx].byte <== data[data_idx]; - State[data_idx].stack <== State[data_idx - 1].next_stack; - State[data_idx].parsing_string <== State[data_idx - 1].next_parsing_string; - State[data_idx].parsing_number <== State[data_idx - 1].next_parsing_number; - - parsing_array[data_idx] <== InsideArrayIndexObject()(State[data_idx].next_stack[0], State[data_idx].next_stack[1], State[data_idx].next_parsing_string, State[data_idx].next_parsing_number, index); - - or[data_idx - 1] <== OR()(parsing_array[data_idx], parsing_array[data_idx - 1]); - masked[data_idx] <== data[data_idx] * or[data_idx - 1]; - } - step_out[0] <== DataHasher(DATA_BYTES)(masked); -} diff --git a/circuits/json/parser/parser.circom b/circuits/json/parser.circom similarity index 100% rename from circuits/json/parser/parser.circom rename to circuits/json/parser.circom diff --git a/circuits/test/chacha20/chacha20-nivc.test.ts b/circuits/test/chacha20/authentication.test.ts similarity index 67% rename from circuits/test/chacha20/chacha20-nivc.test.ts rename to circuits/test/chacha20/authentication.test.ts index 70a743c..972abf0 100644 
--- a/circuits/test/chacha20/chacha20-nivc.test.ts +++ b/circuits/test/chacha20/authentication.test.ts @@ -1,16 +1,16 @@ import { WitnessTester } from "circomkit"; -import { circomkit, toByte, toUint32Array, uintArray32ToBits } from "../common"; +import { circomkit, PolynomialDigest, toByte, toUint32Array, uintArray32ToBits, modAdd } from "../common"; import { DataHasher } from "../common/poseidon"; import { assert } from "chai"; +import { poseidon1 } from "poseidon-lite"; - -describe("chacha20-nivc", () => { - let circuit: WitnessTester<["key", "nonce", "counter", "plainText", "step_in"], ["step_out"]>; +describe("Plaintext Authentication", () => { + let circuit: WitnessTester<["key", "nonce", "counter", "plaintext", "step_in"], ["step_out"]>; describe("16 block test", () => { it("should perform encryption", async () => { - circuit = await circomkit.WitnessTester(`ChaCha20`, { - file: "chacha20/nivc/chacha20_nivc", - template: "ChaCha20_NIVC", + circuit = await circomkit.WitnessTester(`PlaintextAuthentication`, { + file: "chacha20/authentication", + template: "PlaintextAuthentication", params: [64] // number of bytes for plaintext }); // Test case from RCF https://www.rfc-editor.org/rfc/rfc7539.html#section-2.4.2 @@ -54,18 +54,22 @@ describe("chacha20-nivc", () => { key: toInput(Buffer.from(keyBytes)), nonce: toInput(Buffer.from(nonceBytes)), counter: counterBits, - plainText: plaintextBytes, - step_in: DataHasher(ciphertextBytes) + plaintext: plaintextBytes, + step_in: 0 }, (["step_out"])); - assert.deepEqual(w.step_out, DataHasher(plaintextBytes)); + // Output + let ciphertext_digest = DataHasher(ciphertextBytes); + let plaintext_digest_hashed = poseidon1([PolynomialDigest(plaintextBytes, ciphertext_digest)]); + let output = modAdd(plaintext_digest_hashed - ciphertext_digest, BigInt(0)); + assert.deepEqual(w.step_out, output); }); }); describe("padded plaintext", () => { it("should perform encryption", async () => { - circuit = await circomkit.WitnessTester(`ChaCha20`, { - file: "chacha20/nivc/chacha20_nivc", - template: "ChaCha20_NIVC", + circuit = await circomkit.WitnessTester(`PlaintextAuthentication`, { + file: "chacha20/authentication", + template: "PlaintextAuthentication", params: [128] // number of bytes in plaintext }); // Test case from RCF https://www.rfc-editor.org/rfc/rfc7539.html#section-2.4.2 @@ -108,54 +112,13 @@ describe("chacha20-nivc", () => { key: toInput(Buffer.from(keyBytes)), nonce: toInput(Buffer.from(nonceBytes)), counter: counterBits, - plainText: paddedPlaintextBytes, - step_in: DataHasher(ciphertextBytes) - }, (["step_out"])); - assert.deepEqual(w.step_out, DataHasher(paddedPlaintextBytes)); - }); - }); - - describe("wrong ciphertext hash", () => { - it("should fail", async () => { - circuit = await circomkit.WitnessTester(`ChaCha20`, { - file: "chacha20/nivc/chacha20_nivc", - template: "ChaCha20_NIVC", - params: [128] // number of bytes in plaintext - }); - // Test case from RCF https://www.rfc-editor.org/rfc/rfc7539.html#section-2.4.2 - // the input encoding here is not the most intuitive. inputs are serialized as little endian. - // i.e. "e4e7f110" is serialized as "10 f1 e7 e4". So the way i am reading in inputs is - // to ensure that every 32 bit word is byte reversed before being turned into bits. - // i think this should be easy when we compute witness in rust. 
- let keyBytes = [ - 0x00, 0x01, 0x02, 0x03, - 0x04, 0x05, 0x06, 0x07, - 0x08, 0x09, 0x0a, 0x0b, - 0x0c, 0x0d, 0x0e, 0x0f, - 0x10, 0x11, 0x12, 0x13, - 0x14, 0x15, 0x16, 0x17, - 0x18, 0x19, 0x1a, 0x1b, - 0x1c, 0x1d, 0x1e, 0x1f - ]; - - let nonceBytes = - [ - 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x4a, - 0x00, 0x00, 0x00, 0x00 - ]; - let plaintextBytes = - toByte("Ladies and Gentlemen of the class of '99: If I could offer you only one tip "); - let totalLength = 128; - let paddedPlaintextBytes = plaintextBytes.concat(Array(totalLength - plaintextBytes.length).fill(-1)); - const counterBits = uintArray32ToBits([1])[0] - await circuit.expectFail({ - key: toInput(Buffer.from(keyBytes)), - nonce: toInput(Buffer.from(nonceBytes)), - counter: counterBits, - plainText: paddedPlaintextBytes, + plaintext: paddedPlaintextBytes, step_in: 0 - }); + }, (["step_out"])); + let ciphertext_digest = DataHasher(ciphertextBytes); + let plaintext_digest = poseidon1([PolynomialDigest(plaintextBytes, ciphertext_digest)]); + let output = modAdd(plaintext_digest - ciphertext_digest, BigInt(0)); + assert.deepEqual(w.step_out, output); }); }); }); @@ -175,4 +138,4 @@ export function fromInput(bits: number[]) { buffer.writeUInt32LE(uint32Array[i], i * 4); } return buffer; -} \ No newline at end of file +} diff --git a/circuits/test/chacha20/chacha20.test.ts b/circuits/test/chacha20/chacha20.test.ts index bfe5a38..b16a739 100644 --- a/circuits/test/chacha20/chacha20.test.ts +++ b/circuits/test/chacha20/chacha20.test.ts @@ -1,7 +1,7 @@ import { WitnessTester } from "circomkit"; import { circomkit, hexToBits, toUint32Array, uintArray32ToBits } from "../common"; -describe("chacha20", () => { +describe("ChaCha20", () => { describe("qtr-round", () => { let circuit: WitnessTester<["in"], ["out"]>; it("should perform qtr-round", async () => { @@ -10,16 +10,16 @@ describe("chacha20", () => { template: "QR", }); // Test case from RCF https://www.rfc-editor.org/rfc/rfc7539.html#section-2.1 - let input = [ + let input = [ hexToBits("0x11111111"), hexToBits("0x01020304"), hexToBits("0x9b8d6f43"), hexToBits("0x01234567") ]; - let expected = [ - hexToBits("0xea2a92f4"), - hexToBits("0xcb1cf8ce"), - hexToBits("0x4581472e"), + let expected = [ + hexToBits("0xea2a92f4"), + hexToBits("0xcb1cf8ce"), + hexToBits("0x4581472e"), hexToBits("0x5881c4bb") ]; await circuit.expectPass({ in: input }, { out: expected }); @@ -34,17 +34,17 @@ describe("chacha20", () => { template: "Round", }); // Test case from RCF https://www.rfc-editor.org/rfc/rfc7539.html#section-2.1 - let input = [ - hexToBits("61707865"), hexToBits("3320646e"), hexToBits("79622d32"), hexToBits("6b206574"), - hexToBits("03020100"), hexToBits("07060504"), hexToBits("0b0a0908"), hexToBits("0f0e0d0c"), - hexToBits("13121110"), hexToBits("17161514"), hexToBits("1b1a1918"), hexToBits("1f1e1d1c"), - hexToBits("00000001"), hexToBits("09000000"), hexToBits("4a000000"), hexToBits("00000000") + let input = [ + hexToBits("61707865"), hexToBits("3320646e"), hexToBits("79622d32"), hexToBits("6b206574"), + hexToBits("03020100"), hexToBits("07060504"), hexToBits("0b0a0908"), hexToBits("0f0e0d0c"), + hexToBits("13121110"), hexToBits("17161514"), hexToBits("1b1a1918"), hexToBits("1f1e1d1c"), + hexToBits("00000001"), hexToBits("09000000"), hexToBits("4a000000"), hexToBits("00000000") ]; - let expected = [ - hexToBits("e4e7f110"), hexToBits("15593bd1"), hexToBits("1fdd0f50"), hexToBits("c47120a3"), - hexToBits("c7f4d1c7"), hexToBits("0368c033"), hexToBits("9aaa2204"), hexToBits("4e6cd4c3"), - 
hexToBits("466482d2"), hexToBits("09aa9f07"), hexToBits("05d7c214"), hexToBits("a2028bd9"), - hexToBits("d19c12b5"), hexToBits("b94e16de"), hexToBits("e883d0cb"), hexToBits("4e3c50a2") + let expected = [ + hexToBits("e4e7f110"), hexToBits("15593bd1"), hexToBits("1fdd0f50"), hexToBits("c47120a3"), + hexToBits("c7f4d1c7"), hexToBits("0368c033"), hexToBits("9aaa2204"), hexToBits("4e6cd4c3"), + hexToBits("466482d2"), hexToBits("09aa9f07"), hexToBits("05d7c214"), hexToBits("a2028bd9"), + hexToBits("d19c12b5"), hexToBits("b94e16de"), hexToBits("e883d0cb"), hexToBits("4e3c50a2") ]; await circuit.expectPass({ in: input }, { out: expected }); }); @@ -65,59 +65,60 @@ describe("chacha20", () => { // to ensure that every 32 bit word is byte reversed before being turned into bits. // i think this should be easy when we compute witness in rust. let test = { - keyBytes: Buffer.from( - [ - 0x00, 0x01, 0x02, 0x03, - 0x04, 0x05, 0x06, 0x07, - 0x08, 0x09, 0x0a, 0x0b, - 0x0c, 0x0d, 0x0e, 0x0f, - 0x10, 0x11, 0x12, 0x13, - 0x14, 0x15, 0x16, 0x17, - 0x18, 0x19, 0x1a, 0x1b, - 0x1c, 0x1d, 0x1e, 0x1f - ] - ), - nonceBytes: Buffer.from( - [ - 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x4a, - 0x00, 0x00, 0x00, 0x00 - ] - ), - counter: 1, - plaintextBytes: Buffer.from( - [ - 0x4c, 0x61, 0x64, 0x69, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74, 0x6c, - 0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39, 0x3a, 0x20, 0x49, 0x66, 0x20, 0x49, 0x20, 0x63, - 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x6f, - ] - ), - ciphertextBytes: Buffer.from( - [ - 0x6e, 0x2e, 0x35, 0x9a, 0x25, 0x68, 0xf9, 0x80, 0x41, 0xba, 0x07, 0x28, 0xdd, 0x0d, 0x69, 0x81, - 0xe9, 0x7e, 0x7a, 0xec, 0x1d, 0x43, 0x60, 0xc2, 0x0a, 0x27, 0xaf, 0xcc, 0xfd, 0x9f, 0xae, 0x0b, - 0xf9, 0x1b, 0x65, 0xc5, 0x52, 0x47, 0x33, 0xab, 0x8f, 0x59, 0x3d, 0xab, 0xcd, 0x62, 0xb3, 0x57, - 0x16, 0x39, 0xd6, 0x24, 0xe6, 0x51, 0x52, 0xab, 0x8f, 0x53, 0x0c, 0x35, 0x9f, 0x08, 0x61, 0xd8 - ] - )} + keyBytes: Buffer.from( + [ + 0x00, 0x01, 0x02, 0x03, + 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, + 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, + 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1a, 0x1b, + 0x1c, 0x1d, 0x1e, 0x1f + ] + ), + nonceBytes: Buffer.from( + [ + 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x4a, + 0x00, 0x00, 0x00, 0x00 + ] + ), + counter: 1, + plaintextBytes: Buffer.from( + [ + 0x4c, 0x61, 0x64, 0x69, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6c, 0x61, 0x73, + 0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39, 0x3a, 0x20, 0x49, 0x66, 0x20, 0x49, 0x20, 0x63, + 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66, 0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x6f, + ] + ), + ciphertextBytes: Buffer.from( + [ + 0x6e, 0x2e, 0x35, 0x9a, 0x25, 0x68, 0xf9, 0x80, 0x41, 0xba, 0x07, 0x28, 0xdd, 0x0d, 0x69, 0x81, + 0xe9, 0x7e, 0x7a, 0xec, 0x1d, 0x43, 0x60, 0xc2, 0x0a, 0x27, 0xaf, 0xcc, 0xfd, 0x9f, 0xae, 0x0b, + 0xf9, 0x1b, 0x65, 0xc5, 0x52, 0x47, 0x33, 0xab, 0x8f, 0x59, 0x3d, 0xab, 0xcd, 0x62, 0xb3, 0x57, + 0x16, 0x39, 0xd6, 0x24, 0xe6, 0x51, 0x52, 0xab, 0x8f, 0x53, 0x0c, 0x35, 0x9f, 0x08, 0x61, 0xd8 + ] + ) + } const ciphertextBits = uintArray32ToBits(toUint32Array(test.ciphertextBytes)) const plaintextBits = uintArray32ToBits(toUint32Array(test.plaintextBytes)) - const counterBits = 
uintArray32ToBits([test.counter])[0] - await circuit.expectPass({ - key: uintArray32ToBits(toUint32Array(test.keyBytes)), - nonce: uintArray32ToBits(toUint32Array(test.nonceBytes)), - counter: counterBits, - in: plaintextBits, - }, { out: ciphertextBits }); + const counterBits = uintArray32ToBits([test.counter])[0] + await circuit.expectPass({ + key: uintArray32ToBits(toUint32Array(test.keyBytes)), + nonce: uintArray32ToBits(toUint32Array(test.nonceBytes)), + counter: counterBits, + in: plaintextBits, + }, { out: ciphertextBits }); /// decryption since symmetric - const w2 = await circuit.expectPass({ - key: uintArray32ToBits(toUint32Array(test.keyBytes)), - nonce: uintArray32ToBits(toUint32Array(test.nonceBytes)), - counter: counterBits, - in: ciphertextBits, - }, { out: plaintextBits }); + const w2 = await circuit.expectPass({ + key: uintArray32ToBits(toUint32Array(test.keyBytes)), + nonce: uintArray32ToBits(toUint32Array(test.nonceBytes)), + counter: counterBits, + in: ciphertextBits, + }, { out: plaintextBits }); }); }); }); \ No newline at end of file diff --git a/circuits/test/common/index.ts b/circuits/test/common/index.ts index d65d7c6..045ad32 100644 --- a/circuits/test/common/index.ts +++ b/circuits/test/common/index.ts @@ -26,7 +26,7 @@ export function generateDescription(input: any): string { } export function readJSONInputFile(filename: string, key: any[]): [number[], number[][], number[]] { - const valueStringPath = join(__dirname, "..", "..", "..", "examples", "json", "test", filename); + const valueStringPath = join(__dirname, "..", "..", "..", "examples", "json", filename); let input: number[] = []; let output: number[] = []; @@ -62,6 +62,8 @@ export function readJSONInputFile(filename: string, key: any[]): [number[], numb } import fs from 'fs'; +import { DataHasher } from './poseidon'; +import { poseidon1 } from 'poseidon-lite'; export function readJsonFile(filePath: string): T { // Read the file synchronously @@ -90,13 +92,13 @@ export function toByte(data: string): number[] { export function hexToBytes(hex: any) { return hex.match(/.{1,2}/g).map((byte: any) => parseInt(byte, 16)); - } - +} + export function hexBytesToBigInt(hexBytes: number[]): any[] { -return hexBytes.map(byte => { - let n = BigInt(byte); - return n; -}); + return hexBytes.map(byte => { + let n = BigInt(byte); + return n; + }); } export function hexToBits(hex: string): number[] { @@ -178,26 +180,26 @@ export function binaryStringToHex(binaryString: string): string { * BE order. 
*/ export function uint8ArrayToBitsBE(buff: Uint8Array | number[]) { - const res: number[] = [] - for (let i = 0; i < buff.length; i++) { - for (let j = 0; j < 8; j++) { - if ((buff[i] >> 7-j) & 1) { - res.push(1); - } else { - res.push(0); - } - } - } - return res; + const res: number[] = [] + for (let i = 0; i < buff.length; i++) { + for (let j = 0; j < 8; j++) { + if ((buff[i] >> 7 - j) & 1) { + res.push(1); + } else { + res.push(0); + } + } + } + return res; } export function toUint32Array(buf: Uint8Array) { - const arr = new Uint32Array(buf.length / 4) - const arrView = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) - for(let i = 0;i < arr.length;i++) { - arr[i] = arrView.getUint32(i * 4, true) - } - return arr + const arr = new Uint32Array(buf.length / 4) + const arrView = new DataView(buf.buffer, buf.byteOffset, buf.byteLength) + for (let i = 0; i < arr.length; i++) { + arr[i] = arrView.getUint32(i * 4, true) + } + return arr } /** @@ -205,24 +207,24 @@ export function toUint32Array(buf: Uint8Array) { * LE order. */ export function uintArray32ToBits(uintArray: Uint32Array | number[]) { - const bits: number[][] = [] - for (let i = 0; i < uintArray.length; i++) { - const uint = uintArray[i] - bits.push(numToBitsNumerical(uint)) - } + const bits: number[][] = [] + for (let i = 0; i < uintArray.length; i++) { + const uint = uintArray[i] + bits.push(numToBitsNumerical(uint)) + } - return bits + return bits } export function numToBitsNumerical(num: number, bitCount = 32) { - const bits: number[] = [] - for(let i = 2 ** (bitCount - 1);i >= 1;i /= 2) { - const bit = num >= i ? 1 : 0 - bits.push(bit) - num -= bit * i - } + const bits: number[] = [] + for (let i = 2 ** (bitCount - 1); i >= 1; i /= 2) { + const bit = num >= i ? 1 : 0 + bits.push(bit) + num -= bit * i + } - return bits + return bits } export function bytesToBigInt(bytes: number[] | Uint8Array): bigint { @@ -233,4 +235,273 @@ export function bytesToBigInt(bytes: number[] | Uint8Array): bigint { } return result; +} + +const prime = BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617"); +export function PolynomialDigest(coeffs: number[], input: bigint): bigint { + let result = BigInt(0); + let power = BigInt(1); + + for (let i = 0; i < coeffs.length; i++) { + result = (result + BigInt(coeffs[i]) * power) % prime; + power = (power * input) % prime; + } + + return result; +} + +// HTTP/1.1 200 OK +// content-type: application/json; charset=utf-8 +// content-encoding: gzip +// Transfer-Encoding: chunked +// +// { +// "data": { +// "items": [ +// { +// "data": "Artist", +// "profile": { +// "name": "Taylor Swift" +// } +// } +// ] +// } +// } + +// 320 bytes in the HTTP response +export const http_response_plaintext = [ + 72, 84, 84, 80, 47, 49, 46, 49, 32, 50, 48, 48, 32, 79, 75, 13, 10, 99, 111, 110, 116, 101, 110, + 116, 45, 116, 121, 112, 101, 58, 32, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106, + 115, 111, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 13, 10, 99, + 111, 110, 116, 101, 110, 116, 45, 101, 110, 99, 111, 100, 105, 110, 103, 58, 32, 103, 122, 105, + 112, 13, 10, 84, 114, 97, 110, 115, 102, 101, 114, 45, 69, 110, 99, 111, 100, 105, 110, 103, 58, + 32, 99, 104, 117, 110, 107, 101, 100, 13, 10, 13, 10, 123, 13, 10, 32, 32, 32, 34, 100, 97, 116, + 97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 34, 105, 116, 101, 109, 115, 34, 58, 32, + 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 
32, + 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, + 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, + 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, + 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, + 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, + 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13, + 10, 32, 32, 32, 125, 13, 10, 125, +]; + +export const http_response_ciphertext = [ + 2, 125, 219, 141, 140, 93, 49, 129, 95, 178, 135, 109, 48, 36, 194, 46, 239, 155, 160, 70, 208, + 147, 37, 212, 17, 195, 149, 190, 38, 215, 23, 241, 84, 204, 167, 184, 179, 172, 187, 145, 38, 75, + 123, 96, 81, 6, 149, 36, 135, 227, 226, 254, 177, 90, 241, 159, 0, 230, 183, 163, 210, 88, 133, + 176, 9, 122, 225, 83, 171, 157, 185, 85, 122, 4, 110, 52, 2, 90, 36, 189, 145, 63, 122, 75, 94, + 21, 163, 24, 77, 85, 110, 90, 228, 157, 103, 41, 59, 128, 233, 149, 57, 175, 121, 163, 185, 144, + 162, 100, 17, 34, 9, 252, 162, 223, 59, 221, 106, 127, 104, 11, 121, 129, 154, 49, 66, 220, 65, + 130, 171, 165, 43, 8, 21, 248, 12, 214, 33, 6, 109, 3, 144, 52, 124, 225, 206, 223, 213, 86, 186, + 93, 170, 146, 141, 145, 140, 57, 152, 226, 218, 57, 30, 4, 131, 161, 0, 248, 172, 49, 206, 181, + 47, 231, 87, 72, 96, 139, 145, 117, 45, 77, 134, 249, 71, 87, 178, 239, 30, 244, 156, 70, 118, + 180, 176, 90, 92, 80, 221, 177, 86, 120, 222, 223, 244, 109, 150, 226, 142, 97, 171, 210, 38, + 117, 143, 163, 204, 25, 223, 238, 209, 58, 59, 100, 1, 86, 241, 103, 152, 228, 37, 187, 79, 36, + 136, 133, 171, 41, 184, 145, 146, 45, 192, 173, 219, 146, 133, 12, 246, 190, 5, 54, 99, 155, 8, + 198, 156, 174, 99, 12, 210, 95, 5, 128, 166, 118, 50, 66, 26, 20, 3, 129, 232, 1, 192, 104, 23, + 152, 212, 94, 97, 138, 162, 90, 185, 108, 221, 211, 247, 184, 253, 15, 16, 24, 32, 240, 240, 3, + 148, 89, 30, 54, 161, 131, 230, 161, 217, 29, 229, 251, 33, 220, 230, 102, 131, 245, 27, 141, + 220, 67, 16, 26, +]; + +export const http_start_line = [72, 84, 84, 80, 47, 49, 46, 49, 32, 50, 48, 48, 32, 79, 75]; + +export const http_header_0 = [ + 99, 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 32, 97, 112, 112, 108, 105, 99, 97, + 116, 105, 111, 110, 47, 106, 115, 111, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, + 116, 102, 45, 56, +]; + +export const http_header_1 = [ + 99, 111, 110, 116, 101, 110, 116, 45, 101, 110, 99, 111, 100, 105, 110, 103, 58, 32, 103, 122, + 105, 112, +]; +export const http_body = [ + 123, 13, 10, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, + 32, 34, 105, 116, 101, 109, 115, 34, 58, 32, 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, + 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, + 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, + 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, + 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, + 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, + 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, + 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13, 10, 32, 32, 32, 125, 13, 
10, 125, +]; + +export function strToBytes(str: string): number[] { + return Array.from(str.split('').map(c => c.charCodeAt(0))); +} + +// Enum equivalent for JsonMaskType +export type JsonMaskType = + | { type: "Object", value: number[] } // Changed from Uint8Array to number[] + | { type: "ArrayIndex", value: number }; + +// Constants for the field arithmetic +const PRIME = BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617"); +const ONE = BigInt(1); +const ZERO = BigInt(0); + +export function modAdd(a: bigint, b: bigint): bigint { + return ((a + b) % PRIME + PRIME) % PRIME; +} + +function modMul(a: bigint, b: bigint): bigint { + return (a * b) % PRIME; +} + +export function jsonTreeHasher( + polynomialInput: bigint, + keySequence: JsonMaskType[], + maxStackHeight: number +): [Array<[bigint, bigint]>, Array] { + if (keySequence.length > maxStackHeight) { + throw new Error("Key sequence length exceeds max stack height"); + } + + const stack: Array<[bigint, bigint]> = []; + const treeHashes: Array = []; + + for (const valType of keySequence) { + if (valType.type === "Object") { + stack.push([ONE, ONE]); + let stringHash = ZERO; + let monomial = ONE; + + for (const byte of valType.value) { + stringHash = modAdd(stringHash, modMul(monomial, BigInt(byte))); + monomial = modMul(monomial, polynomialInput); + } + treeHashes.push(stringHash); + } else { // ArrayIndex + treeHashes.push(ZERO); + stack.push([BigInt(2), BigInt(valType.value)]); + } + } + + return [stack, treeHashes]; +} + +export function compressTreeHash( + polynomialInput: bigint, + stackAndTreeHashes: [Array<[bigint, bigint]>, Array] +): bigint { + const [stack, treeHashes] = stackAndTreeHashes; + + if (stack.length !== treeHashes.length) { + throw new Error("Stack and tree hashes must have the same length"); + } + + let accumulated = ZERO; + let monomial = ONE; + + for (let idx = 0; idx < stack.length; idx++) { + accumulated = modAdd(accumulated, modMul(stack[idx][0], monomial)); + monomial = modMul(monomial, polynomialInput); + + accumulated = modAdd(accumulated, modMul(stack[idx][1], monomial)); + monomial = modMul(monomial, polynomialInput); + + accumulated = modAdd(accumulated, modMul(treeHashes[idx], monomial)); + monomial = modMul(monomial, polynomialInput); + } + + return accumulated; +} + +interface ManifestResponse { + version: string; + status: string; + message: string; + headers: Record; + body: { + json: JsonMaskType[]; + }; +} + +interface Manifest { + response: ManifestResponse; +} + +function headersToBytes(headers: Record): number[][] { + const result: number[][] = []; + + for (const [key, values] of Object.entries(headers)) { + for (const value of values) { + // In HTTP/1.1, headers are formatted as "key: value" + const headerLine = `${key}: ${value}`; + result.push(strToBytes(headerLine)); + } + } + + return result; +} + +export function InitialDigest( + manifest: Manifest, + ciphertext: number[], + maxStackHeight: number +): [bigint, bigint] { + // Create a digest of the ciphertext itself + const ciphertextDigest = DataHasher(ciphertext); + + // Digest the start line using the ciphertext_digest as a random input + const startLineBytes = strToBytes( + `${manifest.response.version} ${manifest.response.status} ${manifest.response.message}` + ); + const startLineDigest = PolynomialDigest(startLineBytes, ciphertextDigest); + + // Digest all the headers + const headerBytes = headersToBytes(manifest.response.headers); + const headersDigest = headerBytes.map(bytes => + 
PolynomialDigest(bytes, ciphertextDigest) + ); + + // Digest the JSON sequence + const jsonTreeHash = jsonTreeHasher( + ciphertextDigest, + manifest.response.body.json, + maxStackHeight + ); + const jsonSequenceDigest = compressTreeHash(ciphertextDigest, jsonTreeHash); + + // Put all the digests into an array + const allDigests: bigint[] = [jsonSequenceDigest, startLineDigest, ...headersDigest]; + + // Calculate manifest digest + const manifestDigest = modAdd( + ciphertextDigest, + allDigests.map(d => poseidon1([d])).reduce((a, b) => modAdd(a, b), ZERO) + ); + + return [ciphertextDigest, manifestDigest]; +} + +export function MockManifest(): Manifest { + const headers: Record<string, string[]> = { + "content-type": ["application/json; charset=utf-8"], + "content-encoding": ["gzip"] + }; + + const jsonSequence: JsonMaskType[] = [ + { type: "Object", value: strToBytes("data") }, + { type: "Object", value: strToBytes("items") }, + { type: "ArrayIndex", value: 0 }, + { type: "Object", value: strToBytes("profile") }, + { type: "Object", value: strToBytes("name") } + ]; + + return { + response: { + status: "200", + version: "HTTP/1.1", + message: "OK", + headers: headers, + body: { + json: jsonSequence + } + } + }; } \ No newline at end of file diff --git a/circuits/test/common/poseidon.ts b/circuits/test/common/poseidon.ts index ab18924..ab44b05 100644 --- a/circuits/test/common/poseidon.ts +++ b/circuits/test/common/poseidon.ts @@ -105,4 +105,4 @@ export function DataHasher(input: number[]): bigint { // Return the last hash return hashes[Math.ceil(input.length / 16)]; -} \ No newline at end of file +} diff --git a/circuits/test/full/full.test.ts b/circuits/test/full/full.test.ts index 507fa1f..13ca7db 100644 --- a/circuits/test/full/full.test.ts +++ b/circuits/test/full/full.test.ts @@ -1,7 +1,7 @@ import { assert } from "chai"; -import { circomkit, WitnessTester, toByte, uintArray32ToBits, toUint32Array } from "../common"; -import { DataHasher } from "../common/poseidon"; -import { toInput } from "../chacha20/chacha20-nivc.test"; +import { circomkit, WitnessTester, uintArray32ToBits, http_response_plaintext, http_response_ciphertext, http_start_line, http_header_0, http_header_1, http_body, PolynomialDigest, strToBytes, JsonMaskType, jsonTreeHasher, compressTreeHash, modAdd, InitialDigest, MockManifest } from "../common"; +import { toInput } from "../chacha20/authentication.test"; +import { poseidon1 } from "poseidon-lite"; // HTTP/1.1 200 OK // content-type: application/json; charset=utf-8 @@ -22,286 +22,123 @@ import { toInput } from "../chacha20/chacha20-nivc.test"; // } // 320 bytes in the HTTP response -const http_response_plaintext = [ - 72, 84, 84, 80, 47, 49, 46, 49, 32, 50, 48, 48, 32, 79, 75, 13, 10, 99, 111, 110, 116, 101, 110, - 116, 45, 116, 121, 112, 101, 58, 32, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106, - 115, 111, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 13, 10, 99, - 111, 110, 116, 101, 110, 116, 45, 101, 110, 99, 111, 100, 105, 110, 103, 58, 32, 103, 122, 105, - 112, 13, 10, 84, 114, 97, 110, 115, 102, 101, 114, 45, 69, 110, 99, 111, 100, 105, 110, 103, 58, - 32, 99, 104, 117, 110, 107, 101, 100, 13, 10, 13, 10, 123, 13, 10, 32, 32, 32, 34, 100, 97, 116, - 97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 34, 105, 116, 101, 109, 115, 34, 58, 32, - 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, 116, 105, 
115, - 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, - 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, - 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, - 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13, - 10, 32, 32, 32, 125, 13, 10, 125]; -const chacha20_http_response_ciphertext = [ - 2, 125, 219, 141, 140, 93, 49, 129, 95, 178, 135, 109, 48, 36, 194, 46, 239, 155, 160, 70, 208, 147, 37, 212, 17, 195, 149, - 190, 38, 215, 23, 241, 84, 204, 167, 184, 179, 172, 187, 145, 38, 75, 123, 96, 81, 6, 149, 36, 135, 227, 226, 254, 177, 90, - 241, 159, 0, 230, 183, 163, 210, 88, 133, 176, 9, 122, 225, 83, 171, 157, 185, 85, 122, 4, 110, 52, 2, 90, 36, 189, 145, 63, - 122, 75, 94, 21, 163, 24, 77, 85, 110, 90, 228, 157, 103, 41, 59, 128, 233, 149, 57, 175, 121, 163, 185, 144, 162, 100, 17, - 34, 9, 252, 162, 223, 59, 221, 106, 127, 104, 11, 121, 129, 154, 49, 66, 220, 65, 130, 171, 165, 43, 8, 21, 248, 12, 214, 33, - 6, 109, 3, 144, 52, 124, 225, 206, 223, 213, 86, 186, 93, 170, 146, 141, 145, 140, 57, 152, 226, 218, 57, 30, 4, 131, 161, 0, - 248, 172, 49, 206, 181, 47, 231, 87, 72, 96, 139, 145, 117, 45, 77, 134, 249, 71, 87, 178, 239, 30, 244, 156, 70, 118, 180, - 176, 90, 92, 80, 221, 177, 86, 120, 222, 223, 244, 109, 150, 226, 142, 97, 171, 210, 38, 117, 143, 163, 204, 25, 223, 238, - 209, 58, 59, 100, 1, 86, 241, 103, 152, 228, 37, 187, 79, 36, 136, 133, 171, 41, 184, 145, 146, 45, 192, 173, 219, 146, 133, - 12, 246, 190, 5, 54, 99, 155, 8, 198, 156, 174, 99, 12, 210, 95, 5, 128, 166, 118, 50, 66, 26, 20, 3, 129, 232, 1, 192, 104, - 23, 152, 212, 94, 97, 138, 162, 90, 185, 108, 221, 211, 247, 184, 253, 15, 16, 24, 32, 240, 240, 3, 148, 89, 30, 54, 161, - 131, 230, 161, 217, 29, 229, 251, 33, 220, 230, 102, 131, 245, 27, 141, 220, 67, 16, 26 -]; +const DATA_BYTES = 1024; +const MAX_NUMBER_OF_HEADERS = 25; +const MAX_STACK_HEIGHT = 10; -const http_start_line = [ - 72, 84, 84, 80, 47, 49, 46, 49, 32, 50, 48, 48, 32, 79, 75, 13, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, -]; +// These `check_*` are currently from Rust to ensure we have parity +const check_ciphertext_digest = BigInt("5947802862726868637928743536818722886587721698845887498686185738472802646104"); +const check_init_nivc_input = BigInt("10288873638660630335427615297930270928433661836597941144520949467184902553219"); -const http_header_0 = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 99, 111, 110, 116, 101, 110, 116, 45, 116, - 121, 112, 101, 58, 32, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106, 115, 111, - 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 13, 10, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; -const http_header_1 = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 99, 111, 110, 116, 101, 110, 116, 45, 101, 110, 99, 111, 100, 105, 110, 103, 58, 32, 103, 122, - 105, 112, 13, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; -const http_body = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 34, - 100, 97, 116, 97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 34, 105, 116, 101, 109, - 115, 34, 58, 32, 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, - 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 34, 112, 114, 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, - 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, - 32, 32, 93, 13, 10, 32, 32, 32, 125, 13, 10, 125, -]; -const lengthDiff = http_response_plaintext.length - http_body.length; - -// Create an array of zeros with the length difference -const padding = new Array(lengthDiff).fill(0); - -// Concatenate the padding with http_body 
-const padded_http_body = [...padding, ...http_body]; - -const http_response_hash = DataHasher(http_response_plaintext); -const http_start_line_hash = DataHasher(http_start_line); -const http_header_0_hash = DataHasher(http_header_0); -const http_header_1_hash = DataHasher(http_header_1); -const http_body_mask_hash = DataHasher(padded_http_body); - - -const json_key0_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 34, 105, 116, 101, 109, 115, 34, 58, 32, 91, - 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, 116, - 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, - 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, - 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13, 0, - 0, 0, 0, 0, 0, 0, 0, -]; -const json_key0_mask_hash = DataHasher(json_key0_mask); - -const json_key1_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 13, 10, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, 102, 105, 108, 101, 34, - 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, - 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; -const json_key1_mask_hash = DataHasher(json_key1_mask); - -const json_arr_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, - 97, 116, 97, 34, 58, 32, 34, 65, 
114, 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, - 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, - 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; -const json_arr_mask_hash = DataHasher(json_arr_mask); - -const json_key2_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, - 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; -const json_key2_mask_hash = DataHasher(json_key2_mask); - -const json_key3_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, - 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, -]; -const json_key3_mask_hash = DataHasher(json_key3_mask); - -describe("NIVC_FULL", async () => { - let chacha20Circuit: WitnessTester<["key", "nonce", "counter", "plainText", "step_in"], ["step_out"]>; - let httpCircuit: WitnessTester<["step_in", "data", "start_line_hash", "header_hashes", "body_hash"], ["step_out"]>; - let json_mask_object_circuit: WitnessTester<["step_in", "data", "key", "keyLen"], ["step_out"]>; - let json_mask_arr_circuit: WitnessTester<["step_in", "data", "index"], ["step_out"]>; - let extract_value_circuit: WitnessTester<["step_in", "data"], ["step_out"]>; - - const MAX_NUMBER_OF_HEADERS = 2; - const DATA_BYTES = 320; - const MAX_STACK_HEIGHT = 5; - const MAX_KEY_LENGTH = 8; - const MAX_VALUE_LENGTH = 32; +describe("Example NIVC Proof", async () => { + let 
PlaintextAuthentication: WitnessTester<["step_in", "plaintext", "key", "nonce", "counter"], ["step_out"]>; + let HTTPVerification: WitnessTester<["step_in", "ciphertext_digest", "data", "main_digests"], ["step_out"]>; + let JSONExtraction: WitnessTester<["step_in", "ciphertext_digest", "data", "sequence_digest", "value_digest"], ["step_out"]>; before(async () => { - chacha20Circuit = await circomkit.WitnessTester("CHACHA20", { - file: "chacha20/nivc/chacha20_nivc", - template: "ChaCha20_NIVC", - params: [320] + PlaintextAuthentication = await circomkit.WitnessTester("PlaintextAuthentication", { + file: "chacha20/authentication", + template: "PlaintextAuthentication", + params: [DATA_BYTES] }); - console.log("#constraints (CHACHA20):", await chacha20Circuit.getConstraintCount()); - httpCircuit = await circomkit.WitnessTester(`HttpNIVC`, { + HTTPVerification = await circomkit.WitnessTester("HTTPVerification", { file: "http/verification", template: "HTTPVerification", params: [DATA_BYTES, MAX_NUMBER_OF_HEADERS], }); - console.log("#constraints (HTTPVerification):", await httpCircuit.getConstraintCount()); - - json_mask_object_circuit = await circomkit.WitnessTester(`JsonMaskObjectNIVC`, { - file: "json/nivc/masker", - template: "JsonMaskObjectNIVC", - params: [DATA_BYTES, MAX_STACK_HEIGHT, MAX_KEY_LENGTH], - }); - console.log("#constraints (JSON-MASK-OBJECT):", await json_mask_object_circuit.getConstraintCount()); - json_mask_arr_circuit = await circomkit.WitnessTester(`JsonMaskArrayIndexNIVC`, { - file: "json/nivc/masker", - template: "JsonMaskArrayIndexNIVC", + JSONExtraction = await circomkit.WitnessTester(`JSONExtraction`, { + file: "json/extraction", + template: "JSONExtraction", params: [DATA_BYTES, MAX_STACK_HEIGHT], }); - console.log("#constraints (JSON-MASK-ARRAY-INDEX):", await json_mask_arr_circuit.getConstraintCount()); - - extract_value_circuit = await circomkit.WitnessTester(`JsonMaskExtractFinal`, { - file: "json/nivc/extractor", - template: "MaskExtractFinal", - params: [DATA_BYTES, MAX_VALUE_LENGTH], - }); - console.log("#constraints (JSON-MASK-EXTRACT-FINAL):", await extract_value_circuit.getConstraintCount()); }); - it("NIVC_CHAIN", async () => { - const init_nivc_input = DataHasher(chacha20_http_response_ciphertext); - // Run ChaCha20 - const counterBits = uintArray32ToBits([1])[0] - const keyIn = toInput(Buffer.from(Array(32).fill(0))); - const nonceIn = toInput(Buffer.from([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a, 0x00, 0x00, 0x00, 0x00])); - let chacha20 = await chacha20Circuit.compute({ key: keyIn, nonce: nonceIn, counter: counterBits, plainText: http_response_plaintext, step_in: init_nivc_input }, ["step_out"]); - console.log("ChaCha20 `step_out`:", chacha20.step_out); - assert.deepEqual(http_response_hash, chacha20.step_out); - - let http = await httpCircuit.compute({ step_in: chacha20.step_out, data: http_response_plaintext, start_line_hash: http_start_line_hash, header_hashes: [http_header_0_hash, http_header_1_hash], body_hash: http_body_mask_hash }, ["step_out"]); - console.log("HttpNIVC `step_out`:", http.step_out); + it("Spotify Example", async () => { + // Run PlaintextAuthentication - let key0 = [100, 97, 116, 97, 0, 0, 0, 0]; // "data" - let key0Len = 4; - let key1 = [105, 116, 101, 109, 115, 0, 0, 0]; // "items" - let key1Len = 5; - let key2 = [112, 114, 111, 102, 105, 108, 101, 0]; // "profile" - let key2Len = 7; - let key3 = [110, 97, 109, 101, 0, 0, 0, 0]; // "name" - let key3Len = 4; + let http_response_padded = 
http_response_plaintext.concat(Array(DATA_BYTES - http_response_plaintext.length).fill(-1)); + let http_response_0_padded = http_response_plaintext.concat(Array(DATA_BYTES - http_start_line.length).fill(0)); + let ciphertext_padded = http_response_ciphertext.concat(Array(DATA_BYTES - http_response_ciphertext.length).fill(-1)); - let json_extract_key0 = await json_mask_object_circuit.compute({ step_in: http.step_out, data: http_body, key: key0, keyLen: key0Len }, ["step_out"]); - console.log("JSON Extract key0 `step_out`:", json_extract_key0.step_out); - assert.deepEqual(json_extract_key0.step_out, json_key0_mask_hash); - let json_extract_key1 = await json_mask_object_circuit.compute({ step_in: json_extract_key0.step_out, data: json_key0_mask, key: key1, keyLen: key1Len }, ["step_out"]); - assert.deepEqual(json_extract_key1.step_out, json_key1_mask_hash); - console.log("JSON Extract key1 `step_out`:", json_extract_key1.step_out); + const [ciphertext_digest, init_nivc_input] = InitialDigest(MockManifest(), ciphertext_padded, MAX_STACK_HEIGHT); + assert.deepEqual(ciphertext_digest, check_ciphertext_digest); + assert.deepEqual(init_nivc_input, check_init_nivc_input); - let json_extract_arr = await json_mask_arr_circuit.compute({ step_in: json_extract_key1.step_out, data: json_key1_mask, index: 0 }, ["step_out"]); - assert.deepEqual(json_extract_arr.step_out, json_arr_mask_hash); - console.log("JSON Extract arr `step_out`:", json_extract_arr.step_out); - - let json_extract_key2 = await json_mask_object_circuit.compute({ step_in: json_extract_arr.step_out, data: json_arr_mask, key: key2, keyLen: key2Len }, ["step_out"]); - assert.deepEqual(json_extract_key2.step_out, json_key2_mask_hash); - console.log("JSON Extract key2 `step_out`:", json_extract_key2.step_out); - - let json_extract_key3 = await json_mask_object_circuit.compute({ step_in: json_extract_key2.step_out, data: json_key2_mask, key: key3, keyLen: key3Len }, ["step_out"]); - assert.deepEqual(json_extract_key3.step_out, json_key3_mask_hash); - console.log("JSON Extract key3 `step_out`:", json_extract_key3.step_out); - - // TODO (autoparallel): we need to rethink extraction here. 
- let finalOutput = toByte("\"Taylor Swift\""); - let finalOutputPadded = finalOutput.concat(Array(Math.max(0, MAX_VALUE_LENGTH - finalOutput.length)).fill(0)); - let final_value_hash = DataHasher(finalOutputPadded); - let extractValue = await extract_value_circuit.compute({ step_in: json_extract_key3.step_out, data: json_key3_mask }, ["step_out"]); - console.log("finalValue", extractValue.step_out); - assert.deepEqual(extractValue.step_out, final_value_hash); + const counterBits = uintArray32ToBits([1])[0] + const keyIn = toInput(Buffer.from(Array(32).fill(0))); + const nonceIn = toInput(Buffer.from([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a, 0x00, 0x00, 0x00, 0x00])); + let plaintext_authentication = await PlaintextAuthentication.compute({ + step_in: init_nivc_input, + plaintext: http_response_padded, + key: keyIn, + nonce: nonceIn, + counter: counterBits, + }, ["step_out"]); + + const http_response_plaintext_digest = PolynomialDigest(http_response_0_padded, ciphertext_digest); + const http_response_plaintext_digest_hashed = poseidon1([http_response_plaintext_digest]); + const correct_plaintext_authentication_step_out = modAdd(init_nivc_input - ciphertext_digest, http_response_plaintext_digest_hashed); + assert.deepEqual(plaintext_authentication.step_out, correct_plaintext_authentication_step_out); + + // Run HTTPVerification + const start_line_digest = PolynomialDigest(http_start_line, ciphertext_digest); + const header_0_digest = PolynomialDigest(http_header_0, ciphertext_digest); + const header_1_digest = PolynomialDigest(http_header_1, ciphertext_digest); + + let main_digests = Array(MAX_NUMBER_OF_HEADERS + 1).fill(0); + main_digests[0] = start_line_digest; + main_digests[1] = header_0_digest; + main_digests[2] = header_1_digest; + + let step_in = BigInt(plaintext_authentication.step_out.toString(10)); + let http_verification = await HTTPVerification.compute({ + step_in, + ciphertext_digest, + data: http_response_padded, + main_digests, + }, ["step_out"]); + + const padded_http_body = http_body.concat(Array(DATA_BYTES - http_body.length).fill(0)); + let http_verification_step_out = BigInt((http_verification.step_out as number[])[0]); + let body_digest = PolynomialDigest(http_body, ciphertext_digest); + + const body_digest_hashed = poseidon1([body_digest]); + const start_line_digest_digest_hashed = poseidon1([start_line_digest]); + const header_0_digest_hashed = poseidon1([header_0_digest]); + const header_1_digest_hashed = poseidon1([header_1_digest]); + const correct_http_verification_step_out = modAdd(step_in - start_line_digest_digest_hashed - header_0_digest_hashed - header_1_digest_hashed - http_response_plaintext_digest_hashed, body_digest_hashed); + assert.deepEqual(http_verification_step_out, correct_http_verification_step_out); + + // Run JSONExtraction + const KEY0 = strToBytes("data"); + const KEY1 = strToBytes("items"); + const KEY2 = strToBytes("profile"); + const KEY3 = strToBytes("name"); + const targetValue = strToBytes("Taylor Swift"); + const keySequence: JsonMaskType[] = [ + { type: "Object", value: KEY0 }, + { type: "Object", value: KEY1 }, + { type: "ArrayIndex", value: 0 }, + { type: "Object", value: KEY2 }, + { type: "Object", value: KEY3 }, + ]; + + const [stack, treeHashes] = jsonTreeHasher(ciphertext_digest, keySequence, MAX_STACK_HEIGHT); + const sequence_digest = compressTreeHash(ciphertext_digest, [stack, treeHashes]); + const value_digest = PolynomialDigest(targetValue, ciphertext_digest); + + let json_extraction = await JSONExtraction.compute({ + 
step_in: http_verification_step_out, + ciphertext_digest, + data: padded_http_body, + value_digest, + sequence_digest, + }, ["step_out"]); + assert.deepEqual(json_extraction.step_out, value_digest); }); }); diff --git a/circuits/test/http/verification.test.ts b/circuits/test/http/verification.test.ts index 065ab51..ce97d66 100644 --- a/circuits/test/http/verification.test.ts +++ b/circuits/test/http/verification.test.ts @@ -1,6 +1,6 @@ -import { circomkit, WitnessTester, toByte } from "../common"; +import { circomkit, WitnessTester, PolynomialDigest, http_response_plaintext, http_start_line, http_header_0, http_header_1, http_body, modAdd } from "../common"; import { assert } from "chai"; -import { DataHasher } from "../common/poseidon"; +import { poseidon1 } from "poseidon-lite"; // HTTP/1.1 200 OK // content-type: application/json; charset=utf-8 @@ -20,148 +20,127 @@ import { DataHasher } from "../common/poseidon"; // } // } -// 320 bytes in the HTTP response -let TEST_HTTP = [ - 72, 84, 84, 80, 47, 49, 46, 49, 32, 50, 48, 48, 32, 79, 75, 13, 10, 99, 111, 110, 116, 101, 110, - 116, 45, 116, 121, 112, 101, 58, 32, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106, - 115, 111, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 13, 10, 99, - 111, 110, 116, 101, 110, 116, 45, 101, 110, 99, 111, 100, 105, 110, 103, 58, 32, 103, 122, 105, - 112, 13, 10, 84, 114, 97, 110, 115, 102, 101, 114, 45, 69, 110, 99, 111, 100, 105, 110, 103, 58, - 32, 99, 104, 117, 110, 107, 101, 100, 13, 10, 13, 10, 123, 13, 10, 32, 32, 32, 34, 100, 97, 116, - 97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 34, 105, 116, 101, 109, 115, 34, 58, 32, - 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, - 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, - 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, - 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, - 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13, - 10, 32, 32, 32, 125, 13, 10, 125]; - -const TEST_HTTP_START_LINE = [ - 72, 84, 84, 80, 47, 49, 46, 49, 32, 50, 48, 48, 32, 79, 75, 13, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, -]; - -const TEST_HTTP_HEADER_0 = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 99, 111, 110, 116, 
101, 110, 116, 45, 116, - 121, 112, 101, 58, 32, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106, 115, 111, - 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 13, 10, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; - -const TEST_HTTP_HEADER_1 = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 99, 111, 110, 116, 101, 110, 116, 45, 101, 110, 99, 111, 100, 105, 110, 103, 58, 32, 103, 122, - 105, 112, 13, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; - -const TEST_HTTP_BODY = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 34, - 100, 97, 116, 97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 34, 105, 116, 101, 109, - 115, 34, 58, 32, 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, - 114, 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 34, 112, 114, 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, - 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, - 32, 32, 32, 32, 32, 32, 93, 13, 10, 32, 32, 32, 125, 13, 10, 125, -]; - const DATA_BYTES = 320; const MAX_NUMBER_OF_HEADERS = 2; -describe("HTTP Verfication", async () => { - let dataHasher: WitnessTester<["in"], ["out"]>; - let httpNivc: WitnessTester<["step_in", "data", "start_line_hash", "header_hashes", 
"body_hash"], ["step_out"]>; +describe("HTTP Verification", async () => { + let HTTPVerification: WitnessTester<["step_in", "data", "main_digests", "ciphertext_digest"], ["step_out"]>; before(async () => { - dataHasher = await circomkit.WitnessTester(`DataHasher`, { - file: "utils/hash", - template: "DataHasher", - params: [320], - }); - - httpNivc = await circomkit.WitnessTester("http_nivc", { + HTTPVerification = await circomkit.WitnessTester("http_nivc", { file: "http/verification", template: "HTTPVerification", params: [DATA_BYTES, MAX_NUMBER_OF_HEADERS] }); }); + const mock_ct_digest = poseidon1([69]); - it("witness: TEST_HTTP, single header", async () => { + it("witness: http_response_plaintext, no header", async () => { // Get all the hashes we need - // Get the data hash - let data_hash = await dataHasher.compute({ in: TEST_HTTP }, ["out"]); - // Get the start line hash - let start_line_hash = await dataHasher.compute({ in: TEST_HTTP_START_LINE }, ["out"]) - // Get the header hash - let header_hash = await dataHasher.compute({ in: TEST_HTTP_HEADER_0 }, ["out"]); - // Get the body hash - let body_hash = await dataHasher.compute({ in: TEST_HTTP_BODY }, ["out"]); + let data_digest = PolynomialDigest(http_response_plaintext, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); + + // Compute the HTTP info digest + let start_line_digest = PolynomialDigest(http_start_line, mock_ct_digest); + let start_line_digest_hashed = poseidon1([start_line_digest]); + let body_digest = PolynomialDigest(http_body, mock_ct_digest); + let body_digest_hashed = poseidon1([body_digest]); + // Use `modAdd` to get back a number between 0 and PRIME + let output_difference = modAdd(body_digest_hashed - start_line_digest_hashed - data_digest_hashed, BigInt(0)); // Run the HTTP circuit // POTENTIAL BUG: I didn't get this to work with `expectPass` as it didn't compute `step_out` that way??? 
- let http_nivc_compute = await httpNivc.compute({ - step_in: data_hash.out, - data: TEST_HTTP, - start_line_hash: start_line_hash.out, - header_hashes: [header_hash.out, 0], - body_hash: body_hash.out, + let http_nivc_compute = await HTTPVerification.compute({ + step_in: 0, // This doesn't really matter for this test + data: http_response_plaintext, + main_digests: [start_line_digest].concat(Array(2).fill(0)), + ciphertext_digest: mock_ct_digest }, ["step_out"]); + // I fucking hate circomkit + assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], output_difference); + }); + + it("witness: http_response_plaintext, one header", async () => { + // Get all the hashes we need + let data_digest = PolynomialDigest(http_response_plaintext, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); + + // Compute the HTTP info digest + let start_line_digest = PolynomialDigest(http_start_line, mock_ct_digest); + let start_line_digest_hashed = poseidon1([start_line_digest]); + let header_0_digest = PolynomialDigest(http_header_0, mock_ct_digest); + let header_0_digest_hashed = poseidon1([header_0_digest]); + let body_digest = PolynomialDigest(http_body, mock_ct_digest); + let body_digest_hashed = poseidon1([body_digest]); + // Use `modAdd` to get back a number between 0 and PRIME + let output_difference = modAdd(body_digest_hashed - start_line_digest_hashed - header_0_digest_hashed - data_digest_hashed, BigInt(0)); - assert.deepEqual(http_nivc_compute.step_out, body_hash.out); + // Run the HTTP circuit + // POTENTIAL BUG: I didn't get this to work with `expectPass` as it didn't compute `step_out` that way??? + let http_nivc_compute = await HTTPVerification.compute({ + step_in: 0, // This doesn't really matter for this test + data: http_response_plaintext, + main_digests: [start_line_digest, header_0_digest].concat(Array(1).fill(0)), + ciphertext_digest: mock_ct_digest + }, ["step_out"]); + // I fucking hate circomkit + assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], output_difference); }); - it("witness: TEST_HTTP, two headers", async () => { + it("witness: http_response_plaintext, two headers", async () => { // Get all the hashes we need - // Get the data hash - let data_hash = await dataHasher.compute({ in: TEST_HTTP }, ["out"]); - // Get the start line hash - let start_line_hash = await dataHasher.compute({ in: TEST_HTTP_START_LINE }, ["out"]) - // Get the header hashes - let header_0_hash = await dataHasher.compute({ in: TEST_HTTP_HEADER_0 }, ["out"]); - let header_1_hash = await dataHasher.compute({ in: TEST_HTTP_HEADER_1 }, ["out"]); - // Get the body hash - let body_hash = await dataHasher.compute({ in: TEST_HTTP_BODY }, ["out"]); + let data_digest = PolynomialDigest(http_response_plaintext, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); + + // Compute the HTTP info digest + let start_line_digest = PolynomialDigest(http_start_line, mock_ct_digest); + let start_line_digest_hashed = poseidon1([start_line_digest]); + let header_0_digest = PolynomialDigest(http_header_0, mock_ct_digest); + let header_0_digest_hashed = poseidon1([header_0_digest]); + let header_1_digest = PolynomialDigest(http_header_1, mock_ct_digest); + let header_1_digest_hashed = poseidon1([header_1_digest]); + let body_digest = PolynomialDigest(http_body, mock_ct_digest); + let body_digest_hashed = poseidon1([body_digest]); + // Use `modAdd` to get back a number between 0 and PRIME + let output_difference = modAdd(body_digest_hashed - start_line_digest_hashed - 
header_0_digest_hashed - header_1_digest_hashed - data_digest_hashed, BigInt(0)); // Run the HTTP circuit // POTENTIAL BUG: I didn't get this to work with `expectPass` as it didn't compute `step_out` that way??? - let http_nivc_compute = await httpNivc.compute({ - step_in: data_hash.out, - data: TEST_HTTP, - start_line_hash: start_line_hash.out, - header_hashes: [header_0_hash.out, header_1_hash.out], - body_hash: body_hash.out, + let http_nivc_compute = await HTTPVerification.compute({ + step_in: 0, // This doesn't really matter for this test + data: http_response_plaintext, + main_digests: [start_line_digest, header_0_digest, header_1_digest], + ciphertext_digest: mock_ct_digest }, ["step_out"]); + // I fucking hate circomkit + assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], output_difference); + }); - assert.deepEqual(http_nivc_compute.step_out, body_hash.out); + it("witness: http_response_plaintext, two headers, order does not matter", async () => { + // Get all the hashes we need + let data_digest = PolynomialDigest(http_response_plaintext, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); + + // Compute the HTTP info digest + let start_line_digest = PolynomialDigest(http_start_line, mock_ct_digest); + let start_line_digest_hashed = poseidon1([start_line_digest]); + let header_0_digest = PolynomialDigest(http_header_0, mock_ct_digest); + let header_0_digest_hashed = poseidon1([header_0_digest]); + let header_1_digest = PolynomialDigest(http_header_1, mock_ct_digest); + let header_1_digest_hashed = poseidon1([header_1_digest]); + let body_digest = PolynomialDigest(http_body, mock_ct_digest); + let body_digest_hashed = poseidon1([body_digest]); + // Use `modAdd` to get back a number between 0 and PRIME + let output_difference = modAdd(body_digest_hashed - start_line_digest_hashed - header_0_digest_hashed - header_1_digest_hashed - data_digest_hashed, BigInt(0)); + + // Run the HTTP circuit + // POTENTIAL BUG: I didn't get this to work with `expectPass` as it didn't compute `step_out` that way??? 
+ let http_nivc_compute = await HTTPVerification.compute({ + step_in: 0, // This doesn't really matter for this test + data: http_response_plaintext, + main_digests: [header_1_digest, start_line_digest, header_0_digest], + ciphertext_digest: mock_ct_digest + }, ["step_out"]); + // I fucking hate circomkit + assert.deepEqual((http_nivc_compute.step_out as BigInt[])[0], output_difference); }); }); \ No newline at end of file diff --git a/circuits/test/json/extraction.test.ts b/circuits/test/json/extraction.test.ts new file mode 100644 index 0000000..152df1a --- /dev/null +++ b/circuits/test/json/extraction.test.ts @@ -0,0 +1,194 @@ +import { poseidon1, poseidon2 } from "poseidon-lite"; +import { circomkit, WitnessTester, readJSONInputFile, strToBytes, JsonMaskType, jsonTreeHasher, compressTreeHash, PolynomialDigest, modAdd } from "../common"; + +describe("JSON Extraction", () => { + let hash_parser: WitnessTester<["step_in", "ciphertext_digest", "data", "sequence_digest", "value_digest"]>; + const mock_ct_digest = poseidon2([69, 420]); + + it(`input: array_only`, async () => { + let filename = "array_only"; + let [input, _keyUnicode, _output] = readJSONInputFile(`${filename}.json`, []); + const MAX_STACK_HEIGHT = 3; + + hash_parser = await circomkit.WitnessTester(`Parser`, { + file: "json/extraction", + template: "JSONExtraction", + params: [input.length, MAX_STACK_HEIGHT], + }); + + // Test `42` in 0th slot + let targetValue = strToBytes("42"); + let keySequence: JsonMaskType[] = [ + { type: "ArrayIndex", value: 0 }, + ]; + let [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT); + let sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + let sequence_digest_hashed = poseidon1([sequence_digest]); + let value_digest = PolynomialDigest(targetValue, mock_ct_digest); + let data_digest = PolynomialDigest(input, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); + let step_in = modAdd(sequence_digest_hashed, data_digest_hashed); + + await hash_parser.expectPass({ + data: input, + ciphertext_digest: mock_ct_digest, + sequence_digest, + value_digest, + step_in + }); + console.log("> First subtest passed."); + + // Test `"b"` in 1st slot object + targetValue = strToBytes("b"); + keySequence = [ + { type: "ArrayIndex", value: 1 }, + { type: "Object", value: strToBytes("a") }, + ]; + [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT); + sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + sequence_digest_hashed = poseidon1([sequence_digest]); + value_digest = PolynomialDigest(targetValue, mock_ct_digest); + step_in = modAdd(sequence_digest_hashed, data_digest_hashed); + + await hash_parser.expectPass({ + data: input, + ciphertext_digest: mock_ct_digest, + sequence_digest, + value_digest, + step_in + }); + console.log("> Second subtest passed."); + }); + + it(`input: value_array`, async () => { + let filename = "value_array"; + let [input, _keyUnicode, _output] = readJSONInputFile(`${filename}.json`, []); + const MAX_STACK_HEIGHT = 3; + + hash_parser = await circomkit.WitnessTester(`Parser`, { + file: "json/extraction", + template: "JSONExtraction", + params: [input.length, MAX_STACK_HEIGHT], + }); + + // Test `420` in "k"'s 0th slot + let targetValue = strToBytes("420"); + let keySequence: JsonMaskType[] = [ + { type: "Object", value: strToBytes("k") }, + { type: "ArrayIndex", value: 0 }, + ]; + let [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, 
MAX_STACK_HEIGHT); + let sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + let sequence_digest_hashed = poseidon1([sequence_digest]); + let data_digest = PolynomialDigest(input, mock_ct_digest); + let data_digest_hashed = poseidon1([data_digest]); + let value_digest = PolynomialDigest(targetValue, mock_ct_digest); + let step_in = modAdd(sequence_digest_hashed, data_digest_hashed); + + await hash_parser.expectPass({ + data: input, + ciphertext_digest: mock_ct_digest, + sequence_digest, + value_digest, + step_in + }); + console.log("> First subtest passed."); + + // Test `"d"` in "b"'s 3rd slot + targetValue = strToBytes("d"); + keySequence = [ + { type: "Object", value: strToBytes("b") }, + { type: "ArrayIndex", value: 3 }, + ]; + [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT); + sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + sequence_digest_hashed = poseidon1([sequence_digest]); + value_digest = PolynomialDigest(targetValue, mock_ct_digest); + step_in = modAdd(sequence_digest_hashed, data_digest_hashed); + await hash_parser.expectPass({ + data: input, + ciphertext_digest: mock_ct_digest, + sequence_digest, + value_digest, + step_in + }); + console.log("> Second subtest passed."); + }); + + it(`input: value_array_object`, async () => { + let filename = "value_array_object"; + let [input, keyUnicode, output] = readJSONInputFile(`${filename}.json`, []); + hash_parser = await circomkit.WitnessTester(`Parser`, { + file: "json/extraction", + template: "JSONExtraction", + params: [input.length, 5], + }); + + const KEY0 = strToBytes("a"); + const KEY1 = strToBytes("b"); + const targetValue = strToBytes("4"); + + const keySequence: JsonMaskType[] = [ + { type: "Object", value: KEY0 }, + { type: "ArrayIndex", value: 0 }, + { type: "Object", value: KEY1 }, + { type: "ArrayIndex", value: 1 }, + ]; + + const [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, 10); + const sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + const sequence_digest_hashed = poseidon1([sequence_digest]); + const data_digest = PolynomialDigest(input, mock_ct_digest); + const data_digest_hashed = poseidon1([data_digest]); + const value_digest = PolynomialDigest(targetValue, mock_ct_digest); + const step_in = modAdd(sequence_digest_hashed, data_digest_hashed); + + await hash_parser.expectPass({ + data: input, + ciphertext_digest: mock_ct_digest, + sequence_digest, + value_digest, + step_in + }); + }); + + it(`input: spotify`, async () => { + let filename = "spotify"; + let [input, keyUnicode, output] = readJSONInputFile(`${filename}.json`, []); + hash_parser = await circomkit.WitnessTester(`Parser`, { + file: "json/extraction", + template: "JSONExtraction", + params: [input.length, 5], + }); + + const KEY0 = strToBytes("data"); + const KEY1 = strToBytes("items"); + const KEY2 = strToBytes("profile"); + const KEY3 = strToBytes("name"); + const targetValue = strToBytes("Taylor Swift"); + + const keySequence: JsonMaskType[] = [ + { type: "Object", value: KEY0 }, + { type: "Object", value: KEY1 }, + { type: "ArrayIndex", value: 0 }, + { type: "Object", value: KEY2 }, + { type: "Object", value: KEY3 }, + ]; + + const [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, 10); + const sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + const sequence_digest_hashed = poseidon1([sequence_digest]); + const data_digest = PolynomialDigest(input, mock_ct_digest); + const 
data_digest_hashed = poseidon1([data_digest]); + const value_digest = PolynomialDigest(targetValue, mock_ct_digest); + const step_in = modAdd(sequence_digest_hashed, data_digest_hashed); + + await hash_parser.expectPass({ + data: input, + ciphertext_digest: mock_ct_digest, + sequence_digest, + value_digest, + step_in + }); + }); +}) \ No newline at end of file diff --git a/circuits/test/json/extractor/interpreter.test.ts b/circuits/test/json/extractor/interpreter.test.ts deleted file mode 100644 index eed2ab2..0000000 --- a/circuits/test/json/extractor/interpreter.test.ts +++ /dev/null @@ -1,445 +0,0 @@ -import { circomkit, WitnessTester, generateDescription, readJSONInputFile } from "../../common"; - -describe("Interpreter", async () => { - describe("InsideKeyAtTop", async () => { - let circuit: WitnessTester<["stack", "parsing_string", "parsing_number"], ["out"]>; - - before(async () => { - circuit = await circomkit.WitnessTester(`InsideKeyAtTop`, { - file: "json/interpreter", - template: "InsideKeyAtTop", - params: [4], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - }); - - function generatePassCase(input: any, expected: any, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - await circuit.expectPass(input, expected); - }); - } - - let input1 = { stack: [[1, 0], [2, 0], [3, 1], [1, 0]], parsing_string: 1, parsing_number: 0 }; - let output = { out: 1 }; - generatePassCase(input1, output, ""); - - let input2 = { stack: [[1, 0], [2, 0], [1, 0], [0, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input2, output, ""); - - let input3 = { stack: [[1, 0], [0, 0], [0, 0], [0, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input3, output, ""); - - // fail cases - - let input4 = { stack: [[1, 0], [2, 0], [3, 1], [1, 1]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input4, { out: 0 }, "invalid stack"); - - let input5 = { stack: [[1, 0], [2, 0], [3, 1], [1, 0]], parsing_string: 1, parsing_number: 1 }; - generatePassCase(input5, { out: 0 }, "parsing number as a key"); - }); - - describe("InsideKey", async () => { - let circuit: WitnessTester<["stack", "parsing_string", "parsing_number"], ["out"]>; - - before(async () => { - circuit = await circomkit.WitnessTester(`InsideKey`, { - file: "json/interpreter", - template: "InsideKey", - }); - console.log("#constraints:", await circuit.getConstraintCount()); - }); - - function generatePassCase(input: any, expected: any, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - await circuit.expectPass(input, expected); - }); - } - - let input1 = { stack: [1, 0], parsing_string: 1, parsing_number: 0 }; - let output = { out: 1 }; - generatePassCase(input1, output, ""); - - // fail cases - - let input2 = { stack: [1, 1], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input2, { out: 0 }, "invalid stack"); - - let input3 = { stack: [1, 0], parsing_string: 1, parsing_number: 1 }; - generatePassCase(input3, { out: 0 }, "parsing number as a key"); - }); - - describe("InsideValueAtTop", async () => { - let circuit: WitnessTester<["stack", "parsing_string", "parsing_number"], ["out"]>; - - before(async () => { - circuit = await circomkit.WitnessTester(`InsideValueAtTop`, { - file: "json/interpreter", - template: "InsideValueAtTop", - params: [4], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - }); - 
- function generatePassCase(input: any, expected: any, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - await circuit.expectPass(input, expected); - }); - } - - let input1 = { stack: [[1, 0], [2, 0], [3, 1], [1, 1]], parsing_string: 1, parsing_number: 0 }; - let output = { out: 1 }; - generatePassCase(input1, output, ""); - - let input2 = { stack: [[1, 0], [2, 0], [1, 1], [0, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input2, output, ""); - - let input3 = { stack: [[1, 1], [0, 0], [0, 0], [0, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input3, output, ""); - - // fail cases - - let input4 = { stack: [[1, 0], [2, 0], [3, 1], [1, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input4, { out: 0 }, "invalid stack"); - - let input5 = { stack: [[1, 0], [2, 0], [3, 1], [1, 1]], parsing_string: 1, parsing_number: 1 }; - generatePassCase(input5, { out: 0 }, "parsing number and key both"); - }); - - describe("InsideValue", async () => { - let circuit: WitnessTester<["stack", "parsing_string", "parsing_number"], ["out"]>; - - function generatePassCase(input: any, expected: any, depth: number, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - circuit = await circomkit.WitnessTester(`InsideValue`, { - file: "json/interpreter", - template: "InsideValue", - }); - console.log("#constraints:", await circuit.getConstraintCount()); - - input.stack = input.stack[depth]; - - await circuit.expectPass(input, expected); - }); - } - - let input1 = { stack: [[1, 0], [2, 0], [3, 1], [1, 1]], parsing_string: 1, parsing_number: 0 }; - let output = { out: 1 }; - generatePassCase(input1, output, 3, ""); - - let input2 = { stack: [[1, 0], [2, 0], [1, 1], [1, 1]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input2, output, 2, ""); - - let input3 = { stack: [[1, 1], [0, 0], [0, 0], [1, 1]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input3, output, 0, ""); - - // fail cases - - let input4 = { stack: [[1, 0], [2, 0], [3, 1], [1, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input4, { out: 0 }, 0, "invalid stack"); - - let input5 = { stack: [[1, 0], [2, 0], [3, 1], [1, 1]], parsing_string: 1, parsing_number: 1 }; - generatePassCase(input5, { out: 0 }, 3, "parsing number and key both"); - }); - - describe("InsideArrayIndexAtTop", async () => { - let circuit: WitnessTester<["stack", "parsing_string", "parsing_number"], ["out"]>; - - function generatePassCase(input: any, expected: any, index: number, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - circuit = await circomkit.WitnessTester(`InsideArrayIndexAtTop`, { - file: "json/interpreter", - template: "InsideArrayIndexAtTop", - params: [4, index], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - - await circuit.expectPass(input, expected); - }); - } - - let input1 = { stack: [[1, 0], [2, 0], [3, 1], [2, 1]], parsing_string: 1, parsing_number: 0 }; - let output = { out: 1 }; - generatePassCase(input1, output, 1, ""); - - let input2 = { stack: [[1, 0], [2, 0], [2, 3], [0, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input2, output, 3, ""); - - let input3 = { stack: [[2, 10], [0, 0], [0, 0], [0, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input3, output, 10, ""); - - 
// fail cases - - let input4 = { stack: [[1, 0], [2, 0], [3, 1], [1, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input4, { out: 0 }, 4, "invalid stack"); - - let input5 = { stack: [[1, 0], [2, 0], [3, 1], [1, 1]], parsing_string: 1, parsing_number: 1 }; - generatePassCase(input5, { out: 0 }, 4, "parsing number and key both"); - - let input6 = { stack: [[1, 0], [2, 0], [3, 1], [2, 4]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input6, { out: 0 }, 3, "incorrect index"); - }); - - describe("InsideArrayIndex", async () => { - let circuit: WitnessTester<["stack", "parsing_string", "parsing_number"], ["out"]>; - - function generatePassCase(input: any, expected: any, index: number, depth: number, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - circuit = await circomkit.WitnessTester(`InsideArrayIndex`, { - file: "json/interpreter", - template: "InsideArrayIndex", - params: [index], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - - input.stack = input.stack[depth] - - await circuit.expectPass(input, expected); - }); - } - - let input1 = { stack: [[1, 0], [2, 0], [3, 1], [2, 1]], parsing_string: 1, parsing_number: 0 }; - let output = { out: 1 }; - generatePassCase(input1, output, 1, 3, ""); - - let input2 = { stack: [[1, 0], [2, 0], [2, 3], [2, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input2, output, 3, 2, ""); - - let input3 = { stack: [[2, 10], [0, 0], [1, 0], [0, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input3, output, 10, 0, ""); - - // fail cases - - let input4 = { stack: [[1, 0], [2, 0], [3, 1], [1, 0]], parsing_string: 1, parsing_number: 0 }; - generatePassCase(input4, { out: 0 }, 4, 2, "invalid stack depth"); - - let input5 = { stack: [[1, 0], [2, 0], [3, 1], [1, 1]], parsing_string: 1, parsing_number: 1 }; - generatePassCase(input5, { out: 0 }, 4, 1, "parsing number and key both"); - }); - - describe("NextKVPair", async () => { - let circuit: WitnessTester<["stack", "currByte"], ["out"]>; - - before(async () => { - circuit = await circomkit.WitnessTester(`NextKVPair`, { - file: "json/interpreter", - template: "NextKVPair", - params: [4], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - }); - - function generatePassCase(input: any, expected: any, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - await circuit.expectPass(input, expected); - }); - } - - let input1 = { stack: [[1, 0], [2, 0], [3, 1], [1, 0]], currByte: 44 }; - let output = { out: 1 }; - generatePassCase(input1, output, ""); - - let input2 = { stack: [[1, 0], [2, 0], [1, 0], [0, 0]], currByte: 44 }; - generatePassCase(input2, output, ""); - - let input3 = { stack: [[1, 0], [0, 0], [0, 0], [0, 0]], currByte: 44 }; - generatePassCase(input3, output, ""); - - let input4 = { stack: [[1, 0], [2, 0], [3, 1], [1, 1]], currByte: 44 }; - generatePassCase(input4, { out: 0 }, "invalid stack"); - - let input5 = { stack: [[1, 0], [2, 0], [3, 1], [1, 0]], currByte: 34 }; - generatePassCase(input5, { out: 0 }, "incorrect currByte"); - }); - - describe("NextKVPairAtDepth", async () => { - let circuit: WitnessTester<["stack", "currByte", "depth"], ["out"]>; - - function generatePassCase(input: any, expected: any, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - 
circuit = await circomkit.WitnessTester(`NextKVPairAtDepth`, { - file: "json/interpreter", - template: "NextKVPairAtDepth", - params: [4], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - - await circuit.expectPass(input, expected); - }); - } - - let input1 = { stack: [[1, 0], [2, 0], [3, 1], [1, 0]], currByte: 44, depth: 3 }; - // output = 1 represents correct execution - let output = { out: 1 }; - generatePassCase(input1, output, ""); - - // key depth is 2, and even if new-kv pair starts at depth greater than 2, it returns 0. - let input2 = { stack: [[1, 0], [2, 0], [1, 1], [1, 0]], currByte: 44, depth: 2 }; - generatePassCase(input2, { out: 0 }, ""); - - let input3 = { stack: [[1, 0], [1, 0], [0, 0], [0, 0]], currByte: 44, depth: 3 }; - generatePassCase(input3, output, "stack height less than specified"); - - let input4 = { stack: [[1, 0], [2, 0], [1, 0], [0, 0]], currByte: 34, depth: 2 }; - generatePassCase(input4, { out: 0 }, "incorrect currByte"); - }); - - describe("KeyMatch", async () => { - let circuit: WitnessTester<["data", "key", "index", "parsing_key"], ["out"]>; - - function generatePassCase(input: any, expected: any, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - circuit = await circomkit.WitnessTester(`KeyMatch`, { - file: "json/interpreter", - template: "KeyMatch", - params: [input.data.length, input.key.length], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - - await circuit.expectPass(input, expected); - }); - } - - let input = readJSONInputFile("value_array_object.json", ["a"]); - - let output = { out: 1 }; - let input1 = { data: input[0], key: input[1][0], index: 2, parsing_key: 1 }; - generatePassCase(input1, output, ""); - - let input2 = { data: input[0], key: [99], index: 20, parsing_key: 1 }; - generatePassCase(input2, output, ""); - - // fail cases - - let input3 = { data: input[0], key: input[1][0], index: 3, parsing_key: 1 }; - generatePassCase(input3, { out: 0 }, "wrong index"); - - let input4 = { data: input[0], key: [98], index: 2, parsing_key: 1 }; - generatePassCase(input4, { out: 0 }, "wrong key"); - - let input5 = { data: input[0], key: [97], index: 2, parsing_key: 0 }; - generatePassCase(input5, { out: 0 }, "not parsing key"); - }); - - describe("KeyMatchAtDepth", async () => { - let circuit: WitnessTester<["data", "key", "index", "parsing_key", "stack"], ["out"]>; - - function generatePassCase(input: any, expected: any, depth: number, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - circuit = await circomkit.WitnessTester(`KeyMatchAtDepth`, { - file: "json/interpreter", - template: "KeyMatchAtDepth", - params: [input.data.length, 4, input.key.length, depth], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - - await circuit.expectPass(input, expected); - }); - } - - let input = readJSONInputFile("value_array_object.json", ["a", 0, "b", 0]); - - let output = { out: 1 }; - - let input1 = { data: input[0], key: input[1][0], index: 2, parsing_key: 1, stack: [[1, 0], [0, 0], [0, 0], [0, 0]] }; - generatePassCase(input1, output, 0, ""); - - let input2 = { data: input[0], key: input[1][2], index: 8, parsing_key: 1, stack: [[1, 1], [2, 0], [1, 0], [0, 0]] }; - generatePassCase(input2, output, 2, ""); - - let input3 = { data: input[0], key: [99], index: 20, parsing_key: 1, stack: [[1, 1], [2, 1], [1, 1], [0, 0]] }; - 
generatePassCase(input3, output, 2, "wrong stack"); - - // fail cases - - let input4 = { data: input[0], key: input[1][1], index: 3, parsing_key: 1, stack: [[1, 0], [2, 0], [1, 0], [0, 0]] }; - generatePassCase(input4, { out: 0 }, 2, "wrong key"); - - let input5 = { data: input[0], key: [97], index: 12, parsing_key: 0, stack: [[1, 1], [2, 0], [1, 1], [0, 0]] }; - generatePassCase(input5, { out: 0 }, 3, "not parsing key"); - - let input6Data = input[0].slice(0); - input6Data.splice(1, 1, 35); - let input6 = { data: input6Data, key: input[1][0], index: 2, parsing_key: 1, stack: [[1, 0], [0, 0], [0, 0], [0, 0]] }; - generatePassCase(input6, { out: 0 }, 0, "invalid key (not surrounded by quotes)"); - - let input7 = { data: input[0], key: input[1][0], index: 2, parsing_key: 1, stack: [[1, 0], [0, 0], [0, 0], [0, 0]] }; - generatePassCase(input6, { out: 0 }, 1, "wrong depth"); - }); - - describe("KeyMatchAtIndex", async () => { - let circuit: WitnessTester<["data", "key", "keyLen", "parsing_key"], ["out"]>; - let maxKeyLen = 3; - - function generatePassCase(input: any, expected: any, index: number, desc: string) { - const description = generateDescription(input); - - it(`(valid) witness: ${description} ${desc}`, async () => { - // pad key with 0's - let padded_key = input.key.concat(Array(maxKeyLen - input.key.length).fill(0)); - input.key = padded_key; - - circuit = await circomkit.WitnessTester(`KeyMatchAtIndex`, { - file: "json/interpreter", - template: "KeyMatchAtIndex", - params: [input.data.length, maxKeyLen, index], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - - await circuit.expectPass(input, expected); - }); - } - - let input = readJSONInputFile("value_array_object.json", ["a", 0, "b", 0]); - - let output = { out: 1 }; - - let key1 = input[1][0]; - let input1 = { data: input[0], key: key1, keyLen: key1.length, parsing_key: 1 }; - generatePassCase(input1, output, 2, ""); - - let key2 = input[1][2]; - let input2 = { data: input[0], key: key2, keyLen: key2.length, parsing_key: 1 }; - generatePassCase(input2, output, 8, ""); - - let input3 = { data: input[0], key: [99], keyLen: 1, parsing_key: 1 }; - generatePassCase(input3, output, 20, "wrong stack"); - - // fail cases - - let failOutput = { out: 0 }; - let key4 = input[1][1]; - let input4 = { data: input[0], key: key4, keyLen: key4.length, parsing_key: 1 }; - generatePassCase(input4, failOutput, 3, "wrong key"); - - let input5 = { data: input[0], key: [97], keyLen: 1, parsing_key: 0 }; - generatePassCase(input5, failOutput, 12, "not parsing key"); - - let input6Data = input[0].slice(0); - input6Data.splice(1, 1, 35); - let input6 = { data: input6Data, key: input[1][0], keyLen: input[1][0].length, parsing_key: 1 }; - generatePassCase(input6, failOutput, 2, "invalid key (not surrounded by quotes)"); - - let input7 = { data: input[0], key: input[1][0], keyLen: input[1][0].length, parsing_key: 1 }; - generatePassCase(input6, failOutput, 2, "wrong depth"); - }); -}); \ No newline at end of file diff --git a/circuits/test/json/parser/index.ts b/circuits/test/json/index.ts similarity index 100% rename from circuits/test/json/parser/index.ts rename to circuits/test/json/index.ts diff --git a/circuits/test/json/nivc/masker_nivc.test.ts b/circuits/test/json/nivc/masker_nivc.test.ts deleted file mode 100644 index 0d9a265..0000000 --- a/circuits/test/json/nivc/masker_nivc.test.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { circomkit, WitnessTester, generateDescription, readJsonFile, toByte } from "../../common"; -import { 
DataHasher } from "../../common/poseidon"; -import { assert } from "chai"; - -// HTTP/1.1 200 OK -// content-type: application/json; charset=utf-8 -// content-encoding: gzip -// Transfer-Encoding: chunked -// -// { -// "data": { -// "items": [ -// { -// "data": "Artist", -// "profile": { -// "name": "Taylor Swift" -// } -// } -// ] -// } -// } - -// 202 bytes in the JSON -let json_input = [ - 123, 13, 10, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, - 32, 34, 105, 116, 101, 109, 115, 34, 58, 32, 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, - 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, - 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, - 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13, 10, 32, 32, 32, 125, 13, 10, 125]; - -const json_key0_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 32, 32, 32, - 32, 34, 105, 116, 101, 109, 115, 34, 58, 32, 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, - 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, - 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, - 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13, 10, 32, 32, 32, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0]; -const json_key0_mask_hash = DataHasher(json_key0_mask); - -const json_key1_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, - 116, 105, 115, 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 34, 112, 114, 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, - 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, - 32, 32, 93, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; -const json_key1_mask_hash = DataHasher(json_key1_mask); - -const json_arr_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 34, 65, 114, 116, 105, 115, 116, - 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, 111, 102, 105, - 108, 
101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, - 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; -const json_arr_mask_hash = DataHasher(json_arr_mask); - -const json_key2_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 110, 97, 109, - 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; - -const json_key2_mask_hash = DataHasher(json_key2_mask); - -const json_key3_mask = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, 105, 102, 116, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -]; -const json_key3_mask_hash = DataHasher(json_key3_mask); - -describe("NIVC Extract", async () => { - let json_mask_object_circuit: WitnessTester<["step_in", "data", "key", "keyLen"], ["step_out"]>; - let json_mask_arr_circuit: WitnessTester<["step_in", "data", "index"], ["step_out"]>; - let extract_value_circuit: WitnessTester<["step_in", "data"], ["step_out"]>; - - const DATA_BYTES = 208; - const MAX_STACK_HEIGHT = 5; - const MAX_KEY_LENGTH = 8; - const MAX_VALUE_LENGTH = 32; - - before(async () => { - json_mask_arr_circuit = await circomkit.WitnessTester(`JsonMaskArrayIndexNIVC`, { - file: "json/nivc/masker", - template: "JsonMaskArrayIndexNIVC", - params: [DATA_BYTES, MAX_STACK_HEIGHT], - }); - console.log("#constraints:", await json_mask_arr_circuit.getConstraintCount()); - - json_mask_object_circuit = await circomkit.WitnessTester(`JsonMaskObjectNIVC`, { - file: "json/nivc/masker", - template: "JsonMaskObjectNIVC", - params: [DATA_BYTES, MAX_STACK_HEIGHT, MAX_KEY_LENGTH], - }); - console.log("#constraints:", await json_mask_object_circuit.getConstraintCount()); - - extract_value_circuit = await circomkit.WitnessTester(`JsonMaskExtractFinal`, { - file: "json/nivc/extractor", - template: "MaskExtractFinal", - params: [DATA_BYTES, MAX_VALUE_LENGTH], - }); - console.log("#constraints:", await extract_value_circuit.getConstraintCount()); - }); - - - let key0 = [100, 97, 116, 97, 0, 0, 0, 0]; // "data" - let key0Len = 4; - let key1 = [105, 116, 101, 109, 115, 0, 0, 0]; // "items" - let key1Len = 5; - let key2 = [112, 114, 111, 102, 105, 108, 101, 0]; // "profile" - let 
key2Len = 7; - let key3 = [110, 97, 109, 101, 0, 0, 0, 0]; // "name" - let key3Len = 4; - - let value = toByte("\"Taylor Swift\""); - - it("parse and mask", async () => { - - console.log(json_input.length); - let extended_json_input = json_input.concat(Array(Math.max(0, DATA_BYTES - json_input.length)).fill(0)); - - let jsonInputHash = DataHasher(extended_json_input); - let json_extract_key0 = await json_mask_object_circuit.compute({ step_in: jsonInputHash, data: extended_json_input, key: key0, keyLen: key0Len }, ["step_out"]); - console.log("JSON Extract key0 `step_out`:", json_extract_key0.step_out); - assert.deepEqual(json_extract_key0.step_out, json_key0_mask_hash); - - let json_extract_key1 = await json_mask_object_circuit.compute({ step_in: json_extract_key0.step_out, data: json_key0_mask, key: key1, keyLen: key1Len }, ["step_out"]); - assert.deepEqual(json_extract_key1.step_out, json_key1_mask_hash); - console.log("JSON Extract key1 `step_out`:", json_extract_key1.step_out); - - let json_extract_arr = await json_mask_arr_circuit.compute({ step_in: json_extract_key1.step_out, index: 0, data: json_key1_mask }, ["step_out"]); - assert.deepEqual(json_extract_arr.step_out, json_arr_mask_hash); - console.log("JSON Extract arr `step_out`:", json_extract_arr.step_out); - - let json_extract_key2 = await json_mask_object_circuit.compute({ step_in: json_extract_arr.step_out, data: json_arr_mask, key: key2, keyLen: key2Len }, ["step_out"]); - assert.deepEqual(json_extract_key2.step_out, json_key2_mask_hash); - console.log("JSON Extract key2 `step_out`:", json_extract_key2.step_out); - - let json_extract_key3 = await json_mask_object_circuit.compute({ step_in: json_extract_key2.step_out, data: json_key2_mask, key: key3, keyLen: key3Len }, ["step_out"]); - assert.deepEqual(json_extract_key3.step_out, json_key3_mask_hash); - console.log("JSON Extract key3 `step_out`:", json_extract_key3.step_out); - - value = value.concat(Array(MAX_VALUE_LENGTH - value.length).fill(0)); - let final_value_hash = DataHasher(value); - let extractValue = await extract_value_circuit.compute({ step_in: json_extract_key3.step_out, data: json_key3_mask }, ["step_out"]); - console.log("JSON Extract finalValue `step_out`:", extractValue.step_out); - assert.deepEqual(extractValue.step_out, final_value_hash); - }); -}); \ No newline at end of file diff --git a/circuits/test/json/parser/parser.test.ts b/circuits/test/json/parser.test.ts similarity index 69% rename from circuits/test/json/parser/parser.test.ts rename to circuits/test/json/parser.test.ts index eccbc99..0c525b0 100644 --- a/circuits/test/json/parser/parser.test.ts +++ b/circuits/test/json/parser.test.ts @@ -1,6 +1,6 @@ -import { circomkit, WitnessTester, generateDescription, readJSONInputFile } from "../../common"; +import { circomkit, WitnessTester, readJSONInputFile } from "../common"; -describe("json-parser", () => { +describe("JSON Parser", () => { let circuit: WitnessTester<["data"]>; it(`array only input`, async () => { @@ -8,11 +8,10 @@ describe("json-parser", () => { let [input, keyUnicode, output] = readJSONInputFile(`${filename}.json`, [0]); circuit = await circomkit.WitnessTester(`Parser`, { - file: "json/parser/parser", + file: "json/parser", template: "Parser", params: [input.length, 2], }); - console.log("#constraints:", await circuit.getConstraintCount()); await circuit.expectPass({ data: input @@ -24,11 +23,10 @@ describe("json-parser", () => { let [input, keyUnicode, output] = readJSONInputFile(`${filename}.json`, ["a"]); circuit = await 
circomkit.WitnessTester(`Parser`, {
-      file: "json/parser/parser",
+      file: "json/parser",
       template: "Parser",
       params: [input.length, 3],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
 
     await circuit.expectPass({
       data: input
diff --git a/circuits/test/json/parser/parsing_types.test.ts b/circuits/test/json/parsing_types.test.ts
similarity index 94%
rename from circuits/test/json/parser/parsing_types.test.ts
rename to circuits/test/json/parsing_types.test.ts
index 88d892e..a803fc3 100644
--- a/circuits/test/json/parser/parsing_types.test.ts
+++ b/circuits/test/json/parsing_types.test.ts
@@ -1,7 +1,5 @@
-import { circomkit, WitnessTester, generateDescription } from "../../common";
-import { Delimiters, WhiteSpace, Numbers, Escape, INITIAL_IN, INITIAL_OUT } from '.';
-
-
+import { circomkit, WitnessTester, generateDescription } from "../common";
+import { Delimiters, WhiteSpace, INITIAL_IN, INITIAL_OUT } from '.';
 
 describe("StateUpdate", () => {
   let circuit: WitnessTester<
@@ -19,12 +17,10 @@ describe("StateUpdate", () => {
 
   before(async () => {
     circuit = await circomkit.WitnessTester(`StateUpdate`, {
-      file: "json/parser/machine",
+      file: "json/machine",
       template: "StateUpdate",
       params: [4],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
-
   });
 
   //-TEST_1----------------------------------------------------------//
diff --git a/circuits/test/json/parser/stack.test.ts b/circuits/test/json/stack.test.ts
similarity index 96%
rename from circuits/test/json/parser/stack.test.ts
rename to circuits/test/json/stack.test.ts
index 860737e..6140d64 100644
--- a/circuits/test/json/parser/stack.test.ts
+++ b/circuits/test/json/stack.test.ts
@@ -1,15 +1,14 @@
-import { circomkit, WitnessTester, generateDescription } from "../../common";
-import { Delimiters, WhiteSpace, Numbers, Escape, INITIAL_IN, INITIAL_OUT } from '.';
+import { circomkit, WitnessTester, generateDescription } from "../common";
+import { Delimiters, INITIAL_IN, INITIAL_OUT } from '.';
 
 describe("GetTopOfStack", () => {
   let circuit: WitnessTester<["stack"], ["value", "pointer"]>;
   before(async () => {
     circuit = await circomkit.WitnessTester(`GetTopOfStack`, {
-      file: "json/parser/machine",
+      file: "json/machine",
       template: "GetTopOfStack",
       params: [4],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
   });
 
   function generatePassCase(input: any, expected: any) {
@@ -34,11 +33,10 @@ describe("StateUpdate :: RewriteStack", () => {
   >;
   before(async () => {
     circuit = await circomkit.WitnessTester(`GetTopOfStack`, {
-      file: "json/parser/machine",
+      file: "json/machine",
       template: "StateUpdate",
       params: [4],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
   });
 
   function generatePassCase(input: any, expected: any, desc: string) {
diff --git a/circuits/test/json/parser/values.test.ts b/circuits/test/json/values.test.ts
similarity index 98%
rename from circuits/test/json/parser/values.test.ts
rename to circuits/test/json/values.test.ts
index 069525b..f51f43b 100644
--- a/circuits/test/json/parser/values.test.ts
+++ b/circuits/test/json/values.test.ts
@@ -1,4 +1,4 @@
-import { circomkit, WitnessTester, generateDescription } from "../../common";
+import { circomkit, WitnessTester, generateDescription } from "../common";
 import { Delimiters, WhiteSpace, Numbers, Escape, INITIAL_IN, INITIAL_OUT } from '.';
 
 describe("StateUpdate :: Values", () => {
   let circuit: WitnessTester<
@@ -8,12 +8,12 @@ describe("StateUpdate :: Values", () => {
   >;
   before(async () => {
     circuit = await circomkit.WitnessTester(`GetTopOfStack`, {
-      file: "json/parser/machine",
+      file: "json/machine",
       template: "StateUpdate",
       params: [4],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
   });
+
   function generatePassCase(input: any, expected: any, desc: string) {
     const description = generateDescription(input);
diff --git a/circuits/test/utils/array.test.ts b/circuits/test/utils/array.test.ts
index 1202a47..2b88325 100644
--- a/circuits/test/utils/array.test.ts
+++ b/circuits/test/utils/array.test.ts
@@ -1,4 +1,5 @@
 import { circomkit, WitnessTester } from "../common";
+
 describe("IsEqualArray", () => {
   let circuit: WitnessTester<["in"], ["out"]>;
   before(async () => {
@@ -7,7 +8,6 @@ describe("IsEqualArray", () => {
       template: "IsEqualArray",
       params: [3],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
   });
 
   it("witness: [[0,0,0],[0,0,0]]", async () => {
@@ -61,7 +61,6 @@ describe("Contains", () => {
       template: "Contains",
       params: [3],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
   });
 
   it("witness: in = 0, array = [0,1,2]", async () => {
@@ -102,7 +101,6 @@ describe("ArrayAdd", () => {
       template: "ArrayAdd",
       params: [3],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
   });
 
   it("witness: lhs = [0,1,2], rhs = [3,5,7]", async () => {
@@ -122,7 +120,6 @@ describe("ArrayMul", () => {
      template: "ArrayMul",
       params: [3],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
   });
 
   it("witness: lhs = [0,1,2], rhs = [3,5,7]", async () => {
@@ -142,7 +139,6 @@ describe("GenericArrayAdd", () => {
       template: "GenericArrayAdd",
       params: [3, 2],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
   });
 
   it("witness: arrays = [[0,1,2],[3,5,7]]", async () => {
@@ -161,7 +157,6 @@ describe("fromLittleEndianToWords32", () => {
       file: "utils/array",
       template: "fromLittleEndianToWords32",
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
 
     let input = [
       0, 1, 0, 1, 0, 0, 0, 0, 0,
@@ -180,7 +175,6 @@ describe("fromWords32ToLittleEndian", () => {
       file: "utils/array",
       template: "fromWords32ToLittleEndian",
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
 
     let input = [72, 84, 84, 80];
     await circuit.expectPass({ words: input }, {
diff --git a/circuits/test/utils/bits.test.ts b/circuits/test/utils/bits.test.ts
deleted file mode 100644
index 30dfdc4..0000000
--- a/circuits/test/utils/bits.test.ts
+++ /dev/null
@@ -1,25 +0,0 @@
-import { circomkit, WitnessTester } from "../common";
-
-describe("ASCII", () => {
-  let circuit: WitnessTester<["in"], ["out"]>;
-  before(async () => {
-    circuit = await circomkit.WitnessTester(`ASCII`, {
-      file: "utils/bits",
-      template: "ASCII",
-      params: [13],
-    });
-    console.log("#constraints:", await circuit.getConstraintCount());
-  });
-
-  it("(valid) witness: in = b\"Hello, world!\"", async () => {
-    await circuit.expectPass(
-      { in: [72, 101, 108, 108, 111, 44, 32, 119, 111, 114, 108, 100, 33] },
-    );
-  });
-
-  it("(invalid) witness: in = [256, ...]", async () => {
-    await circuit.expectFail(
-      { in: [256, 101, 108, 108, 111, 44, 32, 119, 111, 114, 108, 100, 33] }
-    );
-  });
-});
\ No newline at end of file
diff --git a/circuits/test/utils/hash.test.ts b/circuits/test/utils/hash.test.ts
index c62b969..3c8718a 100644
--- a/circuits/test/utils/hash.test.ts
+++ b/circuits/test/utils/hash.test.ts
@@ -1,184 +1,141 @@
 import assert from "assert";
-import { circomkit, WitnessTester } from "../common";
+import { circomkit,
http_response_plaintext, http_start_line, PolynomialDigest, WitnessTester } from "../common"; import { DataHasher, PoseidonModular } from "../common/poseidon"; +import { poseidon1 } from "poseidon-lite"; -describe("hash", () => { - describe("PoseidonModular_16", () => { - let circuit: WitnessTester<["in"], ["out"]>; - - before(async () => { - circuit = await circomkit.WitnessTester(`PoseidonModular`, { - file: "utils/hash", - template: "PoseidonModular", - params: [16], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - }); - it("witness: in = [16*random]", async () => { - const input = Array.from({ length: 16 }, () => Math.floor(Math.random() * 256)); - const hash = PoseidonModular(input); +describe("DataHasher", () => { + let circuit: WitnessTester<["in"], ["out"]>; - await circuit.expectPass( - { in: input }, - { out: hash } - ); + before(async () => { + circuit = await circomkit.WitnessTester(`DataHasher`, { + file: "utils/hash", + template: "DataHasher", + params: [16], }); }); - describe("PoseidonModular_379", () => { - let circuit: WitnessTester<["in"], ["out"]>; - - before(async () => { - circuit = await circomkit.WitnessTester(`PoseidonModular`, { - file: "utils/hash", - template: "PoseidonModular", - params: [379], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - }); + let all_zero_hash = BigInt("14744269619966411208579211824598458697587494354926760081771325075741142829156"); + it("witness: in = [0,...x16]", async () => { + const input = Array(16).fill(0); + await circuit.expectPass( + { in: input }, + { out: all_zero_hash } + ); + }); + // Check that TS version of DataHasher also is correct + assert.deepEqual(DataHasher(Array(16).fill(0)), all_zero_hash); + + it("witness: in = [-1,...x16]", async () => { + const input = Array(16).fill(-1); + await circuit.expectPass( + { in: input }, + { out: 0 } + ); + }); + // Check that TS version of DataHasher also is correct + assert.deepEqual(DataHasher(Array(16).fill(-1)), 0); + + it("witness: in = [1,0,...x15]", async () => { + let input = Array(16).fill(0); + input[0] = 1; + const hash = PoseidonModular([0, 1]); + await circuit.expectPass( + { in: input }, + { out: hash } + ); + }); - it("witness: in = [379*random]", async () => { - const input = Array.from({ length: 379 }, () => Math.floor(Math.random() * 256)); - const hash = PoseidonModular(input); - await circuit.expectPass( - { in: input }, - { out: hash } - ); - }); + it("witness: in = [0,0,...x15,1]", async () => { + let input = Array(16).fill(0); + input[15] = 1; + const hash = PoseidonModular([0, "1329227995784915872903807060280344576"]); + await circuit.expectPass( + { in: input }, + { out: hash } + ); }); +}); + +const padded_http_start_line = http_start_line.concat(Array(320 - http_start_line.length).fill(-1)); - describe("PoseidonChainer", () => { - let circuit: WitnessTester<["in"], ["out"]>; +describe("DataHasherHTTP", () => { + let circuit: WitnessTester<["in"], ["out"]>; + let circuit_small: WitnessTester<["in"], ["out"]>; - before(async () => { - circuit = await circomkit.WitnessTester(`PoseidonChainer`, { - file: "utils/hash", - template: "PoseidonChainer", - }); - console.log("#constraints:", await circuit.getConstraintCount()); + before(async () => { + circuit = await circomkit.WitnessTester(`DataHasher`, { + file: "utils/hash", + template: "DataHasher", + params: [320], }); - it("witness: in = [69,420]", async () => { - const input = [69, 420]; - const hash = PoseidonModular(input); - await circuit.expectPass( - { 
in: input }, - { out: hash } - ); + circuit_small = await circomkit.WitnessTester(`DataHasher`, { + file: "utils/hash", + template: "DataHasher", + params: [32], }); }); - describe("DataHasher", () => { - let circuit: WitnessTester<["in"], ["out"]>; + it("witness: HTTP bytes", async () => { + let hash = DataHasher(http_response_plaintext); + assert.deepEqual(String(hash), "2195365663909569734943279727560535141179588918483111718403427949138562480675"); + await circuit.expectPass({ in: http_response_plaintext }, { out: "2195365663909569734943279727560535141179588918483111718403427949138562480675" }); + }); - before(async () => { - circuit = await circomkit.WitnessTester(`DataHasher`, { - file: "utils/hash", - template: "DataHasher", - params: [16], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - }); + let padded_hash = DataHasher(padded_http_start_line); + it("witness: padded HTTP start line", async () => { + await circuit.expectPass({ in: padded_http_start_line }, { out: padded_hash }); + }); - let all_zero_hash = BigInt("14744269619966411208579211824598458697587494354926760081771325075741142829156"); - it("witness: in = [0,...x16]", async () => { - const input = Array(16).fill(0); - await circuit.expectPass( - { in: input }, - { out: all_zero_hash } - ); - }); - // Check that TS version of DataHasher also is correct - assert.deepEqual(DataHasher(Array(16).fill(0)), all_zero_hash); - - it("witness: in = [-1,...x16]", async () => { - const input = Array(16).fill(-1); - await circuit.expectPass( - { in: input }, - { out: 0 } - ); - }); - // Check that TS version of DataHasher also is correct - assert.deepEqual(DataHasher(Array(16).fill(-1)), 0); - - it("witness: in = [1,0,...x15]", async () => { - let input = Array(16).fill(0); - input[0] = 1; - const hash = PoseidonModular([0, 1]); - await circuit.expectPass( - { in: input }, - { out: hash } - ); - }); + let hash = DataHasher(http_start_line); + it("witness: unpadded HTTP start line", async () => { + await circuit_small.expectPass({ in: http_start_line.concat(Array(32 - http_start_line.length).fill(-1)) }, { out: hash }); + }); +}); +describe("PolynomialDigest", () => { + let circuit: WitnessTester<["bytes", "polynomial_input"], ["digest"]>; - it("witness: in = [0,0,...x15,1]", async () => { - let input = Array(16).fill(0); - input[15] = 1; - const hash = PoseidonModular([0, "1329227995784915872903807060280344576"]); - await circuit.expectPass( - { in: input }, - { out: hash } - ); + before(async () => { + circuit = await circomkit.WitnessTester(`PolynomialDigest`, { + file: "utils/hash", + template: "PolynomialDigest", + params: [4], }); }); - const TEST_HTTP_BYTES = [ - 72, 84, 84, 80, 47, 49, 46, 49, 32, 50, 48, 48, 32, 79, 75, 13, 10, 99, 111, 110, 116, 101, 110, - 116, 45, 116, 121, 112, 101, 58, 32, 97, 112, 112, 108, 105, 99, 97, 116, 105, 111, 110, 47, 106, - 115, 111, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 116, 102, 45, 56, 13, 10, 99, - 111, 110, 116, 101, 110, 116, 45, 101, 110, 99, 111, 100, 105, 110, 103, 58, 32, 103, 122, 105, - 112, 13, 10, 84, 114, 97, 110, 115, 102, 101, 114, 45, 69, 110, 99, 111, 100, 105, 110, 103, 58, - 32, 99, 104, 117, 110, 107, 101, 100, 13, 10, 13, 10, 123, 13, 10, 32, 32, 32, 34, 100, 97, 116, - 97, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 34, 105, 116, 101, 109, 115, 34, 58, 32, - 91, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 32, 32, 32, 32, 34, 100, 97, 116, 97, 34, 58, 32, 
34, 65, 114, 116, 105, 115, - 116, 34, 44, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 34, 112, 114, - 111, 102, 105, 108, 101, 34, 58, 32, 123, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, - 32, 32, 32, 32, 34, 110, 97, 109, 101, 34, 58, 32, 34, 84, 97, 121, 108, 111, 114, 32, 83, 119, - 105, 102, 116, 34, 13, 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, - 10, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 125, 13, 10, 32, 32, 32, 32, 32, 32, 32, 93, 13, - 10, 32, 32, 32, 125, 13, 10, 125] - - const http_start_line = [72, 84, 84, 80, 47, 49, 46, 49, 32, 50, 48, 48, 32, 79, 75, 13, 10]; - const padded_http_start_line = http_start_line.concat(Array(320 - http_start_line.length).fill(-1)); - - describe("DataHasherHTTP", () => { - let circuit: WitnessTester<["in"], ["out"]>; - let circuit_small: WitnessTester<["in"], ["out"]>; - - before(async () => { - circuit = await circomkit.WitnessTester(`DataHasher`, { - file: "utils/hash", - template: "DataHasher", - params: [320], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - - circuit_small = await circomkit.WitnessTester(`DataHasher`, { - file: "utils/hash", - template: "DataHasher", - params: [32], - }); - console.log("#constraints:", await circuit.getConstraintCount()); - }); + it("witness: bytes = [0,0,0,0], polynomial_input = 1", async () => { + const bytes = [0, 0, 0, 0]; + const polynomial_input = 0; - it("witness: HTTP bytes", async () => { - let hash = DataHasher(TEST_HTTP_BYTES); - assert.deepEqual(String(hash), "2195365663909569734943279727560535141179588918483111718403427949138562480675"); - await circuit.expectPass({ in: TEST_HTTP_BYTES }, { out: "2195365663909569734943279727560535141179588918483111718403427949138562480675" }); - }); + await circuit.expectPass( + { bytes, polynomial_input }, + { digest: 0 } + ); + }); - let padded_hash = DataHasher(padded_http_start_line); - it("witness: padded HTTP start line", async () => { - await circuit.expectPass({ in: padded_http_start_line }, { out: padded_hash }); - }); + it("witness: bytes = [1,2,3,4], polynomial_input = 7", async () => { + const bytes = [1, 2, 3, 4]; + const polynomial_input = 7; - let hash = DataHasher(http_start_line); - it("witness: unpadded HTTP start line", async () => { - await circuit_small.expectPass({ in: http_start_line.concat(Array(32 - http_start_line.length).fill(-1)) }, { out: hash }); - }); + await circuit.expectPass( + { bytes, polynomial_input }, + { digest: 1 + 2 * 7 + 3 * 7 ** 2 + 4 * 7 ** 3 } + ); + }); + + it("witness: bytes = [4*random], polynomial_input = random", async () => { + const bytes = Array.from({ length: 4 }, () => Math.floor(Math.random() * 256)); + const polynomial_input = poseidon1([BigInt(Math.floor(Math.random() * 694206942069420))]); + const digest = PolynomialDigest(bytes, polynomial_input); + + await circuit.expectPass( + { bytes, polynomial_input }, + { digest } + ); }); + }); + diff --git a/circuits/test/utils/operators.test.ts b/circuits/test/utils/operators.test.ts index 67c0069..1635a10 100644 --- a/circuits/test/utils/operators.test.ts +++ b/circuits/test/utils/operators.test.ts @@ -8,7 +8,6 @@ describe("SwitchArray", () => { template: "SwitchArray", params: [3, 2], }); - console.log("#constraints:", await circuit.getConstraintCount()); }); it("witness: case = 0, branches = [0, 1, 2], vals = [[69,0], [420,1], [1337,2]]", async () => { @@ -63,7 +62,6 @@ describe("Switch", () => { template: "Switch", params: [3], }); - 
console.log("#constraints:", await circuit.getConstraintCount());
   });
 
   it("witness: case = 0, branches = [0, 1, 2], vals = [69, 420, 1337]", async () => {
@@ -105,7 +103,6 @@ describe("InRange", () => {
       template: "InRange",
       params: [8],
     });
-    console.log("#constraints:", await circuit.getConstraintCount());
   });
 
   it("witness: in = 1, range = [0,2]", async () => {
diff --git a/circuits/test/utils/search.test.ts b/circuits/test/utils/search.test.ts
deleted file mode 100644
index d05b948..0000000
--- a/circuits/test/utils/search.test.ts
+++ /dev/null
@@ -1,39 +0,0 @@
-import { circomkit, toByte, WitnessTester } from "../common";
-
-const data = toByte("Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum");
-const key = toByte("Ipsum");
-
-describe("SubstringMatchWithIndex", () => {
-  let circuit: WitnessTester<["data", "key", "start"], ["out"]>;
-
-  before(async () => {
-    circuit = await circomkit.WitnessTester(`SubstringSearch`, {
-      file: "utils/search",
-      template: "SubstringMatchWithIndex",
-      params: [data.length, key.length],
-    });
-    console.log("#constraints:", await circuit.getConstraintCount());
-  });
-
-  it("data = witness.json:data, key = witness.json:key, r = hash(key+data)", async () => {
-    await circuit.expectPass(
-      {
-        data: data,
-        key: key,
-        start: 6
-      },
-      { out: 1 },
-    );
-  });
-
-  it("data = witness.json:data, key = witness.json:key, r = hash(key+data), output false", async () => {
-    await circuit.expectPass(
-      {
-        data: data,
-        key: key,
-        start: 98
-      },
-      { out: 0 }
-    );
-  });
-});
diff --git a/circuits/utils/array.circom b/circuits/utils/array.circom
index 87baf4b..10166db 100644
--- a/circuits/utils/array.circom
+++ b/circuits/utils/array.circom
@@ -265,4 +265,5 @@ template fromWords32ToLittleEndian() {
             data[i*8 + j] <== Num2Bits[i].out[7-j];
         }
     }
-}
\ No newline at end of file
+}
+
diff --git a/circuits/utils/bits.circom b/circuits/utils/bits.circom
index 08f2daf..f847aea 100644
--- a/circuits/utils/bits.circom
+++ b/circuits/utils/bits.circom
@@ -2,25 +2,6 @@ pragma circom 2.1.9;
 
 include "circomlib/circuits/bitify.circom";
 
-/*
-This template passes if a given array contains only valid ASCII values (e.g., u8 vals).
-
-# Params:
- - `n`: the length of the array
-
-# Inputs:
- - `in[n]`: array to check
-*/
-template ASCII(n) {
-    signal input in[n];
-
-    component Byte[n];
-    for(var i = 0; i < n; i++) {
-        Byte[i] = Num2Bits(8);
-        Byte[i].in <== in[i];
-    }
-}
-
 
 // initially from https://github.com/reclaimprotocol/zk-symmetric-crypto
 // modified for our needs
diff --git a/circuits/utils/hash.circom b/circuits/utils/hash.circom
index 54639b6..5da3c12 100644
--- a/circuits/utils/hash.circom
+++ b/circuits/utils/hash.circom
@@ -3,67 +3,20 @@ pragma circom 2.1.9;
 include "circomlib/circuits/poseidon.circom";
 include "./array.circom";
 
-/// Circuit to calculate Poseidon hash of an arbitrary number of inputs.
-/// Splits input into chunks of 16 elements (or less for the last chunk) and hashes them separately
-/// Then combines the chunk hashes using a binary tree structure.
-///
-/// NOTE: from
-///
-/// # Parameters
-/// - `numElements`: Number of elements in the input array
-///
-/// # Inputs
-/// - `in`: Array of numElements to be hashed
-///
-/// # Output
-/// - `out`: Poseidon hash of the input array
-template PoseidonModular(numElements) {
-    signal input in[numElements];
+template MaskedByteStreamDigest(DATA_BYTES) {
+    signal input in[DATA_BYTES];
     signal output out;
 
-    var chunks = numElements \ 16;
-    var last_chunk_size = numElements % 16;
-    if (last_chunk_size != 0) {
-        chunks += 1;
-    }
-
-    var _out;
-
-    for (var i = 0; i < chunks; i++) {
-        var start = i * 16;
-        var end = start + 16;
-        var chunk_hash;
-
-        if (end > numElements) { // last chunk
-            end = numElements;
-            var last_chunk[last_chunk_size];
-            for (var i=start ; i Result> {