diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000000..286fcbabf6 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @iron-fish/code-review diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000000..d9dbbfb7a4 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,184 @@ +name: Node.js CI + +on: [push] + +jobs: + lint: + name: Lint + + runs-on: ubuntu-latest + + steps: + - name: Check out Git repository + uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: '14.16.0' + + - name: Use wasm-pack + uses: actions-rs/install@v0.1 + with: + crate: wasm-pack + version: latest + use-tool-cache: true + + - name: Cache Cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache Cargo index + uses: actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-stable-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache Cargo build + uses: actions/cache@v1 + with: + path: ironfish-wasm/target + key: ${{ runner.os }}-stable-target-${{ hashFiles('**/Cargo.lock') }} + + - name: Restore Yarn cache + id: yarn-cache + uses: actions/cache@v2 + with: + path: | + node_modules + */*/node_modules + key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }} + + - name: Build Wasm packages + run: cd ironfish-wasm && yarn build + + - name: Install packages + if: steps.yarn-cache.outputs.cache-hit != 'true' + run: yarn --non-interactive --frozen-lockfile + + - name: Lint + run: yarn lint + + test: + name: Test + + runs-on: ubuntu-latest + + steps: + - name: Check out Git repository + uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: '14.16.0' + + - name: Use wasm-pack + uses: actions-rs/install@v0.1 + with: + crate: wasm-pack + version: latest + use-tool-cache: true + + - name: Cache 
Cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache Cargo index + uses: actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-stable-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache Cargo build + uses: actions/cache@v1 + with: + path: ironfish-wasm/target + key: ${{ runner.os }}-stable-target-${{ hashFiles('**/Cargo.lock') }} + + - name: Restore Yarn cache + id: yarn-cache + uses: actions/cache@v2 + with: + path: | + node_modules + */*/node_modules + key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }} + + - name: Build Wasm packages + run: cd ironfish-wasm && yarn build + + - name: Install packages + if: steps.yarn-cache.outputs.cache-hit != 'true' + run: yarn --non-interactive --frozen-lockfile + + - name: Run tests + run: yarn test:coverage + + - name: Upload coverage + run: CODECOV_TOKEN=${{ secrets.CODECOV_TOKEN }} ROOT_PATH=$GITHUB_WORKSPACE/ yarn coverage:upload + + testslow: + name: Slow Tests + + runs-on: ubuntu-latest + + steps: + - name: Check out Git repository + uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: '14.16.0' + + - name: Use wasm-pack + uses: actions-rs/install@v0.1 + with: + crate: wasm-pack + version: latest + use-tool-cache: true + + - name: Cache Cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache Cargo index + uses: actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-stable-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache Cargo build + uses: actions/cache@v1 + with: + path: ironfish-wasm/target + key: ${{ runner.os }}-stable-target-${{ hashFiles('**/Cargo.lock') }} + + - name: Restore Yarn cache + id: yarn-cache + uses: actions/cache@v2 + with: + path: | + node_modules + */*/node_modules + key: 
${{ runner.os }}-${{ hashFiles('**/yarn.lock') }} + + - name: Build Wasm packages + run: cd ironfish-wasm && yarn build + + - name: Install packages + if: steps.yarn-cache.outputs.cache-hit != 'true' + run: yarn --non-interactive --frozen-lockfile + + - name: Run slow tests & coverage + run: yarn test:slow:coverage + + - name: Upload coverage + run: CODECOV_TOKEN=${{ secrets.CODECOV_TOKEN }} ROOT_PATH=$GITHUB_WORKSPACE/ yarn coverage:upload diff --git a/.github/workflows/deploy-api.yml b/.github/workflows/deploy-api.yml new file mode 100644 index 0000000000..904387f566 --- /dev/null +++ b/.github/workflows/deploy-api.yml @@ -0,0 +1,36 @@ +name: Deploy API Image +on: workflow_dispatch + +jobs: + Deploy: + name: Deploy + runs-on: ubuntu-latest + + steps: + - name: Check out Git repository + uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: '14.16.0' + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-2 + + - name: Login to AWS Registry + run: aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin $AWS_BLOCK_API_REGISTRY_URL + env: + AWS_BLOCK_API_REGISTRY_URL: ${{ secrets.AWS_BLOCK_API_REGISTRY_URL }} + + - name: Build API Image + run: ./ironfish-http-api/scripts/build-docker.sh + + - name: Deploy API Image + run: ./ironfish-http-api/scripts/deploy-docker.sh + env: + AWS_BLOCK_API_REGISTRY_URL: ${{ secrets.AWS_BLOCK_API_REGISTRY_URL }} diff --git a/.github/workflows/deploy-blockexplorer.yml b/.github/workflows/deploy-blockexplorer.yml new file mode 100644 index 0000000000..ab257b8844 --- /dev/null +++ b/.github/workflows/deploy-blockexplorer.yml @@ -0,0 +1,36 @@ +name: Deploy Block Explorer Image +on: workflow_dispatch + +jobs: + Deploy: + name: Deploy + runs-on: ubuntu-latest + + steps: + - name: 
Check out Git repository + uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: '14.16.0' + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-2 + + - name: Login to AWS Registry + run: aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin $AWS_BLOCK_EXPLORER_REGISTRY_URL + env: + AWS_BLOCK_EXPLORER_REGISTRY_URL: ${{ secrets.AWS_BLOCK_EXPLORER_REGISTRY_URL }} + + - name: Build Rosetta Image + run: ./ironfish-rosetta-api/scripts/build-docker.sh + + - name: Deploy Rosetta Image + run: ./ironfish-rosetta-api/scripts/deploy-docker.sh + env: + AWS_BLOCK_EXPLORER_REGISTRY_URL: ${{ secrets.AWS_BLOCK_EXPLORER_REGISTRY_URL }} diff --git a/.github/workflows/deploy-bootstrap.yml b/.github/workflows/deploy-bootstrap.yml new file mode 100644 index 0000000000..b40803a90c --- /dev/null +++ b/.github/workflows/deploy-bootstrap.yml @@ -0,0 +1,36 @@ +name: Deploy Bootstrap Image +on: workflow_dispatch + +jobs: + Deploy: + name: Deploy + runs-on: ubuntu-latest + + steps: + - name: Check out Git repository + uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: '14.16.0' + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-2 + + - name: Login to AWS Registry + run: aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin $AWS_REGISTRY_URL + env: + AWS_REGISTRY_URL: ${{ secrets.AWS_REGISTRY_URL }} + + - name: Build Bootstrap Image + run: ./ironfish-cli/scripts/build-docker.sh + + - name: Deploy Bootstrap Image + run: ./ironfish-cli/scripts/deploy-docker.sh + 
env: + AWS_REGISTRY_URL: ${{ secrets.AWS_REGISTRY_URL }} diff --git a/.github/workflows/deploy-brew.yml b/.github/workflows/deploy-brew.yml new file mode 100644 index 0000000000..ec57ff0dd8 --- /dev/null +++ b/.github/workflows/deploy-brew.yml @@ -0,0 +1,63 @@ +name: Deploy Brew Tap Release +on: workflow_dispatch + +jobs: + Deploy: + name: Deploy + runs-on: macos-10.15 + + steps: + - name: Check out Git repository + uses: actions/checkout@v2 + + - name: "Use Rust Toolchain" + uses: actions-rs/toolchain@v1 + with: + components: rustfmt, clippy + + - name: Use wasm-pack + uses: actions-rs/install@v0.1 + with: + crate: wasm-pack + version: latest + use-tool-cache: true + + - name: Cache Cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache Cargo index + uses: actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-stable-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache Cargo build + uses: actions/cache@v1 + with: + path: ironfish-wasm/target + key: ${{ runner.os }}-stable-target-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache Ironfish CLI Build + id: cache-ironfish-cli-build + uses: actions/cache@v1 + with: + path: ironfish-cli/build.cli/ironfish-cli.tar.gz + key: ${{ github.sha }} + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: '14.16.0' + + - name: Build Ironfish CLI + if: steps.cache-ironfish-cli-build.outputs.cache-hit != 'true' + run: ./ironfish-cli/scripts/build.sh + + - name: Deploy Ironfish CLI Brew + run: ./ironfish-cli/scripts/deploy-brew.sh + env: + BREW_GITHUB_USERNAME: ${{ secrets.BREW_GITHUB_USERNAME }} + BREW_GITHUB_TOKEN: ${{ secrets.BREW_GITHUB_TOKEN }} diff --git a/.github/workflows/rust_ci.yml b/.github/workflows/rust_ci.yml new file mode 100644 index 0000000000..9d7459cf57 --- /dev/null +++ b/.github/workflows/rust_ci.yml @@ -0,0 +1,119 @@ +on: + push: + paths: + - 
'ironfish-rust/**' + - 'ironfish-wasm/**' + - 'rust-toolchain' + +name: Rust CI + +jobs: + ironfish_rust: + name: ironfish-rust + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + components: rustfmt, clippy + - name: Cache Cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + - name: Cache Cargo index + uses: actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-stable-cargo-index-${{ hashFiles('**/Cargo.lock') }} + - name: Cache Cargo build + uses: actions/cache@v1 + with: + path: ironfish-rust/target + key: ${{ runner.os }}-stable-target-${{ hashFiles('**/Cargo.lock') }} + + - name: Check for license headers + run: ./ci/lintHeaders.sh ./ironfish-rust/src *.rs + + # Build & Run test & Collect Code coverage + - name: Run cargo-tarpaulin on ironfish-rust + uses: actions-rs/tarpaulin@v0.1 + with: + version: '0.15.0' + args: --manifest-path ironfish-rust/Cargo.toml --release --all-features -- --test-threads 1 + + # Upload code coverage to Codecov + - name: Upload to codecov.io + uses: codecov/codecov-action@v1.0.2 + with: + token: ${{secrets.CODECOV_TOKEN}} + flags: ironfish-rust + + # fmt + - uses: actions-rs/cargo@v1 + name: "`cargo fmt` check on ironfish-rust" + with: + command: fmt + args: --manifest-path ironfish-rust/Cargo.toml --all -- --check + + # clippy + - uses: actions-rs/cargo@v1 + name: "Clippy check on ironfish-rust" + with: + command: clippy + args: --manifest-path ironfish-rust/Cargo.toml -- -D warnings + + ironfish_wasm: + name: ironfish-wasm + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + components: rustfmt, clippy + - name: Cache Cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + - name: Cache Cargo index + uses: 
actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-stable-cargo-index-${{ hashFiles('**/Cargo.lock') }} + - name: Cache Cargo build + uses: actions/cache@v1 + with: + path: ironfish-wasm/target + key: ${{ runner.os }}-stable-target-${{ hashFiles('**/Cargo.lock') }} + + - name: Check for license headers + run: ./ci/lintHeaders.sh ./ironfish-wasm/src *.rs + + # Build & Run test & Collect Code coverage + - name: Run cargo-tarpaulin on ironfish-wasm + uses: actions-rs/tarpaulin@v0.1 + with: + version: '0.15.0' + args: --manifest-path ironfish-wasm/Cargo.toml --release --all-features -- --test-threads 1 + + # upload code coverage to Codecov + - name: Upload to codecov.io + uses: codecov/codecov-action@v1.0.2 + with: + token: ${{secrets.CODECOV_TOKEN}} + flags: ironfish-wasm + + # fmt + - uses: actions-rs/cargo@v1 + name: "`cargo fmt` check on ironfish-wasm" + with: + command: fmt + args: --manifest-path ironfish-wasm/Cargo.toml --all -- --check + + # clippy + - uses: actions-rs/cargo@v1 + name: "Clippy check on ironfish-wasm" + with: + command: clippy + args: --manifest-path ironfish-wasm/Cargo.toml -- -D warnings diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000..06b0dfc604 --- /dev/null +++ b/.gitignore @@ -0,0 +1,44 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
+ +# dependencies +**/node_modules + +# testing +**/coverage +testdbs + +# production +**/build + +# misc +.DS_Store +.env +.env.local +.env.development.local +.env.test.local +.env.production.local +**/*.test.js +*/**/yarn.lock + +# logs +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* + +# Yarn Integrity file +.yarn-integrity + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + +# rust build artifacts +**/*/target/* + +# ironfish-cli +bin/ironfish-cli/databases +.dockerignore diff --git a/.npmrc b/.npmrc new file mode 100644 index 0000000000..cffe8cdef1 --- /dev/null +++ b/.npmrc @@ -0,0 +1 @@ +save-exact=true diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..5d5197ce63 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,76 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at contact@ironfish.network. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..de4513af7a --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,88 @@ +# Contributing + +Thank you for considering contributing to the Iron Fish project. We want to make contributing to this project as easy and transparent as possible, whether it's about: + +* Discussing the current state of the code +* Documenting the code +* Reporting a bug +* Reporting a security threat +* Submitting a fix +* Suggesting a new feature + +We welcome contributions from anyone on the internet, and are grateful for even a one-word correction! Note that we have a [code of conduct](./CODE_OF_CONDUCT.md), please follow it in all your interactions with the project. + + +Thanks in advance for your help. + + +## We develop with Github + +We use Github to host code, to track issues and feature requests, as well as accept pull requests. 
+ + +## Pull Requests Guidelines + +Pull requests are the best way to propose a new change to the codebase (we use the classic [Github Flow](https://guides.github.com/introduction/flow/index.html)). + +To create a new pull request: +1. Fork the repo and check out a new branch from `master`. +2. Add tests - if your code change doesn't require a test change, please explain why in the PR description. +3. Update the documentation - Especially if you've changed APIs or created new functions. +4. Ensure the test suite passes by running `yarn test`. +5. Make sure your code lints by running `yarn lint`. +6. Once 4 & 5 are passing, create a new pull request on Github. +7. Add the right label to your PR `documentation`, `bug`, `security-issue`, or `enhancement`. +8. Add a description of what the PR is changing: + * What problem is the PR solving + * Explain if it's adding a breaking change for clients + * Explain how you've tested your change + +Once the PR is created, one of the maintainers will review it and merge it into the master branch. + +If you are thinking of working on a complex change, do not hesitate to discuss the change you wish to make via a Github Issue. You can also request feedback early, by opening a WIP pull request or discuss with a maintainer to ensure your work is in line with the philosophy and roadmap of Iron Fish. + + +## Where to start + +Please read our [README.md](./README.md) first, to learn how to set up Iron Fish. + +If you don't know what contribution you can work on, here are a few suggestions: +* Take a look at our current [list of issues](https://github.com/iron-fish/ironfish/issues). Update the issue if you are interested in working on it. +* Take a look at our current [pull requests](https://github.com/iron-fish/ironfish/pulls) and help review them. +* Help us add new tests. More testing allows everyone to ship quality code faster. 
+* Write documentation or fix the existing documentation +* If you still don't know what could be a good task for you, do not hesitate to contact us. + + +## Testing + +You can run the entire test suite by running `yarn test` on our TypeScript codebase. +You can run the test suite by entering the command `cargo test` for each package (`ironfish-rust` or `ironfish-wasm`) within our Rust codebase. + + +## Continuous integration + +After creating a PR on Github, the code will be tested automatically by GitHub Actions. The tests can take up to 15 minutes to pass. We ask you to test your code on your machine before submitting a PR. + + +## Style Guide + +Iron Fish uses `eslint` and `prettier` to maintain consistent formatting on the TypeScript codebase. +For the Rust codebase, we are using `rustfmt`. + +Please run them before submitting a change. + + +# Licensing + +Any contribution will be under the [MPL-2.0](https://www.mozilla.org/en-US/MPL/2.0/) Software License. +When you submit a code change, your submissions are understood to be under the same license that covers the project. + +Please contact us if this is a concern for you. + + +# Contact Us + +In case of problems with trying to contribute to Iron Fish, you can contact us: +* On [Discord](https://discord.gg/H7Mk3qacyM) +* Via [email](mailto:contact@ironfish.network) diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..a612ad9813 --- /dev/null +++ b/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. 
"Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. 
"Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. 
+Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. 
However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. 
Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. 
* +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/README.md b/README.md new file mode 100644 index 0000000000..d50a7ff805 --- /dev/null +++ b/README.md @@ -0,0 +1,67 @@ +# Iron Fish + +The monorepo for all Iron Fish TypeScript & Rust code. + +See https://ironfish.network + +[![codecov](https://codecov.io/gh/iron-fish/ironfish/branch/master/graph/badge.svg?token=PCSVEVEW5V)](https://codecov.io/gh/iron-fish/ironfish) + +## Development + +### Initial setup + +1. Install [Node.js 14.x](https://nodejs.org/en/download/) +1. Install [Rust](https://www.rust-lang.org/learn/get-started). +1. Install [yarn](https://classic.yarnpkg.com/en/docs/install). +1. Run `cargo install wasm-pack` to install the WebAssembly wrapper generator. +1. Run `yarn` from the root directory to install packages. + +### Usage
Once your environment is set up - you can run the CLI by following [these directions](https://github.com/iron-fish/ironfish/tree/master/ironfish-cli). + +### Tests
Slow tests that are going to be memory intensive should use the extension `test.slow.ts` they will be run in a separate CI. + +#### Running Tests
1. To test the entire monorepo: + a. Run `yarn test` at the root of the repository + b. Run `yarn test:coverage` at the root of the repository for test and coverage + c. Run `yarn test:slow:coverage` at the root of the repository to include slow tests +2. To test a specific project + a. Run `yarn test` at the root of the project + b. Run `yarn test:watch` if you want the tests to run on change + c. 
Run `test:coverage:html` if you want to export the coverage in an easy to use format (open the index.html file in the coverage folder of the project ) + +### Structure of the repository + +Here's an overview of the main packages in the repository + +#### ironfish-cli: +- The main client for Iron Fish as of today. It is a command line interface based on Node. Allows to sync a full node, start mining, and send or receive payments. More details on [our documentation](https://ironfish.network/docs/onboarding/iron-fish-tutorial). + +#### ironfish-http-api: +- API hosted on Iron Fish servers for the Iron Fish faucet. + +#### ironfish-rosetta-api: +- API hosted on Iron Fish servers for the block explorer. The code of the block explorer client can be found [here](https://github.com/iron-fish/block-explorer). + +#### ironfish-rust: +- Core API for interacting with the transactions and chain and using ZKP. + +#### ironfish-wasm: +- Rust wrapper for creating accounts and transactions to be converted into WASM. + +#### ironfish: +- `anchorChain` maintains the two global merkle trees of notes and + nullifiers and keeps those trees in sync with the heaviest chain. + "In sync" means that the commitments stored on the head of the heaviest + chain in the blockchain are the roots of the merkle trees at the time + the block was added. +- `network` is a general-purpose p2p library that supports gossip and + Rpc style messages. It is an opinionated library that runs primarily + over WebRTC with support from websockets. It can be run in either + a node or browser environment. +- `captain` is a coordination library that is primarily responsible for + using network messages to maintain the trees and blockchain. +- `ironfish` is the ironfish node library that uses captain to interact + with the ironfish p2p network for mining blocks, and spending notes. It also contains the account store and config. 
diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..876f7b37ef --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,5 @@ +# Security Policy + +## Reporting a Vulnerability + +Please do not file a public issue on Github mentioning the vulnerability. To find out how to disclose a vulnerability, please email us at contact@ironfish.network. diff --git a/ci/lintHeaders.sh b/ci/lintHeaders.sh new file mode 100755 index 0000000000..0e3459a369 --- /dev/null +++ b/ci/lintHeaders.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +# Finds files in a given directory with a given file extension that don't have +# an MPL license header. +# $ lintHeaders ./src *.rs + +license1="/* This Source Code Form is subject to the terms of the Mozilla Public" +license2=" * License, v. 2.0. If a copy of the MPL was not distributed with this" +license3=" * file, You can obtain one at https://mozilla.org/MPL/2.0/. */" + +files=$(find $1 -type f -name '*.rs') +result=0 + +headerLineNumbers() { + grep -Fn -e "$license1" -e "$license2" -e "$license3" "$1" | cut -f1 -d: +} + +expectedHeaderLineNumbers='1 +2 +3' + +for file in $files; do + if ! 
[ "$(headerLineNumbers $file)" = "$expectedHeaderLineNumbers" ]; then + echo "$file" + result=1 + fi +done + +exit $result diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000000..05eef0dfc2 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,71 @@ +ignore: + - config/* + - ironfish-wasm/* + - ironfish/src/aaptain/testUtilities/* +comment: false +github_checks: + annotations: false +coverage: + fixes: + - "/home/runner/work/ironfish/ironfish/::/" + status: + patch: + default: + target: auto + threshold: 50% + project: + default: + target: 63% + ironfish-rust: + target: 53% + flags: + - ironfish-rust + ironfish-cli: + target: 50% + flags: + - ironfish-cli + ironfish: + target: 60% + flags: + - ironfish + ironfish-http-api: + target: 70% + flags: + - ironfish-http-api + ironfish-rosetta-api: + target: 75% + flags: + - ironfish-rosetta-api + event: + target: 84% + flags: + - event + logger: + target: 90% + flags: + - logger + serde: + target: 90% + flags: + - serde +flags: + ironfish-rust: + paths: + - ironfish-rust + carryforward: true + ironfish-cli: + paths: + - ironfish-cli + carryforward: true + ironfish: + paths: + - ironfish + carryforward: true + ironfish-http-api: + paths: + - ironfish-http-api + carryforward: true + ironfish-rosetta-api: + paths: + - ironfish-rosetta-api + carryforward: true diff --git a/config/eslint-config-ironfish-react/index.js b/config/eslint-config-ironfish-react/index.js new file mode 100644 index 0000000000..f19c83d881 --- /dev/null +++ b/config/eslint-config-ironfish-react/index.js @@ -0,0 +1,15 @@ +'use strict' + +module.exports = { + extends: [ + 'ironfish', + 'plugin:react/recommended', + 'plugin:react-hooks/recommended', + ], + + settings: { + react: { + version: 'detect', + } + } +} diff --git a/config/eslint-config-ironfish-react/package.json b/config/eslint-config-ironfish-react/package.json new file mode 100644 index 0000000000..d288fc45f6 --- /dev/null +++ b/config/eslint-config-ironfish-react/package.json @@ 
-0,0 +1,20 @@ +{ + "name": "eslint-config-ironfish-react", + "version": "0.1.0", + "private": true, + "author": "Iron Fish (https://ironfish.network)", + "license": "MPL-2.0", + "files": [ + "index.js", + "prettierrc.js" + ], + "peerDependencies": { + "eslint-config-ironfish": "*", + "eslint-plugin-react": "7.20.6", + "eslint-plugin-react-hooks": "4.1.2" + }, + "resolutions": { + "node-forge": "0.10.0", + "object-path": "^0.11.4" + } +} diff --git a/config/eslint-config-ironfish-react/prettierrc.js b/config/eslint-config-ironfish-react/prettierrc.js new file mode 100644 index 0000000000..7110097ceb --- /dev/null +++ b/config/eslint-config-ironfish-react/prettierrc.js @@ -0,0 +1,3 @@ +module.exports = { + ...require('eslint-config-ironfish/prettierrc'), +} diff --git a/config/eslint-config-ironfish/.eslintignore b/config/eslint-config-ironfish/.eslintignore new file mode 100644 index 0000000000..378eac25d3 --- /dev/null +++ b/config/eslint-config-ironfish/.eslintignore @@ -0,0 +1 @@ +build diff --git a/config/eslint-config-ironfish/index.js b/config/eslint-config-ironfish/index.js new file mode 100644 index 0000000000..ad446508db --- /dev/null +++ b/config/eslint-config-ironfish/index.js @@ -0,0 +1,81 @@ +'use strict' + +module.exports = { + root: true, + + ignorePatterns: ["openapi.d.ts"], + + env: { + es6: true, + node: true, + }, + + parserOptions: { + ecmaVersion: '2018', + sourceType: 'module', + }, + + plugins: ['jest', 'prettier', 'header'], + + extends: [ + 'eslint:recommended', + 'plugin:prettier/recommended', + 'prettier', + 'prettier/@typescript-eslint', + ], + + overrides: [ + { + files: ['**/*.ts', '**/*.tsx'], + parser: '@typescript-eslint/parser', + parserOptions: { + project: './tsconfig.eslint.json', + tsconfigRootDir: __dirname, + }, + plugins: ['@typescript-eslint'], + extends: [ + 'plugin:@typescript-eslint/recommended', + 'plugin:@typescript-eslint/recommended-requiring-type-checking', + ], + }, + { + files: ['**/*.{spec,test}.*'], + extends: 
['plugin:jest/recommended'], + rules: { + // It's common to want to mock functions with noops. This could be + // turned off for non-test code as well if it's a common pattern. + '@typescript-eslint/no-empty-function': 'off', + // Jest's asymmetric matchers (e.g expect.any(Date)) are typed with + // any return values. Fixing this either requires casting every use + // the matchers to unknown, or defining a custom matcher, which seems + // like too much friction for test-writing. + '@typescript-eslint/no-unsafe-assignment': 'off', + // It's common to want to mock unbound methods. + "@typescript-eslint/unbound-method": "off", + // Using try catch with expect.assertsions(n) is the recommended way to + // test async code where you need a reference to the error to validate the + // type and properties + "jest/no-conditional-expect": "off", + "jest/no-try-expect": "off", + } + } + ], + + rules: { + // Seems to be needed to allow for custom jest matchers + '@typescript-eslint/no-namespace': ['error', { allowDeclarations: true }], + + // Allows for using _ to strip off variables via destructuring, e.g. + // const { ignore: _ignored, ...rest } = node + '@typescript-eslint/no-unused-vars': ['error', { varsIgnorePattern: '^_', argsIgnorePattern: '^_' }], + + 'header/header': [2, 'block', [ + ' This Source Code Form is subject to the terms of the Mozilla Public', + ' * License, v. 2.0. If a copy of the MPL was not distributed with this', + ' * file, You can obtain one at https://mozilla.org/MPL/2.0/. ', + ]], + + // Prefer using the Logger library rather than directly using the console for output. 
+ 'no-console': 'error' + }, +} diff --git a/config/eslint-config-ironfish/package.json b/config/eslint-config-ironfish/package.json new file mode 100644 index 0000000000..2de400cb66 --- /dev/null +++ b/config/eslint-config-ironfish/package.json @@ -0,0 +1,24 @@ +{ + "name": "eslint-config-ironfish", + "version": "0.1.0", + "private": true, + "author": "Iron Fish (https://ironfish.network)", + "license": "MPL-2.0", + "files": [ + "index.js", + "prettierrc" + ], + "peerDependencies": { + "@typescript-eslint/eslint-plugin": "4.9.0", + "@typescript-eslint/parser": "4.9.0", + "eslint": "7.12.0", + "eslint-config-prettier": "6.11.0", + "eslint-plugin-jest": "24.0.1", + "eslint-plugin-prettier": "3.1.4", + "prettier": "2.1.2" + }, + "resolutions": { + "node-forge": "0.10.0", + "object-path": "^0.11.4" + } +} diff --git a/config/eslint-config-ironfish/prettierrc.js b/config/eslint-config-ironfish/prettierrc.js new file mode 100644 index 0000000000..fe5c6c59fe --- /dev/null +++ b/config/eslint-config-ironfish/prettierrc.js @@ -0,0 +1,8 @@ +module.exports = { + semi: false, + trailingComma: "all", + singleQuote: true, + jsxSingleQuote: true, + printWidth: 96, + tabWidth: 2, +}; diff --git a/config/jest.config.base.js b/config/jest.config.base.js new file mode 100644 index 0000000000..f92fdc022a --- /dev/null +++ b/config/jest.config.base.js @@ -0,0 +1,9 @@ +module.exports = { + preset: "ts-jest", + testMatch: ["**/*.test.ts", "**/*.test.slow.ts", "**/*.test.perf.ts"], + testEnvironment: "../config/jestNodeEnvironment", + watchPlugins: ["../config/jestWatchPlugin"], + coverageProvider: "v8", + coverageReporters: ["text-summary", "json", "clover", "text"], + testPathIgnorePatterns: ['.*\\.test\\.slow\\.ts$', '.*\\.test\\.perf\\.ts$'], +}; diff --git a/config/jestNodeEnvironment.js b/config/jestNodeEnvironment.js new file mode 100644 index 0000000000..2616285572 --- /dev/null +++ b/config/jestNodeEnvironment.js @@ -0,0 +1,27 @@ +// Fixes an issue with jest+node where the 
ArrayBuffer instance within a vm context doesn't pass +// an instanceof check against an ArrayBuffer in the global scope. +// Taken from https://github.com/facebook/jest/issues/7780#issuecomment-615890410 +"use strict"; + +const NodeEnvironment = require("jest-environment-node"); + +class CustomNodeEnvironment extends NodeEnvironment { + constructor(config) { + super( + Object.assign({}, config, { + globals: Object.assign({}, config.globals, { + Uint32Array: Uint32Array, + Uint8Array: Uint8Array, + ArrayBuffer: ArrayBuffer, + }), + }), + ); + } + + async setup() {} + + async teardown() {} + +} + +module.exports = CustomNodeEnvironment; \ No newline at end of file diff --git a/config/jestWatchPlugin.js b/config/jestWatchPlugin.js new file mode 100644 index 0000000000..2c3003717a --- /dev/null +++ b/config/jestWatchPlugin.js @@ -0,0 +1,90 @@ +/** + * Adds a 'b' command to the jest watcher to rebuild TypeScript project references. + */ + +const ts = require('typescript') + +// Given a diagnostic, returns info that can be logged to the console. +function getTextForDiagnostic(diagnostic) { + if (diagnostic.file) { + const { line, character } = diagnostic.file.getLineAndCharacterOfPosition( + diagnostic.start + ) + const message = ts.flattenDiagnosticMessageText( + diagnostic.messageText, + "\n" + ) + // TODO: Colorize this like standard tsc output. ts provides a formatter, + // but on last attempt to use it, the jest watcher ate the output. 
+ return `${diagnostic.file.fileName} (${line + 1},${character + 1}): ${message}` + } else { + return `${ts.flattenDiagnosticMessageText(diagnostic.messageText, "\n")}` + } +} + +// Reports errors +function reportDiagnostic(diagnostic) { + console.error(getTextForDiagnostic(diagnostic)) +} +// Reports status, like 'Project needs to be built because output file does not exist' +// Not currently relevant to the plugin, but passed in so we don't lose the info +function reportSolutionBuilderStatus(diagnostic) { + console.info(getTextForDiagnostic(diagnostic)) +} +// Reports summary with number of errors +function reportErrorSummary(errorCount) { + console.info(errorCount === 0 + ? `References built successfully.` + : `Build complete with ${errorCount} error${errorCount === 1 ? '' : 's'}.` + ) +} + +const host = ts.createSolutionBuilderHost( + ts.sys, + ts.createEmitAndSemanticDiagnosticsBuilderProgram, + reportDiagnostic, + reportSolutionBuilderStatus, + reportErrorSummary +) + +class TypescriptWatchPlugin { + getUsageInfo(globalConfig) { + return { + key: 'b', + prompt: 'build TypeScript project references', + } + } + + run(globalConfig, updateConfigAndRun) { + console.info('Building TypeScript project references...') + + return new Promise((resolve, reject) => { + const configPath = ts.findConfigFile(globalConfig.rootDir, ts.sys.fileExists) + if (!configPath) { + reject(`Could not find a valid 'tsconfig.json'.`) + } + + const solution = ts.createSolutionBuilder(host, [configPath], {}) + + const exitStatus = solution.buildReferences(configPath) + + if (exitStatus === 0) { + // Since we're just building references that aren't + // tracked by the test watcher, trigger a full test run. + if (globalConfig.watchAll === false) { + updateConfigAndRun({ mode: 'watchAll' }) + updateConfigAndRun({ mode: 'watch' }) + } else { + resolve(true) + return + } + } + + // We triggered a run if necessary, so always return false here. 
+ resolve(false) + }) + + } +} + +module.exports = TypescriptWatchPlugin diff --git a/config/tsconfig.base.json b/config/tsconfig.base.json new file mode 100644 index 0000000000..9e5cb96331 --- /dev/null +++ b/config/tsconfig.base.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "ES2017", + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "module": "commonjs", + "moduleResolution": "node", + "sourceMap": true, + "resolveJsonModule": true, + "composite": true, + "jsx": "react", + "declarationMap": true + } +} diff --git a/ironfish-cli/.eslintrc.js b/ironfish-cli/.eslintrc.js new file mode 100644 index 0000000000..4bcfdbe8a4 --- /dev/null +++ b/ironfish-cli/.eslintrc.js @@ -0,0 +1,9 @@ +module.exports = { + extends: ['ironfish'], + parserOptions: { + tsconfigRootDir: __dirname, + }, + rules: { + 'jest/no-standalone-expect': 'off', + }, +} diff --git a/ironfish-cli/.prettierrc.js b/ironfish-cli/.prettierrc.js new file mode 100644 index 0000000000..1ad9c111e4 --- /dev/null +++ b/ironfish-cli/.prettierrc.js @@ -0,0 +1 @@ +module.exports = 'eslint-config-ironfish/prettierrc' diff --git a/ironfish-cli/Dockerfile b/ironfish-cli/Dockerfile new file mode 100644 index 0000000000..f3b9bea17a --- /dev/null +++ b/ironfish-cli/Dockerfile @@ -0,0 +1,25 @@ +FROM node:14.16.0 as build +ENV PATH="/root/.cargo/bin:${PATH}" + +COPY ./ ./ + +RUN \ + apt-get update && \ + apt-get install jq rsync -y && \ + curl https://sh.rustup.rs -sSf | sh -s -- -y && \ + cargo install wasm-pack && \ + ./ironfish-cli/scripts/build.sh + +FROM node:14.16.0-slim +EXPOSE 8020:8020 +EXPOSE 9033:9033 +VOLUME /root/.ironfish +ENV NODE_ENV production + +WORKDIR /usr/src +COPY --from=build /ironfish-cli/build.cli/ironfish-cli ./app + +# TODO: use environment variables for this +WORKDIR /usr/src/app +ENTRYPOINT ["./bin/run"] +CMD 
["start", "--rpc.ipc", "--rpc.tcp"] diff --git a/ironfish-cli/README.md b/ironfish-cli/README.md new file mode 100644 index 0000000000..65f4b6fbe2 --- /dev/null +++ b/ironfish-cli/README.md @@ -0,0 +1,52 @@ +[![codecov](https://codecov.io/gh/iron-fish/ironfish/branch/master/graph/badge.svg?token=PCSVEVEW5V&flag=ironfish-cli)](https://codecov.io/gh/iron-fish/ironfish) + +The main entry point for an ironfish CLI that is capable of mining blocks and spending notes. It's created using the [oclif CLI framework](https://oclif.io) + +## Use Scenarios + +### Starting a single node +Run these command in the terminal: + +- `yarn start start` + +Interact with the node in a new tab: +- `yarn start accounts:balance` +- `yarn start faucet:giveme` +- `yarn start accounts:pay` + +### Mining +Then run these commands in two different terminals: + +- `yarn start start -d default -p 9033` +- `yarn start miners:start` + +You should see messages in the second terminal indicating that blocks are mined. + +### Multiple Nodes + +Run these commands in two different terminals: + +- `yarn start start -d default -p 9033` +- `yarn start start -d client -p 9034 -b ws://localhost:9033` + +You should see connection messages indicating that the two nodes are talking to each other. 
+ +### Multiple Nodes with Miners + +**Node 1** +```bash +# in tab 1 +yarn start:once start + +# in tab 2 +yarn start:once miners:start +``` + +**Node 2** +```bash +# in tab 3 +yarn start:once start --datadir ~/.ironfish2 --port 9034 --bootstrap ws://localhost:9033 + +# in tab 4 +yarn start:once miners:start --datadir ~/.ironfish2 +``` \ No newline at end of file diff --git a/ironfish-cli/bin/ironfish b/ironfish-cli/bin/ironfish new file mode 100755 index 0000000000..0a4ae0e956 --- /dev/null +++ b/ironfish-cli/bin/ironfish @@ -0,0 +1,45 @@ +#!/usr/bin/env bash +set -e +echoerr() { echo "$@" 1>&2; } + +get_script_dir () { + SOURCE="${BASH_SOURCE[0]}" + # While $SOURCE is a symlink, resolve it + while [ -h "$SOURCE" ]; do + DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + SOURCE="$( readlink "$SOURCE" )" + # If $SOURCE was a relative symlink (so no "/" as prefix, need to resolve it relative to the symlink base directory + [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" + done + DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + echo "$DIR" +} +DIR=$(get_script_dir) +CLI_HOME=$(cd && pwd) +XDG_DATA_HOME=${XDG_DATA_HOME:="$CLI_HOME/.local/share"} +CLIENT_HOME=${IRONFISH_OCLIF_CLIENT_HOME:=$XDG_DATA_HOME/ironfish/client} +BIN_PATH="$CLIENT_HOME/bin/ironfish" +if [ -z "$IRONFISH_REDIRECTED" ] && [ -x "$BIN_PATH" ] && [[ ! "$DIR/ironfish" -ef "$BIN_PATH" ]]; then + if [ "$DEBUG" == "*" ]; then + echoerr "$BIN_PATH" "$@" + fi + IRONFISH_BINPATH="$BIN_PATH" IRONFISH_REDIRECTED=1 "$BIN_PATH" "$@" +else + export IRONFISH_BINPATH=${IRONFISH_BINPATH:="$DIR/ironfish"} + if [ -x "$(command -v "$XDG_DATA_HOME/oclif/node/node-custom")" ]; then + NODE="$XDG_DATA_HOME/oclif/node/node-custom" + elif [ -x "$(command -v "$DIR/node")" ]; then + NODE="$DIR/node" + elif [ -x "$(command -v "$XDG_DATA_HOME/oclif/node/node-12.18.4")" ]; then + NODE="$XDG_DATA_HOME/oclif/node/node-12.18.4" + elif [ -x "$(command -v node)" ]; then + NODE=node + else + echoerr 'Error: node is not installed.' 
>&2 + exit 1 + fi + if [ "$DEBUG" == "*" ]; then + echoerr IRONFISH_BINPATH="$IRONFISH_BINPATH" "$NODE" "$DIR/run" "$@" + fi + "$NODE" "$DIR/run" "$@" +fi diff --git a/ironfish-cli/bin/run b/ironfish-cli/bin/run new file mode 100755 index 0000000000..e7d21a2f9e --- /dev/null +++ b/ironfish-cli/bin/run @@ -0,0 +1,7 @@ +#!/usr/bin/env node + +require('segfault-handler').registerHandler() + +require('@oclif/command').run() +.then(require('@oclif/command/flush')) +.catch(require('@oclif/errors/handle')) diff --git a/ironfish-cli/bin/run.cmd b/ironfish-cli/bin/run.cmd new file mode 100644 index 0000000000..968fc30758 --- /dev/null +++ b/ironfish-cli/bin/run.cmd @@ -0,0 +1,3 @@ +@echo off + +node "%~dp0\run" %* diff --git a/ironfish-cli/jest.config.js b/ironfish-cli/jest.config.js new file mode 100644 index 0000000000..515c673d7f --- /dev/null +++ b/ironfish-cli/jest.config.js @@ -0,0 +1,13 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +const base = require('../config/jest.config.base') +const pkg = require('./package.json') + +module.exports = { + ...base, + testEnvironment: '../config/jestNodeEnvironment', + watchPlugins: ['../config/jestWatchPlugin'], + displayName: pkg.name, + globalSetup: './jest.setup.js', +} diff --git a/ironfish-cli/jest.setup.js b/ironfish-cli/jest.setup.js new file mode 100644 index 0000000000..1b1fda1d14 --- /dev/null +++ b/ironfish-cli/jest.setup.js @@ -0,0 +1,9 @@ +const fs = require('fs') + +module.exports = async () => { + if (fs.existsSync('./testdbs')) { + fs.rmdirSync('./testdbs', { recursive: true }) + } + + fs.mkdirSync('./testdbs') +} diff --git a/ironfish-cli/package.json b/ironfish-cli/package.json new file mode 100644 index 0000000000..f26aca7387 --- /dev/null +++ b/ironfish-cli/package.json @@ -0,0 +1,108 @@ +{ + "name": "ironfish-cli", + "version": "0.0.0", + "description": "Command line Iron Fish node", + "author": "Iron Fish (https://ironfish.network)", + "engines": { + "node": "14.x" + }, + "devDependencies": { + "@oclif/dev-cli": "^1", + "@oclif/test": "^1", + "@types/blessed": "0.1.17", + "@types/ws": "^7.2.6", + "chai": "4.2.0", + "eslint-config-ironfish": "*", + "jest": "~26.6.3", + "oclif": "1.16.1", + "tsc-watch": "4.2.9", + "typescript": "4.1.2", + "yarn": "^1.22.10" + }, + "scripts": { + "build": "tsc -b", + "lint": "tsc -b && eslint --ext .ts,.tsx,.js,.jsx src/", + "lint:fix": "tsc -b && eslint --ext .ts,.tsx,.js,.jsx src/ --fix", + "start": "node start", + "start:once": "yarn build && yarn start:js", + "start:js": "OCLIF_TS_NODE=0 IRONFISH_DEBUG=1 node bin/run", + "test": "yarn clean && tsc -b && tsc -b tsconfig.test.json && jest", + "test:coverage:html": "tsc -b tsconfig.test.json && jest --coverage --coverage-reporters html --testPathIgnorePatterns", + "test:watch": "tsc -b tsconfig.test.json && jest --watch --coverage false", + "postpack": "rm -f oclif.manifest.json", + "clean": "rm -rf build", + "pack": "oclif-dev pack", + "prepack": 
"yarn clean && yarn build && oclif-dev manifest && oclif-dev readme", + "oclif:posttest": "eslint . --ext .ts --config .eslintrc", + "oclif:test": "echo NO TESTS", + "oclif:version": "oclif-dev readme && git add README.md" + }, + "license": "MPL-2.0", + "dependencies": { + "@oclif/command": "^1", + "@oclif/config": "^1", + "@oclif/plugin-help": "3", + "@oclif/plugin-not-found": "1.2.4", + "blessed": "0.1.81", + "cli-ux": "^5.5.0", + "ironfish": "*", + "ironfish-wasm-nodejs": "*", + "json-colorizer": "2.2.2", + "segfault-handler": "1.3.0", + "tweetnacl": "1.0.3", + "wrtc": "^0.4.6", + "ws": "^7.3.1" + }, + "resolutions": { + "node-forge": "0.10.0", + "object-path": "^0.11.4" + }, + "oclif": { + "macos": { + "identifier": "network.ironfish.cli" + }, + "commands": "./build/commands", + "bin": "ironfish", + "plugins": [ + "@oclif/plugin-help", + "@oclif/plugin-not-found" + ], + "topics": { + "accounts": { + "description": "Create and delete accounts" + }, + "chain": { + "description": "Manage the blockchain" + }, + "config": { + "description": "Show and edit the node configuration" + }, + "faucet": { + "description": "Get coins to start using Iron Fish" + }, + "miners": { + "description": "Manage an Iron Fish miner" + }, + "peers": { + "description": "Manage the peers connected to this node" + } + } + }, + "bin": { + "ironfish": "./bin/run" + }, + "bugs": "https://github.com/iron-fish/ironfish/issues", + "files": [ + "/bin", + "/lib", + "/npm-shrinkwrap.json", + "/oclif.manifest.json" + ], + "homepage": "https://github.com/iron-fish/ironfish", + "keywords": [ + "oclif" + ], + "main": "build/index.js", + "repository": "iron-fish/ironfish", + "types": "build/index.d.ts" +} diff --git a/ironfish-cli/scripts/build-docker.sh b/ironfish-cli/scripts/build-docker.sh new file mode 100755 index 0000000000..9e43bac74d --- /dev/null +++ b/ironfish-cli/scripts/build-docker.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" +cd ../.. 
+ +echo "Building Docker Image" +cp .gitignore .dockerignore + +docker build . \ + --progress plain \ + --tag ironfish:latest \ + --file ironfish-cli/Dockerfile + +docker run \ + --interactive \ + --rm \ + ironfish:latest --version diff --git a/ironfish-cli/scripts/build.sh b/ironfish-cli/scripts/build.sh new file mode 100755 index 0000000000..b7fde21346 --- /dev/null +++ b/ironfish-cli/scripts/build.sh @@ -0,0 +1,74 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" +cd ../../ + +if ! command -v jq &> /dev/null; then + echo "jq is not installed but is required" + exit 1 +fi + +if ! command -v rsync &> /dev/null; then + echo "rsync is not installed but is required" + exit 1 +fi + +if ! command -v git &> /dev/null; then + echo "git is not installed but is required" + exit 1 +fi + +echo "Inserting GIT hash into ironfish/gitHash.ts" +GIT_HASH=$(git rev-parse --short HEAD) +echo "export default '$GIT_HASH'" >> ironfish/gitHash.ts + +echo "Removing lifecycle scripts" +cat <<< "$(jq 'del(.scripts.prebuild)' < package.json)" > package.json +cat <<< "$(jq 'del(.scripts.preinstall)' < package.json)" > package.json + +echo "Building WASM" +( cd ironfish-wasm && yarn run build:node ) + +echo "Installing from lockfile" +yarn --non-interactive --frozen-lockfile + +echo "Building all projects" +yarn build + +cd ironfish-cli +echo "Outputting build to $PWD/build.cli" +rm -rf build.cli +mkdir build.cli + +echo "Packing CLI" +yarn pack -f ./build.cli/packaged.tar.gz +cd build.cli +tar zxvf packaged.tar.gz + +echo "Installing production node_modules" +rm -rf ../../node_modules +cd ../.. 
+yarn --non-interactive --frozen-lockfile --production +cd ironfish-cli/build.cli + +cd package +echo "Copying build" +cp -R ../../build ./ + +echo "Copying node_modules" +rsync -L -avrq --exclude='ironfish-cli' ../../../node_modules ./ +# Copy node_modules from ironfish-cli folder into the production node_modules folder +# yarn --production seems to split some packages into different folders for some reason +cp -R ../../node_modules/* ./node_modules + +echo "" +if ! ./bin/run --version > /dev/null; then + echo "Failed to build ironfish" +else + echo "Ironfish CLI built successfully" +fi + +echo "Packaging build into ironfish-cli.tar.gz" +cd .. +mv package ironfish-cli +tar -cf ironfish-cli.tar.gz ironfish-cli diff --git a/ironfish-cli/scripts/deploy-brew.sh b/ironfish-cli/scripts/deploy-brew.sh new file mode 100755 index 0000000000..f76b7d00a9 --- /dev/null +++ b/ironfish-cli/scripts/deploy-brew.sh @@ -0,0 +1,54 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" +cd .. + +# This script will package the CLI for mac and upload a release asset +# to the latest release at https://github.com/iron-fish/homebrew-brew +# then prints out some extra steps to make the release public + +if [ -z "${AWS_ACCESS_KEY_ID-}" ]; then + echo "Set AWS_ACCESS_KEY_ID before running deploy-brew.sh" + exit 1 +fi + +if [ -z "${AWS_SECRET_ACCESS_KEY-}" ]; then + echo "Set AWS_SECRET_ACCESS_KEY before running deploy-brew.sh" + exit 1 +fi + +SOURCE_NAME=ironfish-cli.tar.gz +SOURCE_PATH=./build.cli/$SOURCE_NAME + +echo "Getting git hash" +GIT_HASH=$(git rev-parse --short HEAD) + +echo "Getting sha256" +UPLOAD_HASH=$(shasum -a 256 $SOURCE_PATH | awk '{print $1}') + +UPLOAD_NAME=ironfish-cli-$GIT_HASH.tar.gz +UPLOAD_URL=s3://ironfish-cli/$UPLOAD_NAME +PUBLIC_URL=https://ironfish-cli.s3.amazonaws.com/$UPLOAD_NAME + +echo "" +echo "GIT HASH: $GIT_HASH" +echo "SHA256: $UPLOAD_HASH" +echo "UPLOAD NAME: $UPLOAD_NAME" +echo "UPLOAD URL: $UPLOAD_URL" +echo "PUBLIC URL: $PUBLIC_URL" +echo 
"" + +if aws s3api head-object --bucket ironfish-cli --key $UPLOAD_NAME > /dev/null 2>&1 ; then + echo "Release already uploaded: $PUBLIC_URL" + exit 1 +fi + +echo "Uploading $SOURCE_NAME to $UPLOAD_URL" +aws s3 cp $SOURCE_PATH $UPLOAD_URL + +echo "" +echo "You are almost finished! To finish the process you need to update update url and sha256 in" +echo "https://github.com/iron-fish/homebrew-brew/blob/master/Formula/ironfish.rb" +echo "" +echo "URL = \"$PUBLIC_URL\"" +echo "SHA = \"$UPLOAD_HASH\"" diff --git a/ironfish-cli/scripts/deploy-docker.sh b/ironfish-cli/scripts/deploy-docker.sh new file mode 100755 index 0000000000..98cfa7c3c4 --- /dev/null +++ b/ironfish-cli/scripts/deploy-docker.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" + +if [ -z "${AWS_REGISTRY_URL-}" ]; then + echo "Set AWS_REGISTRY_URL before running deploy-docker.sh" + exit 1 +fi + +docker tag ironfish:latest ${AWS_REGISTRY_URL}/ironfish:latest +docker push ${AWS_REGISTRY_URL}/ironfish:latest diff --git a/ironfish-cli/scripts/run-network-macos.sh b/ironfish-cli/scripts/run-network-macos.sh new file mode 100755 index 0000000000..2e0ae8fac6 --- /dev/null +++ b/ironfish-cli/scripts/run-network-macos.sh @@ -0,0 +1,63 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" +cd .. + +# This script boots up 3 nodes and has them connect to each other +# node1 uses ~/.ironfish1 @ 9032 +# node2 uses ~/.ironfish2 @ 9033; connects to node1 +# node3 uses ~/.ironfish3 @ 9034; connects to node2 + +if [ ! 
"$(command -v "osascript")" ]; then + echo 'Your computer does not have osascript (Apple Script) installed' + exit 1 +fi + +rm -rf ~/.ironfish1/databases +rm -rf ~/.ironfish2/databases +rm -rf ~/.ironfish3/databases + +CWD="$(pwd)" +NODE1="yarn start:once start -v -p 9034 -w -n peer1 -b localhost:9033 --datadir ~/.ironfish1" +NODE2="yarn start:once start -v -p 9035 -w -n peer2 -b localhost:9033 --datadir ~/.ironfish2 --no-listen" +NODE3="yarn start:once start -v -p 9036 -w -n peer3 -b localhost:9033 --datadir ~/.ironfish3 --no-listen" +NODE2_LIST="yarn start:once peers:list -fenv --datadir ~/.ironfish1" + +osascript < | void + + async run(): Promise { + try { + await this.start() + } catch (error: unknown) { + if (hasUserResponseError(error)) { + this.log(error.codeMessage) + } else if (error instanceof ConnectionError) { + this.log(`Cannot connect to your node, start your node first.`) + } else throw error + } + + this.exit(0) + } + + async init(): Promise { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any + const commandClass = this.constructor as any + const { flags } = this.parse(commandClass) + + // Get the flags from the flag object which is unknown + const dataDirFlag = getFlag(flags, DataDirFlagKey) + const configFlag = getFlag(flags, ConfigFlagKey) + + const configOverrides: Partial = {} + + const databaseNameFlag = getFlag(flags, DatabaseFlagKey) + if (typeof databaseNameFlag === 'string') { + configOverrides.databaseName = databaseNameFlag + } + + const rpcConnectIpcFlag = getFlag(flags, RpcUseIpcFlagKey) + if (typeof rpcConnectIpcFlag === 'boolean') { + configOverrides.enableRpcIpc = rpcConnectIpcFlag + } + + const rpcConnectTcpFlag = getFlag(flags, RpcUseTcpFlagKey) + if (typeof rpcConnectTcpFlag === 'boolean') { + configOverrides.enableRpcTcp = rpcConnectTcpFlag + } + + const rpcTcpHostFlag = getFlag(flags, RpcTcpHostFlagKey) + if (typeof rpcTcpHostFlag === 'string') { + 
configOverrides.rpcTcpHost = rpcTcpHostFlag + } + + const rpcTcpPortFlag = getFlag(flags, RpcTcpPortFlagKey) + if (typeof rpcTcpPortFlag === 'number') { + configOverrides.rpcTcpPort = rpcTcpPortFlag + } + + const verboseFlag = getFlag(flags, VerboseFlagKey) + if (typeof verboseFlag === 'boolean' && verboseFlag) { + configOverrides.logLevel = '*:verbose' + } + + this.sdk = await IronfishSdk.init({ + configOverrides: configOverrides, + configName: typeof configFlag === 'string' ? configFlag : undefined, + dataDir: typeof dataDirFlag === 'string' ? dataDirFlag : undefined, + logger: this.logger, + }) + } + + listenForSignals(): void { + const signals: SIGNALS[] = ['SIGINT', 'SIGTERM', 'SIGUSR2'] + + for (const signal of signals) { + const gracefulShutdown = (signal: SIGNALS) => { + process.off(signal, gracefulShutdown) + + // Allow 3 seconds for graceful termination + setTimeout(() => { + this.log('Force closing after 3 seconds') + process.exit(1) + }, 3000).unref() + + this.closing = true + const promise = this.closeFromSignal(signal).catch((err) => { + this.logger.error('Failed to close', err) + }) + + void promise.then(() => { + process.exit(0) + }) + } + + process.on(signal, gracefulShutdown.bind(signal)) + } + } + + closeFromSignal(signal: SIGNALS): Promise { + throw new Error(`Not implemented closeFromSignal: ${signal}`) + } +} + +function getFlag(flags: unknown, flag: FLAGS): unknown | null { + return typeof flags === 'object' && flags != null && flag in flags + ? // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any + (flags as any)[flag] + : null +} diff --git a/ironfish-cli/src/commands/accounts/balance.ts b/ironfish-cli/src/commands/accounts/balance.ts new file mode 100644 index 0000000000..04e815f2d3 --- /dev/null +++ b/ironfish-cli/src/commands/accounts/balance.ts @@ -0,0 +1,49 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { IronfishCommand } from '../../command' +import { RemoteFlags } from '../../flags' +import { displayIronAmountWithCurrency, oreToIron } from '../../../../ironfish' + +export class BalanceCommand extends IronfishCommand { + static description = `Display the account balance` + + static flags = { + ...RemoteFlags, + } + + static args = [ + { + name: 'account', + parse: (input: string): string => input.trim(), + required: false, + description: 'name of the account to export', + }, + ] + + async start(): Promise { + const { args } = this.parse(BalanceCommand) + const account = args.account as string | undefined + + await this.sdk.client.connect() + + const response = await this.sdk.client.getAccountBalance({ + account: account, + }) + + const { confirmedBalance, unconfirmedBalance } = response.content + + this.log( + `The account balance is: ${displayIronAmountWithCurrency( + oreToIron(Number(unconfirmedBalance)), + true, + )}`, + ) + this.log( + `Amount available to spend: ${displayIronAmountWithCurrency( + oreToIron(Number(confirmedBalance)), + true, + )}`, + ) + } +} diff --git a/ironfish-cli/src/commands/accounts/create.test.ts b/ironfish-cli/src/commands/accounts/create.test.ts new file mode 100644 index 0000000000..fdc879f60b --- /dev/null +++ b/ironfish-cli/src/commands/accounts/create.test.ts @@ -0,0 +1,54 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { expect as expectCli, test } from '@oclif/test' +import cli from 'cli-ux' +import * as ironfishmodule from 'ironfish' + +describe('accounts:create command', () => { + let createAccount = jest.fn() + const use = jest.fn() + + const ironFishSdkBackup = ironfishmodule.IronfishSdk.init + + beforeEach(() => { + createAccount = jest.fn().mockReturnValue({ content: {} }) + ironfishmodule.IronfishSdk.init = jest.fn().mockImplementation(() => ({ + accounts: { + use, + storage: { configPath: '' }, + }, + client: { + connect: jest.fn(), + createAccount, + }, + })) + }) + + afterEach(() => { + use.mockReset() + ironfishmodule.IronfishSdk.init = ironFishSdkBackup + }) + + const name = 'testaccount' + + test + .stdout() + .command(['accounts:create', name]) + .exit(0) + .it('creates the account', (ctx) => { + expect(createAccount).toHaveBeenCalledWith({ name }) + expectCli(ctx.stdout).not.include(`The default account is now: ${name}`) + expect(use).toBeCalledTimes(0) + }) + + test + .stub(cli, 'prompt', () => async () => await Promise.resolve(name)) + .stdout() + .command(['accounts:create']) + .exit(0) + .it('asks for account name and creates it', (ctx) => { + expectCli(ctx.stdout).include(`Creating account ${name}`) + expect(createAccount).toHaveBeenCalledWith({ name }) + }) +}) diff --git a/ironfish-cli/src/commands/accounts/create.ts b/ironfish-cli/src/commands/accounts/create.ts new file mode 100644 index 0000000000..8bef368b69 --- /dev/null +++ b/ironfish-cli/src/commands/accounts/create.ts @@ -0,0 +1,50 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import cli from 'cli-ux' +import { IronfishCommand } from '../../command' +import { RemoteFlags } from '../../flags' + +export class CreateCommand extends IronfishCommand { + static description = `Create a new account for sending and receiving coins` + + static args = [ + { + name: 'name', + parse: (input: string): string => input.trim(), + required: false, + description: 'name of the account', + }, + ] + + static flags = { + ...RemoteFlags, + } + + async start(): Promise { + const { args } = this.parse(CreateCommand) + let name = args.name as string + + if (!name) { + name = (await cli.prompt('Enter the name of the account', { + required: true, + })) as string + } + + await this.sdk.client.connect() + + this.log(`Creating account ${name}`) + const result = await this.sdk.client.createAccount({ name }) + + const { publicAddress, isDefaultAccount } = result.content + + this.log(`Account ${name} created with public address ${publicAddress}`) + + if (isDefaultAccount) { + this.log(`The default account is now: ${name}`) + } else { + this.log(`Run "ironfish accounts:use ${name}" to set the account as default`) + } + } +} diff --git a/ironfish-cli/src/commands/accounts/export.ts b/ironfish-cli/src/commands/accounts/export.ts new file mode 100644 index 0000000000..9e60988335 --- /dev/null +++ b/ironfish-cli/src/commands/accounts/export.ts @@ -0,0 +1,53 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { IronfishCommand } from '../../command' +import { ColorFlag, ColorFlagKey, RemoteFlags } from '../../flags' +import jsonColorizer from 'json-colorizer' +import fs from 'fs' + +export class ExportCommand extends IronfishCommand { + static description = `Export an account` + + static flags = { + ...RemoteFlags, + [ColorFlagKey]: ColorFlag, + } + + static args = [ + { + name: 'account', + parse: (input: string): string => input.trim(), + required: true, + description: 'name of the account to export', + }, + { + name: 'path', + parse: (input: string): string => input.trim(), + required: false, + description: 'a path to export the account to', + }, + ] + + async start(): Promise { + const { flags, args } = this.parse(ExportCommand) + const account = args.account as string + const exportPath = args.path as string | undefined + + await this.sdk.client.connect() + + const response = await this.sdk.client.exportAccount({ account: account }) + + let output = JSON.stringify(response.content.account, undefined, ' ') + + if (exportPath) { + const resolved = this.sdk.fileSystem.resolve(exportPath) + fs.writeFileSync(resolved, output) + this.log(`Exported account ${account} to the file ${exportPath}`) + return + } + + if (flags.color) output = jsonColorizer(output) + this.log(output) + } +} diff --git a/ironfish-cli/src/commands/accounts/import.ts b/ironfish-cli/src/commands/accounts/import.ts new file mode 100644 index 0000000000..1954d5e6d1 --- /dev/null +++ b/ironfish-cli/src/commands/accounts/import.ts @@ -0,0 +1,78 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { RemoteFlags } from '../../flags' +import { IronfishCommand } from '../../command' +import { JSONUtils, PromiseUtils, Account } from 'ironfish' +import fs from 'fs' +import { flags } from '@oclif/command' + +export class ImportCommand extends IronfishCommand { + static description = `Import an account` + + static flags = { + ...RemoteFlags, + rescan: flags.boolean({ + allowNo: true, + default: true, + description: 'rescan the blockchain once the account is imported', + }), + } + + static args = [ + { + name: 'path', + parse: (input: string): string => input.trim(), + required: false, + description: 'a path to export the account to', + }, + ] + + async start(): Promise { + const { flags, args } = this.parse(ImportCommand) + const importPath = args.path as string | undefined + + await this.sdk.client.connect() + + let data: string | null = null + + if (importPath) { + const resolved = this.sdk.fileSystem.resolve(importPath) + data = fs.readFileSync(resolved, 'utf8') + } else if (process.stdin) { + data = '' + + const onData = (dataIn: string): void => { + data += dataIn + } + + process.stdin.setEncoding('utf8') + process.stdin.on('data', onData) + while (!process.stdin.readableEnded) { + await PromiseUtils.sleep(100) + } + process.stdin.off('data', onData) + } + + if (data === null) { + this.log('No account to import provided') + this.exit(1) + } + + const account = JSONUtils.parse(data) + + const result = await this.sdk.client.importAccount({ + account: account, + rescan: flags.rescan, + }) + + const { name, isDefaultAccount } = result.content + this.log(`Account ${name} imported.`) + + if (isDefaultAccount) { + this.log(`The default account is now: ${name}`) + } else { + this.log(`Run "ironfish accounts:use ${name}" to set the account as default`) + } + } +} diff --git a/ironfish-cli/src/commands/accounts/list.ts b/ironfish-cli/src/commands/accounts/list.ts new file mode 100644 index 0000000000..475234756b --- /dev/null +++ 
b/ironfish-cli/src/commands/accounts/list.ts @@ -0,0 +1,29 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { IronfishCommand } from '../../command' +import { RemoteFlags } from '../../flags' + +export class ListCommand extends IronfishCommand { + static description = `List all the accounts on the node` + + static flags = { + ...RemoteFlags, + } + + async start(): Promise { + this.parse(ListCommand) + + await this.sdk.client.connect() + + const response = await this.sdk.client.getAccounts() + + if (response.content.accounts.length === 0) { + this.log('you have no accounts') + } + + for (const name of response.content.accounts) { + this.log(name) + } + } +} diff --git a/ironfish-cli/src/commands/accounts/pay.test.ts b/ironfish-cli/src/commands/accounts/pay.test.ts new file mode 100644 index 0000000000..abbc0c9b41 --- /dev/null +++ b/ironfish-cli/src/commands/accounts/pay.test.ts @@ -0,0 +1,130 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { expect as expectCli, test } from '@oclif/test' +import cli from 'cli-ux' +import * as ironfishmodule from 'ironfish' + +describe('accounts:pay command', () => { + let sendTransaction = jest.fn() + + const ironFishSdkBackup = ironfishmodule.IronfishSdk.init + + beforeEach(() => { + sendTransaction = jest.fn().mockReturnValue({ content: {} }) + + ironfishmodule.IronfishSdk.init = jest.fn().mockImplementation(() => ({ + client: { + connect: jest.fn(), + getAccountBalance: jest.fn().mockResolvedValue({ content: { confirmedBalance: 1000 } }), + sendTransaction, + }, + })) + }) + + afterEach(() => { + sendTransaction.mockReset() + ironfishmodule.IronfishSdk.init = ironFishSdkBackup + }) + + const fee = 1 + const amount = 2 + const to = + '997c586852d1b12da499bcff53595ba37d04e4909dbdb1a75f3bfd90dd7212217a1c2c0da652d187fc52ed' + const from = + '197c586852d1b12da499bcff53595ba37d04e4909dbdb1a75f3bfd90dd7212217a1c2c0da652d187fc52ed' + + test + .stub(cli, 'confirm', () => async () => await Promise.resolve(true)) + .stdout() + .command(['accounts:pay', `-a ${amount}`, `-t ${to}`, `-f ${from}`, `-o ${fee}`]) + .exit(0) + .it( + 'with every flag: show the right confirmation message and call sendTransaction if valid', + (ctx) => { + expectCli(ctx.stdout).include( + `$IRON 2 ($ORE 200,000,000) to ${to} from the account ${from}`, + ) + expectCli(ctx.stdout).include(`Transaction Hash`) + expect(sendTransaction).toBeCalledTimes(1) + }, + ) + + test + .stub(cli, 'prompt', () => async () => await Promise.resolve(to)) + .stub(cli, 'confirm', () => async () => await Promise.resolve(true)) + .stdout() + .command(['accounts:pay', `-a ${amount}`, `-f ${from}`, `-o ${fee}`]) + .exit(0) + .it( + 'without to flag: show the right confirmation message and call sendTransaction if valid', + (ctx) => { + expectCli(ctx.stdout).include(`Transaction Hash`) + expect(sendTransaction).toBeCalledTimes(1) + }, + ) + + test + .stub(cli, 'prompt', () => async () => await Promise.resolve('not 
correct address')) + .stub(cli, 'confirm', () => async () => await Promise.resolve(true)) + .stdout() + .command(['accounts:pay', `-a ${amount}`, `-f ${from}`]) + .exit(2) + .it('without account flag: show the right error message', () => { + expect(sendTransaction).toBeCalledTimes(0) + }) + + test + .stub(cli, 'prompt', () => async () => await Promise.resolve(3)) + .stub(cli, 'confirm', () => async () => await Promise.resolve(true)) + .stdout() + .command(['accounts:pay', `-t ${to}`, `-f ${from}`]) + .exit(0) + .it( + 'without account flag: show the right confirmation message and call sendTransaction if valid', + (ctx) => { + expectCli(ctx.stdout).include( + `$IRON 3.00000000 ($ORE 300,000,000) to ${to} from the account ${from}`, + ) + expectCli(ctx.stdout).include(`Transaction Hash`) + expect(sendTransaction).toBeCalledTimes(1) + }, + ) + + test + .stub(cli, 'prompt', () => async () => await Promise.resolve('non right value')) + .stub(cli, 'confirm', () => async () => await Promise.resolve(true)) + .stdout() + .command(['accounts:pay', `-t ${to}`, `-f ${from}`]) + .exit(0) + .it('without account flag: show the right error message', () => { + expect(sendTransaction).toBeCalledTimes(0) + }) + + test + .stub(cli, 'confirm', () => async () => await Promise.resolve(false)) + .stdout() + .command(['accounts:pay', `-a ${amount}`, `-t ${to}`, `-f ${from}`, `-o ${fee}`]) + .exit(0) + .it('aborts correctly', () => { + expect(sendTransaction).toBeCalledTimes(0) + }) + + describe('When the API throws an error', () => { + beforeEach(() => { + sendTransaction = jest.fn().mockRejectedValue('an error') + }) + test + .stub(cli, 'confirm', () => async () => await Promise.resolve(true)) + .stdout() + .command(['accounts:pay', `-a ${amount}`, `-t ${to}`, `-f ${from}`, `-o ${fee}`]) + .exit(2) + .it('show the right error message and call sendTransaction', (ctx) => { + expectCli(ctx.stdout).include( + `$IRON 2 ($ORE 200,000,000) to ${to} from the account ${from}`, + ) + 
expect(sendTransaction).toBeCalledTimes(1) + expectCli(ctx.stdout).include(`An error occurred while sending the transaction.`) + }) + }) +}) diff --git a/ironfish-cli/src/commands/accounts/pay.ts b/ironfish-cli/src/commands/accounts/pay.ts new file mode 100644 index 0000000000..5d84f24882 --- /dev/null +++ b/ironfish-cli/src/commands/accounts/pay.ts @@ -0,0 +1,203 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import cli from 'cli-ux' +import { flags } from '@oclif/command' +import { IronfishCommand } from '../../command' +import { RemoteFlags } from '../../flags' +import { + displayIronAmountWithCurrency, + ironToOre, + oreToIron, + isValidAmount, + MINIMUM_IRON_AMOUNT, +} from '../../../../ironfish' + +interface ProgressBar { + progress: VoidFunction + start: VoidFunction + stop: VoidFunction + update: (number: number) => void + getTotal: () => number +} + +export class Pay extends IronfishCommand { + static description = `Send coins to another account` + + static examples = [ + '$ ironfish accounts:pay -a 2 -o 0.00000001 -t 997c586852d1b12da499bcff53595ba37d04e4909dbdb1a75f3bfd90dd7212217a1c2c0da652d187fc52ed', + '$ ironfish accounts:pay -a 2 -o 0.00000001 -t 997c586852d1b12da499bcff53595ba37d04e4909dbdb1a75f3bfd90dd7212217a1c2c0da652d187fc52ed -f otheraccount', + ] + + static flags = { + ...RemoteFlags, + account: flags.string({ + char: 'f', + description: 'the account to send money from', + }), + amount: flags.string({ + char: 'a', + description: 'amount of coins to send', + }), + to: flags.string({ + char: 't', + description: 'the public address of the recipient', + }), + fee: flags.string({ + char: 'o', + description: 'the fee amount in Ore', + }), + confirm: flags.boolean({ + default: false, + description: 'confirm without asking', + }), + } + + async start(): Promise { + const { flags } = 
this.parse(Pay) + let amount = (flags.amount as unknown) as number + let fee = (flags.fee as unknown) as number + let to = flags.to + let from = flags.account + + await this.sdk.client.connect() + + if (!amount || Number.isNaN(amount)) { + const responseBalance = await this.sdk.client.getAccountBalance({ + account: from, + }) + const { confirmedBalance } = responseBalance.content + amount = (await cli.prompt( + `Enter the amount in $IRON (balance available: ${displayIronAmountWithCurrency( + oreToIron(Number(confirmedBalance)), + false, + )})`, + { + required: true, + }, + )) as number + if (Number.isNaN(amount)) this.error(`A valid amount is required`) + } + + if (!fee || Number.isNaN(Number(fee))) { + fee = (await cli.prompt('Enter the fee amount in $IRON', { + required: true, + default: '0.00000001', + })) as number + + if (Number.isNaN(fee)) this.error(`A valid fee amount is required`) + } + + if (!to) { + to = (await cli.prompt('Enter the the public address of the recipient', { + required: true, + })) as string + + // Todo: need better validation for public address + if (to.length != 86) this.error(`A valid public address is required`) + } + + if (!from) { + const response = await this.sdk.client.getDefaultAccount() + const defaultAccount = response.content.account + + if (!defaultAccount) + this.error( + `No account is currently active. 
+ Use ironfish accounts:create to first create an account`, + ) + + from = defaultAccount.name + } + + if (!isValidAmount(amount)) { + this.log( + `The minimum transaction amount is ${displayIronAmountWithCurrency( + MINIMUM_IRON_AMOUNT, + false, + )}.`, + ) + this.exit(0) + } + + if (!isValidAmount(fee)) { + this.log( + `The minimum fee amount is ${displayIronAmountWithCurrency( + MINIMUM_IRON_AMOUNT, + false, + )}.`, + ) + this.exit(0) + } + + if (!flags.confirm) { + this.logger.log(` +You are about to send: +${displayIronAmountWithCurrency(amount, true)} to ${to} from the account ${from} + +* This action is NOT reversible * +`) + + const confirm = await cli.confirm('Do you confirm (Y/N)?') + if (!confirm) { + this.log('Transaction aborted.') + this.exit(0) + } + } + + // Run the progress bar for about 2 minutes + // Chances are that the transaction will finish faster (error or faster computer) + const bar = cli.progress({ + barCompleteChar: '\u2588', + barIncompleteChar: '\u2591', + format: 'Creating the transaction: [{bar}] {percentage}% | ETA: {eta}s', + }) as ProgressBar + + bar.start() + + let value = 0 + const timer = setInterval(() => { + value++ + bar.update(value) + if (value >= bar.getTotal()) { + bar.stop() + } + }, 1000) + + const stopProgressBar = () => { + clearInterval(timer) + bar.update(100) + bar.stop() + } + + try { + const result = await this.sdk.client.sendTransaction({ + amount: ironToOre(amount).toString(), + fromAccountName: from, + memo: '', + toPublicKey: to, + transactionFee: ironToOre(fee).toString(), + }) + + stopProgressBar() + + const transaction = result.content + this.logger.log(` +Sending ${displayIronAmountWithCurrency(amount, true)} to ${transaction.toPublicKey} from ${ + transaction.fromAccountName + } +Transaction Hash: ${transaction.transactionHash} +Transaction fee: ${displayIronAmountWithCurrency(fee, true)} + +Find the transaction on https://explorer.ironfish.network/transaction/${ + transaction.transactionHash + } (it 
can take a few minutes before the transaction appears in the Explorer)`) + } catch (error: unknown) { + stopProgressBar() + this.logger.log(`An error occurred while sending the transaction.`) + if (error instanceof Error) this.error(error.message) + this.exit(2) + } + } +} diff --git a/ironfish-cli/src/commands/accounts/publickey.ts b/ironfish-cli/src/commands/accounts/publickey.ts new file mode 100644 index 0000000000..f6572df2ab --- /dev/null +++ b/ironfish-cli/src/commands/accounts/publickey.ts @@ -0,0 +1,46 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { IronfishCommand } from '../../command' +import { flags } from '@oclif/command' +import { RemoteFlags } from '../../flags' + +export class PublicKeyCommand extends IronfishCommand { + static description = `Display or regenerate the account public key` + + static flags = { + ...RemoteFlags, + generate: flags.boolean({ + char: 'g', + default: false, + description: 'generate the public key', + }), + } + + static args = [ + { + name: 'account', + parse: (input: string): string => input.trim(), + required: false, + description: 'name of the account to get a public key', + }, + ] + + async start(): Promise { + const { args, flags } = this.parse(PublicKeyCommand) + const account = args.account as string | undefined + + await this.sdk.client.connect() + + const response = await this.sdk.client.getAccountPublicKey({ + account: account, + generate: flags.generate, + }) + + if (!response) { + this.error(`An error occurred while fetching the public key.`) + } + + this.log(`Account: ${response.content.account}, public key: ${response.content.publicKey}`) + } +} diff --git a/ironfish-cli/src/commands/accounts/remove.ts b/ironfish-cli/src/commands/accounts/remove.ts new file mode 100644 index 0000000000..c9512804d4 --- /dev/null +++ 
b/ironfish-cli/src/commands/accounts/remove.ts @@ -0,0 +1,50 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { flags } from '@oclif/command' +import { IronfishCommand } from '../../command' +import { RemoteFlags } from '../../flags' +import { cli } from 'cli-ux' + +export class RemoveCommand extends IronfishCommand { + static description = `Permanently remove an account` + + static args = [ + { + name: 'name', + required: true, + description: 'name of the account', + }, + ] + + static flags = { + ...RemoteFlags, + confirm: flags.boolean({ + description: 'suppress the confirmation prompt', + }), + } + + async start(): Promise { + const { args, flags } = this.parse(RemoveCommand) + const confirm = flags.confirm + const name = (args.name as string).trim() + + await this.sdk.client.connect() + + const response = await this.sdk.client.removeAccount({ name, confirm }) + + if (response.content.needsConfirm) { + const value = (await cli.prompt(`Are you sure? Type ${name} to confirm`)) as string + + if (value !== name) { + this.log(`Aborting: ${value} did not match ${name}`) + this.exit(1) + } + + await this.sdk.client.removeAccount({ name, confirm: true }) + } + + this.log(`Account '${name}' successfully removed.`) + } +} diff --git a/ironfish-cli/src/commands/accounts/rescan.ts b/ironfish-cli/src/commands/accounts/rescan.ts new file mode 100644 index 0000000000..1495da8136 --- /dev/null +++ b/ironfish-cli/src/commands/accounts/rescan.ts @@ -0,0 +1,63 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { IronfishCommand } from '../../command' +import { RemoteFlags } from '../../flags' +import cli from 'cli-ux' +import { IronfishRpcClient } from 'ironfish' +import { flags } from '@oclif/command' +import { hasUserResponseError } from '../../utils' + +export class RescanCommand extends IronfishCommand { + static description = `Rescan the blockchain for transaction` + + static flags = { + ...RemoteFlags, + follow: flags.boolean({ + default: false, + description: 'if a scan is already happening, follow that scan instead', + }), + reset: flags.boolean({ + default: false, + description: + 'clear the in-memory and disk caches before rescanning. note that this removes all pending transactions', + }), + } + + async start(): Promise { + const { flags } = this.parse(RescanCommand) + + await this.sdk.client.connect() + await rescan(this.sdk.client, flags.follow, flags.reset) + } +} + +export async function rescan( + client: IronfishRpcClient, + follow: boolean, + reset: boolean, +): Promise { + cli.action.start('Rescanning Transactions', 'Asking node to start scanning', { + stdout: true, + }) + + const startedAt = Date.now() + const response = client.rescanAccountStream({ follow, reset }) + + try { + for await (const result of response.contentStream()) { + cli.action.status = `Scanning Block: ${result.sequence}, ${Math.floor( + (Date.now() - startedAt) / 1000, + )} seconds` + } + } catch (error) { + if (hasUserResponseError(error)) { + cli.action.stop(error.codeMessage) + return + } + + throw error + } + + cli.action.stop('Scanning Complete') +} diff --git a/ironfish-cli/src/commands/accounts/use.ts b/ironfish-cli/src/commands/accounts/use.ts new file mode 100644 index 0000000000..3b1dd613ae --- /dev/null +++ b/ironfish-cli/src/commands/accounts/use.ts @@ -0,0 +1,30 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { IronfishCommand } from '../../command' +import { RemoteFlags } from '../../flags' + +export class UseCommand extends IronfishCommand { + static description = 'Change the default account used by all commands' + + static args = [ + { + name: 'name', + required: true, + description: 'name of the account', + }, + ] + + static flags = { + ...RemoteFlags, + } + + async start(): Promise { + const { args } = this.parse(UseCommand) + const name = (args.name as string).trim() + + await this.sdk.client.connect() + await this.sdk.client.useAccount({ name }) + this.log(`The default account is now: ${name}`) + } +} diff --git a/ironfish-cli/src/commands/accounts/which.ts b/ironfish-cli/src/commands/accounts/which.ts new file mode 100644 index 0000000000..3a47424742 --- /dev/null +++ b/ironfish-cli/src/commands/accounts/which.ts @@ -0,0 +1,41 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { IronfishCommand } from '../../command' +import { RemoteFlags } from '../../flags' + +export class WhichCommand extends IronfishCommand { + static description = `Show the account currently used. + + By default all commands will use this account when deciding what + keys to use. 
If no account is specified as the default, you must + specify the account in the command using --account ` + + static flags = { + ...RemoteFlags, + } + + async start(): Promise { + this.parse(WhichCommand) + + await this.sdk.client.connect() + + const { + content: { + accounts: [accountName], + }, + } = await this.sdk.client.getAccounts({ default: true }) + + if (!accountName) { + this.log( + 'There is currently no account being used.\n' + + ' * Create an account: "ironfish accounts:create"\n' + + ' * List all accounts: "ironfish accounts:list"\n' + + ' * Use an existing account: "ironfish accounts:use "', + ) + this.exit(0) + } + + this.log(accountName) + } +} diff --git a/ironfish-cli/src/commands/chain/block.ts b/ironfish-cli/src/commands/chain/block.ts new file mode 100644 index 0000000000..55aaae4f99 --- /dev/null +++ b/ironfish-cli/src/commands/chain/block.ts @@ -0,0 +1,33 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { IronfishCommand } from '../../command' +import { LocalFlags } from '../../flags' + +export default class Block extends IronfishCommand { + static description = 'Show the block header of a requested hash' + + static args = [ + { + name: 'hash', + parse: (input: string): string => input.trim(), + required: true, + description: 'the hash of the block to look at', + }, + ] + + static flags = { + ...LocalFlags, + } + + async start(): Promise { + const { args } = this.parse(Block) + const hash = args.hash as string + + this.log(`Getting the block...`) + await this.sdk.client.connect() + const data = await this.sdk.client.getBlockInfo({ hash: hash }) + + this.log(JSON.stringify(data.content, undefined, ' ')) + } +} diff --git a/ironfish-cli/src/commands/chain/genesisblock.ts b/ironfish-cli/src/commands/chain/genesisblock.ts new file mode 100644 index 0000000000..8143eb52ed --- /dev/null +++ b/ironfish-cli/src/commands/chain/genesisblock.ts @@ -0,0 +1,80 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { flags } from '@oclif/command' +import { IronfishCommand } from '../../command' +import { GenesisBlockInfo, makeGenesisBlock, IJSON } from 'ironfish' +import { LocalFlags } from '../../flags' + +export default class GenesisBlockCommand extends IronfishCommand { + static description = 'Create and serialize a genesis block' + + static hidden = true + + static flags = { + ...LocalFlags, + account: flags.string({ + char: 'a', + required: false, + default: 'IronFishGenesisAccount', + description: 'the name of the account to use for keys to assign the genesis block to', + }), + coins: flags.integer({ + char: 'c', + required: false, + default: 4200000000000000, + description: 'The amount of coins to generate', + }), + memo: flags.string({ + char: 'm', + required: false, + default: 'Genesis Block', + description: 'The memo of the block', + }), + } + + async start(): Promise { + const { flags } = this.parse(GenesisBlockCommand) + + const node = await this.sdk.node() + await node.openDB() + + if (!(await node.captain.chain.isEmpty())) { + this.log( + `The database ${node.config.get( + 'databaseName', + )} must be empty to create a genesis block.`, + ) + this.exit(0) + } + + let account = null + if (flags.account != null) { + account = node.accounts.getAccountByName(flags.account) + } + + if (account == null) { + const name = `IronFishGenesisAccount` // Faucet depends on the name + account = await node.accounts.createAccount(name) + this.log(`Creating account ${account.name} to assign the genesis block to.`) + } + + const info: GenesisBlockInfo = { + timestamp: Date.now(), + memo: flags.memo, + allocations: [ + { + publicAddress: account.publicAddress, + amount: flags.coins, + }, + ], + } + + this.log('\nBuilding a genesis block...') + const { block } = await makeGenesisBlock(node.captain, info, account, this.logger) + + this.log(`\nGenesis Block`) + const serialized = node.strategy._blockSerde.serialize(block) + this.log(IJSON.stringify(serialized, ' ')) + } +} 
diff --git a/ironfish-cli/src/commands/chain/show.ts b/ironfish-cli/src/commands/chain/show.ts new file mode 100644 index 0000000000..90a321f491 --- /dev/null +++ b/ironfish-cli/src/commands/chain/show.ts @@ -0,0 +1,22 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { IronfishCommand } from '../../command' +import { LocalFlags } from '../../flags' + +export default class Show extends IronfishCommand { + static description = 'Show the heaviest chain' + + static flags = { + ...LocalFlags, + } + + async start(): Promise { + this.parse(Show) + + this.log(`Getting the chain blocks...`) + await this.sdk.client.connect() + const data = await this.sdk.client.getChain() + data.content.content.forEach((content) => this.log(content)) + } +} diff --git a/ironfish-cli/src/commands/config/edit.ts b/ironfish-cli/src/commands/config/edit.ts new file mode 100644 index 0000000000..3fcdacda66 --- /dev/null +++ b/ironfish-cli/src/commands/config/edit.ts @@ -0,0 +1,65 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { IronfishCommand } from '../../command' +import { launchEditor } from '../../utils' +import { ConfigFlag, ConfigFlagKey, DataDirFlag, DataDirFlagKey } from '../../flags' +import os from 'os' +import { mkdtemp, writeFile, readFile } from 'fs' +import path from 'path' +import { promisify } from 'util' +import { getConnectedClient } from './show' +import { DEFAULT_CONFIG_NAME, JSONUtils } from 'ironfish' +import { flags } from '@oclif/command' + +const mkdtempAsync = promisify(mkdtemp) +const writeFileAsync = promisify(writeFile) +const readFileAsync = promisify(readFile) + +export class EditCommand extends IronfishCommand { + static description = `Edit the config in your configured editor + + Set the editor in either EDITOR environment variable, or set 'editor' in your ironfish config` + + static flags = { + [ConfigFlagKey]: ConfigFlag, + [DataDirFlagKey]: DataDirFlag, + remote: flags.boolean({ + default: false, + description: 'connect to the node when editing the config', + }), + } + + async start(): Promise { + const { flags } = this.parse(EditCommand) + + if (!flags.remote) { + const configPath = this.sdk.config.storage.configPath + this.log(`Editing ${configPath}`) + const code = await launchEditor(configPath) + this.exit(code || undefined) + } + + const client = await getConnectedClient(this.sdk, !flags.remote) + const response = await client.getConfig({ user: true }) + const output = JSON.stringify(response.content, undefined, ' ') + + const tmpDir = os.tmpdir() + const folderPath = await mkdtempAsync(path.join(tmpDir, 'ironfish')) + const filePath = path.join(folderPath, DEFAULT_CONFIG_NAME) + + await writeFileAsync(filePath, output) + const code = await launchEditor(filePath) + + if (code !== 0) { + this.exit(code || undefined) + } + + const content = await readFileAsync(filePath, { encoding: 'utf8' }) + const config = JSONUtils.parse>(content) + + await client.uploadConfig({ config }) + this.log('Uploaded config successfully.') + this.exit(0) + } 
+} diff --git a/ironfish-cli/src/commands/config/get.ts b/ironfish-cli/src/commands/config/get.ts new file mode 100644 index 0000000000..deb3eec99d --- /dev/null +++ b/ironfish-cli/src/commands/config/get.ts @@ -0,0 +1,60 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { flags } from '@oclif/command' +import { ConfigOptions } from 'ironfish' +import { ConfigFlag, ConfigFlagKey, DataDirFlag, DataDirFlagKey } from '../../flags' +import { IronfishCommand } from '../../command' +import jsonColorizer from 'json-colorizer' +import { getConnectedClient } from './show' + +export class GetCommand extends IronfishCommand { + static description = `Print out one config value` + + static args = [ + { + name: 'name', + parse: (input: string): string => input.trim(), + required: true, + description: 'name of the config item', + }, + ] + + static flags = { + [ConfigFlagKey]: ConfigFlag, + [DataDirFlagKey]: DataDirFlag, + user: flags.boolean({ + description: 'only show config from the users datadir and not overrides', + }), + local: flags.boolean({ + default: false, + description: 'dont connect to the node when displaying the config', + }), + color: flags.boolean({ + default: true, + allowNo: true, + description: 'should colorize the output', + }), + } + + async start(): Promise { + const { args, flags } = this.parse(GetCommand) + const name = (args.name as string).trim() + + const client = await getConnectedClient(this.sdk, flags.local) + + const response = await client.getConfig({ + user: flags.user, + name: name, + }) + + const key = name as keyof Partial + if (response.content[key] === undefined) this.exit(0) + + let output = JSON.stringify(response.content[key], undefined, ' ') + if (flags.color) output = jsonColorizer(output) + + this.log(output) + this.exit(0) + } +} diff --git 
a/ironfish-cli/src/commands/config/set.ts b/ironfish-cli/src/commands/config/set.ts new file mode 100644 index 0000000000..deeb4caef4 --- /dev/null +++ b/ironfish-cli/src/commands/config/set.ts @@ -0,0 +1,46 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { flags } from '@oclif/command' +import { ConfigFlag, ConfigFlagKey, DataDirFlag, DataDirFlagKey } from '../../flags' +import { IronfishCommand } from '../../command' +import { getConnectedClient } from './show' + +export class SetCommand extends IronfishCommand { + static description = `Set a value in the config` + + static args = [ + { + name: 'name', + parse: (input: string): string => input.trim(), + required: true, + description: 'name of the config item', + }, + { + name: 'value', + parse: (input: string): string => input.trim(), + required: true, + description: 'value of the config item', + }, + ] + + static flags = { + [ConfigFlagKey]: ConfigFlag, + [DataDirFlagKey]: DataDirFlag, + local: flags.boolean({ + default: false, + description: 'dont connect to the node when updating the config', + }), + } + + async start(): Promise { + const { args, flags } = this.parse(SetCommand) + const name = args.name as string + const value = args.value as string + + const client = await getConnectedClient(this.sdk, flags.local) + await client.setConfig({ name, value }) + + this.exit(0) + } +} diff --git a/ironfish-cli/src/commands/config/show.ts b/ironfish-cli/src/commands/config/show.ts new file mode 100644 index 0000000000..9f8a7ddcc8 --- /dev/null +++ b/ironfish-cli/src/commands/config/show.ts @@ -0,0 +1,57 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { IronfishRpcClient, IronfishSdk } from 'ironfish' +import { + ColorFlag, + ColorFlagKey, + ConfigFlag, + ConfigFlagKey, + DataDirFlag, + DataDirFlagKey, +} from '../../flags' +import { IronfishCommand } from '../../command' +import jsonColorizer from 'json-colorizer' +import { flags } from '@oclif/command' + +export class ShowCommand extends IronfishCommand { + static description = `Print out the entire config` + + static flags = { + [ConfigFlagKey]: ConfigFlag, + [DataDirFlagKey]: DataDirFlag, + [ColorFlagKey]: ColorFlag, + user: flags.boolean({ + description: 'only show config from the users datadir and not overrides', + }), + local: flags.boolean({ + default: false, + description: 'dont connect to the node when displaying the config', + }), + } + + async start(): Promise { + const { flags } = this.parse(ShowCommand) + + const client = await getConnectedClient(this.sdk, flags.local) + const response = await client.getConfig({ user: flags.user }) + + let output = JSON.stringify(response.content, undefined, ' ') + if (flags.color) output = jsonColorizer(output) + this.log(output) + } +} + +export async function getConnectedClient( + sdk: IronfishSdk, + local: boolean, +): Promise { + if (local) { + const node = await sdk.node() + await sdk.clientMemory.connect(node) + return sdk.clientMemory + } + + await sdk.client.connect() + return sdk.client +} diff --git a/ironfish-cli/src/commands/faucet/giveme.test.ts b/ironfish-cli/src/commands/faucet/giveme.test.ts new file mode 100644 index 0000000000..1a08be6130 --- /dev/null +++ b/ironfish-cli/src/commands/faucet/giveme.test.ts @@ -0,0 +1,99 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { expect as expectCli, test } from '@oclif/test' +import cli from 'cli-ux' + +import * as ironfishmodule from 'ironfish' + +describe('faucet:giveme command', () => { + let accountName: string | null = null + const request = jest.fn() + const createAccount = jest.fn() + const giveMeFaucet = jest.fn() + const getDefaultAccount = jest.fn() + + const ironFishSdkBackup = ironfishmodule.IronfishSdk.init + + beforeEach(() => { + giveMeFaucet.mockReturnValue(Promise.resolve({ content: { message: 'success' } })) + + getDefaultAccount.mockImplementation(() => { + return Promise.resolve({ content: { account: { name: accountName } } }) + }) + + ironfishmodule.IronfishSdk.init = jest.fn().mockImplementation(() => ({ + config: { get: jest.fn() }, + accounts: { use: jest.fn() }, + client: { + request, + connect: jest.fn(), + createAccount, + giveMeFaucet, + getDefaultAccount, + }, + })) + }) + + afterEach(() => { + createAccount.mockReset() + giveMeFaucet.mockReset() + getDefaultAccount.mockReset() + ironfishmodule.IronfishSdk.init = ironFishSdkBackup + }) + + test + .do(() => { + accountName = null + }) + .stub(cli, 'prompt', () => async () => await Promise.resolve('nameOfTheAccount')) + .stdout() + .command(['faucet:giveme']) + .exit(0) + .it('request to create an account if one is not set', (ctx) => { + expectCli(ctx.stdout).include( + `You don't have a default account set up yet. 
Let's create one first`, + ) + expect(createAccount).toHaveBeenCalledWith({ name: 'nameOfTheAccount', default: true }) + }) + + test + .do(() => { + accountName = 'myAccount' + }) + .stub(cli, 'prompt', () => async () => await Promise.resolve('johann@ironfish.network')) + .stdout() + .command(['faucet:giveme']) + .exit(0) + .it('request funds and succeed', (ctx) => { + expectCli(ctx.stdout).include(`Collecting your funds...`) + expect(createAccount).toHaveBeenCalledTimes(0) + expect(giveMeFaucet).toHaveBeenCalledWith({ + accountName: 'myAccount', + email: 'johann@ironfish.network', + }) + expectCli(ctx.stdout).include( + `Congratulations! The Iron Fish Faucet just added your request to the queue!`, + ) + }) + + test + .do(() => { + accountName = 'myAccount' + giveMeFaucet.mockRejectedValue('Error') + }) + .stub(cli, 'prompt', () => async () => await Promise.resolve('johann@ironfish.network')) + .stdout() + .command(['faucet:giveme']) + .exit(1) + .it('request funds and fail', (ctx) => { + expectCli(ctx.stdout).include(`Collecting your funds...`) + expect(giveMeFaucet).toHaveBeenCalledWith({ + accountName, + email: 'johann@ironfish.network', + }) + expectCli(ctx.stdout).include( + `Unfortunately, the faucet request failed. Please try again later`, + ) + }) +}) diff --git a/ironfish-cli/src/commands/faucet/giveme.ts b/ironfish-cli/src/commands/faucet/giveme.ts new file mode 100644 index 0000000000..b1b17375a5 --- /dev/null +++ b/ironfish-cli/src/commands/faucet/giveme.ts @@ -0,0 +1,79 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import cli from 'cli-ux' +import { RequestError } from 'ironfish' +import { IronfishCommand } from '../../command' +import { RemoteFlags } from '../../flags' +import { ONE_FISH_IMAGE, TWO_FISH_IMAGE } from '../../images' + +export class GiveMeCommand extends IronfishCommand { + static description = `Receive coins from the Iron Fish official Faucet` + + static flags = { + ...RemoteFlags, + } + + async start(): Promise { + this.log(`${ONE_FISH_IMAGE} + +Receive funds, check your balance and send money. + +Thanks for contributing to Iron Fish! +`) + + await this.sdk.client.connect() + + const email = (await cli.prompt('Enter your email to stay updated with Iron Fish', { + required: false, + })) as string + + // Create an account if one is not set + const response = await this.sdk.client.getDefaultAccount() + let accountName = response.content.account?.name + + if (!accountName) { + this.log(`You don't have a default account set up yet. Let's create one first!`) + accountName = + ((await cli.prompt('Please enter the name of your new Iron Fish account', { + required: false, + })) as string) || 'default' + + await this.sdk.client.createAccount({ name: accountName, default: true }) + } + + cli.action.start('Collecting your funds', 'Sending a request to the Iron Fish network', { + stdout: true, + }) + try { + await this.sdk.client.giveMeFaucet({ + accountName, + email, + }) + cli.action.stop('Success') + } catch (error: unknown) { + cli.action.stop('Unfortunately, the faucet request failed. Please try again later.') + if (error instanceof RequestError) { + this.log(error.message) + } + this.exit(1) + } + + this.log( + ` + +${TWO_FISH_IMAGE} + +Congratulations! The Iron Fish Faucet just added your request to the queue! +It will be processed within the next hour and $IRON will be sent directly to your account. 
+ +Check your balance by running: +- ironfish accounts:balance + +Learn how to send a transaction by running: +- ironfish accounts:pay --help +`, + ) + } +} diff --git a/ironfish-cli/src/commands/logs.ts b/ironfish-cli/src/commands/logs.ts new file mode 100644 index 0000000000..9772a5f6a9 --- /dev/null +++ b/ironfish-cli/src/commands/logs.ts @@ -0,0 +1,37 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { IronfishCommand } from '../command' +import { IronfishNode, ConsoleReporterInstance } from 'ironfish' +import { RemoteFlags } from '../flags' +import { logType } from 'consola' + +export default class LogsCommand extends IronfishCommand { + static description = 'Tail server logs' + + static flags = { + ...RemoteFlags, + } + + node: IronfishNode | null = null + + async start(): Promise { + this.parse(LogsCommand) + + await this.sdk.client.connect() + + const response = this.sdk.client.getLogStream() + + for await (const value of response.contentStream()) { + ConsoleReporterInstance.log({ + level: Number(value.level), + type: value.type as logType, + tag: value.tag, + args: value.args, + date: new Date(value.date), + }) + } + + this.exit(0) + } +} diff --git a/ironfish-cli/src/commands/miners/start.ts b/ironfish-cli/src/commands/miners/start.ts new file mode 100644 index 0000000000..36acc74270 --- /dev/null +++ b/ironfish-cli/src/commands/miners/start.ts @@ -0,0 +1,65 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import cli from 'cli-ux' +import { miner, NewBlocksStreamResponse, PromiseUtils } from 'ironfish' +import { IronfishCommand } from '../../command' +import { RemoteFlags } from '../../flags' + +export class Miner extends IronfishCommand { + static description = `Start a miner and subscribe to new blocks for the node` + + static flags = { + ...RemoteFlags, + } + + async start(): Promise { + this.parse(Miner) + + const client = this.sdk.client + + const successfullyMined = (randomness: number, miningRequestId: number) => { + cli.action.stop( + `Successfully mined a block on request ${miningRequestId} randomness ${randomness}`, + ) + const request = client.successfullyMined({ randomness, miningRequestId }) + request.waitForEnd().catch(() => { + cli.action.stop('Unable to submit mined block') + }) + + cli.action.start('Mining a block') + } + + async function* nextBlock(blocksStream: AsyncGenerator) { + for (;;) { + const blocksResult = (await blocksStream.next()) as IteratorResult< + NewBlocksStreamResponse + > + + if (blocksResult.done) { + return + } + + yield blocksResult.value + } + } + + // eslint-disable-next-line no-constant-condition + while (true) { + const connected = await client.tryConnect() + + if (!connected) { + this.logger.log('Not connected to a node - waiting 5s before retrying') + await PromiseUtils.sleep(5000) + continue + } + + this.logger.log('Starting to mine') + const blocksStream = client.newBlocksStream().contentStream() + + cli.action.start('Mining a block') + await miner(nextBlock(blocksStream), successfullyMined) + cli.action.stop('Mining interrupted') + } + } +} diff --git a/ironfish-cli/src/commands/peers/list.ts b/ironfish-cli/src/commands/peers/list.ts new file mode 100644 index 0000000000..9daff02352 --- /dev/null +++ b/ironfish-cli/src/commands/peers/list.ts @@ -0,0 +1,165 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { cli, Table } from 'cli-ux' +import { flags } from '@oclif/command' +import { IronfishCommand } from '../../command' +import { PromiseUtils, GetPeersResponse } from 'ironfish' +import { RemoteFlags } from '../../flags' +import blessed from 'blessed' + +type GetPeerResponsePeer = GetPeersResponse['peers'][0] + +export class ListCommand extends IronfishCommand { + static description = `List all connected peers` + + static flags = { + ...RemoteFlags, + follow: flags.boolean({ + char: 'f', + default: false, + description: 'follow the peers list live', + }), + extended: flags.boolean({ + char: 'e', + default: false, + description: 'display all information', + }), + versions: flags.boolean({ + default: false, + description: 'display peer versions', + }), + names: flags.boolean({ + char: 'n', + default: false, + description: 'display node names', + hidden: true, + }), + } + + async start(): Promise { + const { flags } = this.parse(ListCommand) + + if (!flags.follow) { + await this.sdk.client.connect() + const response = await this.sdk.client.getPeers() + this.log(renderTable(response.content, flags)) + this.exit(0) + } + + // Console log will create display issues with Blessed + this.logger.pauseLogs() + + const screen = blessed.screen({ smartCSR: true }) + const text = blessed.text() + screen.append(text) + + // eslint-disable-next-line no-constant-condition + while (true) { + const connected = await this.sdk.client.tryConnect() + if (!connected) { + text.clearBaseLine(0) + text.setContent('Connecting...') + screen.render() + await PromiseUtils.sleep(1000) + continue + } + + const response = this.sdk.client.getPeersStream() + + for await (const value of response.contentStream()) { + text.clearBaseLine(0) + text.setContent(renderTable(value, flags)) + screen.render() + } + } + } +} + +function renderTable( + content: GetPeersResponse, + flags: { 
extended: boolean; names: boolean; versions: boolean }, +): string { + let columns: Table.table.Columns = { + identity: { + header: 'IDENTITY', + get: (row: GetPeerResponsePeer) => { + return row.identity || '-' + }, + }, + } + + if (flags.names) { + columns['name'] = { + header: 'NAME', + minWidth: 5, + get: (row: GetPeerResponsePeer) => { + return row.name || '-' + }, + } + } + + if (flags.versions) { + columns['version'] = { + header: 'VERSION', + minWidth: 5, + get: (row: GetPeerResponsePeer) => { + return row.version || '-' + }, + } + } + + columns = { + ...columns, + state: { + header: 'STATE', + minWidth: 15, + get: (row: GetPeerResponsePeer) => { + return row.state + (row.error ? '(!)' : '') + }, + }, + address: { + header: 'ADDRESS', + minWidth: 7, + get: (row: GetPeerResponsePeer) => { + let address = '' + if (row.address) address += row.address + if (row.port) address += ':' + String(row.port) + return address + }, + }, + connectionWebSocket: { + header: 'SOCKET', + minWidth: 4, + extended: true, + get: (row: GetPeerResponsePeer) => { + return row.connectionWebSocket + (row.connectionWebSocketError ? ' (!)' : '') || '-' + }, + }, + connectionWebRTC: { + header: 'RTC', + minWidth: 5, + extended: true, + get: (row: GetPeerResponsePeer) => { + return row.connectionWebRTC + (row.connectionWebRTCError ? ' (!)' : '') || '-' + }, + }, + error: { + header: 'ERROR', + minWidth: 5, + extended: true, + get: (row: GetPeerResponsePeer) => { + return row.error || '-' + }, + }, + } + + let result = '' + + cli.table(content.peers, columns, { + printLine: (line) => (result += `${String(line)}\n`), + extended: flags.extended, + }) + + return result +} diff --git a/ironfish-cli/src/commands/start.test.ts b/ironfish-cli/src/commands/start.test.ts new file mode 100644 index 0000000000..dd99866a77 --- /dev/null +++ b/ironfish-cli/src/commands/start.test.ts @@ -0,0 +1,144 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { expect as expectCli, test } from '@oclif/test' +import * as ironfishmodule from 'ironfish' + +jest.mock('ironfish', () => { + const originalModule = jest.requireActual('ironfish') + + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return { + ...originalModule, + PeerNetwork: jest.fn().mockReturnValue({ + peerManager: { + onConnect: { + on: jest.fn(), + }, + onDisconnect: { + on: jest.fn(), + }, + }, + onIsReadyChanged: { + on: jest.fn(), + }, + start: jest.fn(), + }), + } +}) + +describe('start command', () => { + let isFirstRun = true + let hasGenesisBlock = false + + const setConfig = jest.fn() + const seed = jest.fn().mockReturnValue(true) + const start = jest.fn() + const waitForShutdown = jest.fn() + + const ironFishSdkBackup = ironfishmodule.IronfishSdk.init + + beforeEach(() => { + const configOptions = { + enableTelemetry: false, + nodeName: '', + isWorker: false, + } + + const internalOptions = { + isFirstRun, + } + + const config = { + save: jest.fn(), + set: setConfig, + get: jest.fn().mockImplementation((config: 'enableTelemetry') => configOptions[config]), + } + + const internal = { + save: jest.fn(), + set: setConfig, + get: jest.fn().mockImplementation((config: 'isFirstRun') => internalOptions[config]), + } + + const accounts = { + accountExists: jest.fn(), + getDefaultAccount: jest.fn(), + } + + const node = { + start, + networkBridge: { attachPeerNetwork: jest.fn() }, + waitForShutdown, + openDB: jest.fn(), + closeDB: jest.fn(), + accounts: accounts, + seed: seed, + config: config, + internal: internal, + captain: { + chain: { + hasGenesisBlock: jest.fn().mockReturnValue(hasGenesisBlock), + }, + }, + } + + ironfishmodule.IronfishSdk.init = jest.fn().mockImplementation(() => ({ + clientMemory: { connect: jest.fn(), createAccount: jest.fn() }, + node: jest.fn().mockReturnValue(node), + config: config, + 
internal: internal, + getVersion: jest.fn().mockReturnValue('sdk/1/cli'), + })) + }) + + afterEach(() => { + setConfig.mockReset() + seed.mockReset() + start.mockReset() + ironfishmodule.IronfishSdk.init = ironFishSdkBackup + }) + + describe('First run', () => { + test + .stdout() + .command(['start']) + .exit(0) + .it('show the telemetry message, generate the genesis block', (ctx) => { + // welcome message + expectCli(ctx.stdout).include(`Peer Identity`) + // telemetry + expectCli(ctx.stdout).include( + `To help improve Ironfish, opt in to collecting telemetry`, + ) + expect(setConfig).toHaveBeenCalledWith('isFirstRun', false) + // generate genesis + expectCli(ctx.stdout).include(`Initializing the blockchain`) + expect(seed).toHaveBeenCalled() + // start the node + expect(start).toHaveBeenCalled() + expect(waitForShutdown).toHaveBeenCalled() + }) + }) + + describe('second run', () => { + beforeAll(() => { + isFirstRun = false + hasGenesisBlock = true + }) + test + .stdout() + .command(['start']) + .exit(0) + .it('show the telemetry message, generate the genesis block', (ctx) => { + // welcome message + expectCli(ctx.stdout).include(`Peer Identity`) + expect(setConfig).toHaveBeenCalledTimes(0) + // generate genesis + expect(seed).toHaveBeenCalledTimes(0) + // start the node + expect(start).toHaveBeenCalled() + expect(waitForShutdown).toHaveBeenCalled() + }) + }) +}) diff --git a/ironfish-cli/src/commands/start.ts b/ironfish-cli/src/commands/start.ts new file mode 100644 index 0000000000..a255ff445b --- /dev/null +++ b/ironfish-cli/src/commands/start.ts @@ -0,0 +1,275 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { flags } from '@oclif/command' +import { IronfishCommand, SIGNALS } from '../command' +import { + DatabaseIsLockedError, + DEFAULT_WEBSOCKET_PORT, + IronfishNode, + parseUrl, + Peer, + PeerNetwork, + privateIdentityToIdentity, + PromiseUtils, + setDefaultTags, +} from 'ironfish' +import cli from 'cli-ux' +import tweetnacl from 'tweetnacl' +import wrtc from 'wrtc' +import WSWebSocket from 'ws' +import { + ConfigFlag, + ConfigFlagKey, + DatabaseFlag, + DatabaseFlagKey, + DataDirFlag, + DataDirFlagKey, + RpcTcpHostFlag, + RpcTcpHostFlagKey, + RpcTcpPortFlag, + RpcTcpPortFlagKey, + RpcUseIpcFlag, + RpcUseIpcFlagKey, + RpcUseTcpFlag, + RpcUseTcpFlagKey, + VerboseFlag, + VerboseFlagKey, +} from '../flags' +import { ONE_FISH_IMAGE } from '../images' + +const DEFAULT_ACCOUNT_NAME = 'default' + +export default class Start extends IronfishCommand { + static description = 'Start the node' + + static flags = { + [VerboseFlagKey]: VerboseFlag, + [ConfigFlagKey]: ConfigFlag, + [DataDirFlagKey]: DataDirFlag, + [DatabaseFlagKey]: DatabaseFlag, + [RpcUseIpcFlagKey]: { ...RpcUseIpcFlag, allowNo: true }, + [RpcUseTcpFlagKey]: { ...RpcUseTcpFlag, allowNo: true }, + [RpcTcpHostFlagKey]: RpcTcpHostFlag, + [RpcTcpPortFlagKey]: RpcTcpPortFlag, + bootstrap: flags.string({ + char: 'b', + description: 'the address of a bootstrap node to connect to', + multiple: true, + }), + port: flags.integer({ + char: 'p', + description: 'port to run the local ws server on', + }), + name: flags.string({ + char: 'n', + description: 'name for the node', + hidden: true, + }), + worker: flags.boolean({ + char: 'w', + description: 'is this a worker node', + hidden: true, + }), + listen: flags.boolean({ + allowNo: true, + default: undefined, + description: 'disable the web socket listen server', + hidden: true, + }), + } + + node: IronfishNode | null = null + + peerNetwork: PeerNetwork | null = null + + /** + * This promise is used to wait until start is finished beforer closeFromSignal continues + 
* because you can cause errors if you attempt to shutdown while the node is still starting + * up to reduce shutdown hanging, start should cancel if it detects this.isClosing is true + * and resolve this promise + */ + startDonePromise: Promise | null = null + + async start(): Promise { + const [startDonePromise, startDoneResolve] = PromiseUtils.split() + this.startDonePromise = startDonePromise + + const { flags } = this.parse(Start) + + if (flags.bootstrap != undefined) { + this.sdk.config.setOverride('bootstrapNodes', flags.bootstrap) + } + if (flags.port != undefined && flags.port !== this.sdk.config.get('peerPort')) { + this.sdk.config.setOverride('peerPort', flags.port) + } + if (flags.name != undefined && flags.name.trim() !== this.sdk.config.get('nodeName')) { + this.sdk.config.setOverride('nodeName', flags.name.trim()) + } + if (flags.listen != undefined && flags.listen !== this.sdk.config.get('enableListenP2P')) { + this.sdk.config.setOverride('enableListenP2P', flags.listen) + } + if (flags.worker != undefined && flags.worker !== this.sdk.config.get('isWorker')) { + this.sdk.config.setOverride('isWorker', flags.worker) + } + + const peerPort = this.sdk.config.get('peerPort') + // Allow comma-separated nodes and remove empty strings + const bootstrapNodes = (this.sdk.config.get('bootstrapNodes') || []) + .flatMap((i) => i.split(',')) + .filter(Boolean) + + // Start peer networking + const identity = tweetnacl.box.keyPair() + const version = this.sdk.getVersion('cli') + const anonymousTelemetryId = Math.random().toString().substring(2) + setDefaultTags({ version, sessionId: anonymousTelemetryId }) + + const nodeName = this.sdk.config.get('nodeName').trim() || null + + this.logger.log(`\n${ONE_FISH_IMAGE}`) + this.logger.log(`Peer Identity ${privateIdentityToIdentity(identity)}`) + this.logger.log(`Peer Version ${version}`) + this.logger.log(`Port ${peerPort}`) + this.logger.log(`Bootstrap ${bootstrapNodes.join(',') || 'NONE'}`) + + if (nodeName) { + 
this.logger.log(`Node Name ${nodeName}`) + } + this.logger.log(` `) + + const peerNetwork = new PeerNetwork( + identity, + version, + WSWebSocket, + wrtc, + { + port: peerPort, + name: nodeName, + maxPeers: this.sdk.config.get('maxPeers'), + enableListen: this.sdk.config.get('enableListenP2P'), + targetPeers: this.sdk.config.get('targetPeers'), + isWorker: this.sdk.config.get('isWorker'), + broadcastWorkers: this.sdk.config.get('broadcastWorkers'), + simulateLatency: this.sdk.config.get('p2pSimulateLatency'), + }, + this.logger, + this.sdk.metrics, + ) + + peerNetwork.peerManager.onConnect.on((peer: Peer) => { + this.logger.debug(`Connected to ${peer.getIdentityOrThrow()}`) + }) + + peerNetwork.peerManager.onDisconnect.on((peer: Peer) => { + this.logger.debug(`Disconnected from ${String(peer.state.identity)}`) + }) + + peerNetwork.onIsReadyChanged.on((isReady: boolean) => { + if (isReady) this.logger.info(`Connected to the Iron Fish network`) + else this.logger.info(`Not connected to the Iron Fish network`) + }) + + const node = await this.sdk.node() + await this.waitForOpenDatabase(node) + if (this.closing) return startDoneResolve() + + // Information displayed the first time a node is running + if (node.internal.get('isFirstRun')) { + if (!node.config.get('enableTelemetry')) + this.logger.log(` +################################################################# +# Thank you for installing the Iron Fish Node. 
# +# To help improve Ironfish, opt in to collecting telemetry # +# by setting telemetry=true in your configuration file # +################################################################# +`) + + // Create a default account on startup + if (!node.accounts.getDefaultAccount()) { + if (node.accounts.accountExists(DEFAULT_ACCOUNT_NAME)) { + await node.accounts.setDefaultAccount(DEFAULT_ACCOUNT_NAME) + this.log(`The default account is now: ${DEFAULT_ACCOUNT_NAME}\n`) + } else { + await this.sdk.clientMemory.connect(node) + const result = await this.sdk.clientMemory.createAccount({ + name: DEFAULT_ACCOUNT_NAME, + }) + this.log( + `New default account created: ${DEFAULT_ACCOUNT_NAME} \nAccount's public address: ${result?.content.publicAddress}\n`, + ) + } + } + + node.internal.set('isFirstRun', false) + await node.internal.save() + } + + if (!(await node.captain.chain.hasGenesisBlock())) { + cli.action.start('Initializing the blockchain', 'Creating the genesis block', { + stdout: true, + }) + const result = await node.seed() + if (!result) { + cli.action.stop('Failed to seed the database with the genesis block.') + } + cli.action.stop('Genesis block created successfully') + } + + node.networkBridge.attachPeerNetwork(peerNetwork) + + await node.start() + + peerNetwork.start() + for (const node of bootstrapNodes) { + const url = parseUrl(node) + if (!url.hostname) + throw new Error(`bootstrapNode flag value ${node} must specify a hostname`) + + // If the user has not specified a port, we can guess that + // it's running on the default ironfish websocket port + const port = url.port ? 
url.port : DEFAULT_WEBSOCKET_PORT + const address = url.hostname + `:${port}` + peerNetwork.peerManager.connectToWebSocketAddress(address, true) + } + + this.node = node + this.peerNetwork = peerNetwork + + startDoneResolve() + this.listenForSignals() + await node.waitForShutdown() + } + + async closeFromSignal(signal: SIGNALS): Promise { + this.log(`Shutting down node after ${signal}`) + await this.startDonePromise + this.peerNetwork?.stop() + await this.node?.shutdown() + await this.node?.closeDB() + } + + async waitForOpenDatabase(node: IronfishNode): Promise { + let warnDatabaseInUse = false + const OPEN_DB_RETRY_TIME = 500 + + while (!this.closing) { + try { + await node.openDB() + return + } catch (e) { + if (e instanceof DatabaseIsLockedError) { + if (!warnDatabaseInUse) { + this.log('Another node is using the database, waiting for that node to close.') + warnDatabaseInUse = true + } + + await new Promise((r) => setTimeout(r, OPEN_DB_RETRY_TIME)) + continue + } + + throw e + } + } + } +} diff --git a/ironfish-cli/src/commands/status.ts b/ironfish-cli/src/commands/status.ts new file mode 100644 index 0000000000..175c705c95 --- /dev/null +++ b/ironfish-cli/src/commands/status.ts @@ -0,0 +1,96 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { flags } from '@oclif/command' +import { PromiseUtils, GetStatusResponse, FileUtils } from 'ironfish' +import { Assert } from 'ironfish' +import { IronfishCommand } from '../command' +import { RemoteFlags } from '../flags' +import blessed from 'blessed' + +export default class Status extends IronfishCommand { + static description = 'Show the status of the node' + + static flags = { + ...RemoteFlags, + follow: flags.boolean({ + char: 'f', + default: false, + description: 'follow the status of the node live', + }), + } + + async start(): Promise { + const { flags } = this.parse(Status) + + if (!flags.follow) { + const connected = await this.sdk.client.tryConnect() + + if (!connected) { + this.log('Node: STOPPED') + } else { + const response = await this.sdk.client.status() + this.log(renderStatus(response.content)) + } + + this.exit(0) + } + + // Console log will create display issues with Blessed + this.logger.pauseLogs() + + const screen = blessed.screen({ smartCSR: true }) + const statusText = blessed.text() + screen.append(statusText) + + // eslint-disable-next-line no-constant-condition + while (true) { + const connected = await this.sdk.client.tryConnect() + + if (!connected) { + statusText.clearBaseLine(0) + statusText.setContent('Node: STOPPED') + screen.render() + await PromiseUtils.sleep(1000) + continue + } + + const response = this.sdk.client.statusStream() + + for await (const value of response.contentStream()) { + statusText.clearBaseLine(0) + statusText.setContent(renderStatus(value)) + screen.render() + } + } + } +} + +function renderStatus(content: GetStatusResponse): string { + const nodeStatus = content.node.status.toUpperCase() + let blockSyncerStatus = content.blockSyncer.status.toString().toUpperCase() + + Assert.isNotUndefined(content.blockSyncer.syncing) + + const avgTimeToAddBlock = content.blockSyncer.syncing.blockSpeed + const speed = content.blockSyncer.syncing.speed + if (content.blockSyncer.status !== 'IDLE') { + 
blockSyncerStatus += ` @ ${speed} blocks per seconds` + } + + if (avgTimeToAddBlock) { + blockSyncerStatus += ` | avg time to add block ${avgTimeToAddBlock} ms` + } + + const peerNetworkStatus = `${ + content.peerNetwork.isReady ? 'CONNECTED' : 'WAITING' + } In: ${FileUtils.formatFileSize( + content.peerNetwork.inboundTraffic, + )}/s, Out: ${FileUtils.formatFileSize(content.peerNetwork.outboundTraffic)}/s` + + return ` +Node: ${nodeStatus} +Blocks syncing: ${blockSyncerStatus} +Heaviest head: ${content.node.heaviestHead} +P2P Network: ${peerNetworkStatus}` +} diff --git a/ironfish-cli/src/commands/stop.ts b/ironfish-cli/src/commands/stop.ts new file mode 100644 index 0000000000..c543b2e7e2 --- /dev/null +++ b/ironfish-cli/src/commands/stop.ts @@ -0,0 +1,27 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { IronfishCommand } from '../command' +import { ConnectionError, IronfishNode } from 'ironfish' +import { RemoteFlags } from '../flags' + +export default class StopCommand extends IronfishCommand { + static description = 'Stop the node from running' + + static flags = { + ...RemoteFlags, + } + + node: IronfishNode | null = null + + async start(): Promise { + this.parse(StopCommand) + + await this.sdk.client.connect({ retryConnect: false }).catch((e) => { + if (e instanceof ConnectionError) this.exit(0) + throw e + }) + + await this.sdk.client.stopNode() + } +} diff --git a/ironfish-cli/src/commands/swim.ts b/ironfish-cli/src/commands/swim.ts new file mode 100644 index 0000000000..b1e913e021 --- /dev/null +++ b/ironfish-cli/src/commands/swim.ts @@ -0,0 +1,87 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import cli from 'cli-ux' +import { IronfishCommand } from '../command' +import { ONE_FISH_IMAGE, TWO_FISH_IMAGE } from '../images' + +const FRAME_RATE_MS = 1000 / 30 + +export default class SwimCommand extends IronfishCommand { + static description = 'See the hex fish swim' + static hidden = true + + async start(): Promise { + this.parse(SwimCommand) + + const images = [ONE_FISH_IMAGE, TWO_FISH_IMAGE] + const image = images[Math.round(Math.random() * (images.length - 1))] + const pixels = this.getPixels(image) + let last = Date.now() + let elapsed = 0 + + for (;;) { + // Calculate elapsed time for calculating elapsed frames + const now = Date.now() + elapsed += now - last + last = now + + // Calculate elapsed frames + let frames = Math.floor(elapsed / FRAME_RATE_MS) + elapsed -= FRAME_RATE_MS * frames + + // Update for each elapsed frame + while (frames-- > 0) { + pixels.unshift(pixels.pop() as Array) + } + + // Render the current frame + // eslint-disable-next-line no-console + console.clear() + this.renderPixels(pixels) + this.log('The hex fish are coming...') + await cli.wait(32) + } + + // eslint-disable-next-line no-console + console.clear() + } + + getPixels(image: string): Array> { + const rows = image.split('\n') + const rowToGetWidth = Math.round(rows.length / 2) + const width = rows[rowToGetWidth].length + const height = rows.length + + const pixels = new Array>() + for (let x = 0; x < width; ++x) { + const col = [] + + for (let y = 0; y < height; ++y) { + col.push(rows[y][x]) + } + + pixels.push(col) + } + + return pixels + } + + renderPixels(pixels: Array>): void { + const rows = new Array() + const height = pixels[0].length + const width = pixels.length + + for (let y = 0; y < height; ++y) { + const row = [] + + for (let x = 0; x < width; ++x) { + row.push(pixels[x][y]) + } + + rows.push(row.join('')) + } + + this.log(rows.join('\n')) + } +} diff --git a/ironfish-cli/src/flags.ts b/ironfish-cli/src/flags.ts new file mode 100644 index 
0000000000..c723fa098b --- /dev/null +++ b/ironfish-cli/src/flags.ts @@ -0,0 +1,89 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { flags } from '@oclif/command' +import { IOptionFlag } from '@oclif/command/lib/flags' +import { DEFAULT_CONFIG_NAME, DEFAULT_DATABASE_NAME, DEFAULT_DATA_DIR } from 'ironfish' + +export const VerboseFlagKey = 'verbose' +export const ConfigFlagKey = 'config' +export const ColorFlagKey = 'color' +export const DataDirFlagKey = 'datadir' +export const DatabaseFlagKey = 'database' +export const RpcUseIpcFlagKey = 'rpc.ipc' +export const RpcUseTcpFlagKey = 'rpc.tcp' +export const RpcTcpHostFlagKey = 'rpc.tcp.host' +export const RpcTcpPortFlagKey = 'rpc.tcp.port' + +export const VerboseFlag = flags.boolean({ + char: 'v', + default: false, + description: 'set logging level to verbose', +}) + +export const ColorFlag = flags.boolean({ + default: true, + allowNo: true, + description: 'should colorize the output', +}) + +export const ConfigFlag = flags.string({ + default: DEFAULT_CONFIG_NAME, + description: 'the name of the config file to use', +}) + +export const DataDirFlag = flags.string({ + default: DEFAULT_DATA_DIR, + description: 'the path to the data dir', +}) + +export const DatabaseFlag = flags.string({ + char: 'd', + default: DEFAULT_DATABASE_NAME, + description: 'the name of the database to use', +}) + +export const RpcUseIpcFlag = flags.boolean({ + default: true, + description: 'connect to the RPC over IPC (default)', +}) + +export const RpcUseTcpFlag = flags.boolean({ + default: false, + description: 'connect to the RPC over TCP', +}) + +export const RpcTcpHostFlag = flags.string({ + description: 'the TCP host to listen for connections on', +}) + +export const RpcTcpPortFlag = flags.integer({ + description: 'the TCP port to listen for connections on', +}) + +const 
localFlags: Record> = {} +localFlags[VerboseFlagKey] = (VerboseFlag as unknown) as IOptionFlag +localFlags[ConfigFlagKey] = (ConfigFlag as unknown) as IOptionFlag +localFlags[DataDirFlagKey] = (DataDirFlag as unknown) as IOptionFlag +localFlags[DatabaseFlagKey] = (DatabaseFlag as unknown) as IOptionFlag + +/** + * These flags should usually be used on any command that starts a node, + * or uses a database to execute the command + */ +export const LocalFlags = localFlags + +const remoteFlags: Record> = {} +remoteFlags[VerboseFlagKey] = (VerboseFlag as unknown) as IOptionFlag +remoteFlags[ConfigFlagKey] = (ConfigFlag as unknown) as IOptionFlag +remoteFlags[DataDirFlagKey] = (DataDirFlag as unknown) as IOptionFlag +remoteFlags[RpcUseTcpFlagKey] = (RpcUseTcpFlag as unknown) as IOptionFlag +remoteFlags[RpcUseIpcFlagKey] = (RpcUseIpcFlag as unknown) as IOptionFlag +remoteFlags[RpcTcpHostFlagKey] = (RpcTcpHostFlag as unknown) as IOptionFlag +remoteFlags[RpcTcpPortFlagKey] = (RpcTcpPortFlag as unknown) as IOptionFlag + +/** + * These flags should usually be used on any command that uses an + * RPC client to connect to a node to run the command + */ +export const RemoteFlags = remoteFlags diff --git a/ironfish-cli/src/images.ts b/ironfish-cli/src/images.ts new file mode 100644 index 0000000000..0630480d9b --- /dev/null +++ b/ironfish-cli/src/images.ts @@ -0,0 +1,33 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +export const ONE_FISH_IMAGE = + ' \n\ +:::::::::: ::::::::::::::::: \n\ +:::::::::::: ::::::::::::::::::: \n\ +::::::::::::: ::::::::::::::::::::: \n\ +:::::::::::::: :::::::::::::::::::::::: \n\ + ::::::::::::: :::::::::: ::::::::: \n\ + ::::::::::::::::::::: :::::::::: \n\ + ::::::::::::::::::::: ::::::::: \n\ + ::::::::::::: :::::::::: ::::::::: \n\ +:::::::::::::: :::::::::::::::::::::::: \n\ +::::::::::::: ::::::::::::::::::::: \n\ +:::::::::::: ::::::::::::::::::: \n\ +:::::::::: :::::::::::::::: \n\ +' + +export const TWO_FISH_IMAGE = + ' \n\ +:::::::::: ::::::::::::::::: :::::::::: ::::::::::::::::: \n\ +:::::::::::: ::::::::::::::::::: :::::::::::: ::::::::::::::::::: \n\ +::::::::::::: ::::::::::::::::::::: ::::::::::::: ::::::::::::::::::::: \n\ +:::::::::::::: :::::::::::::::::::::::: :::::::::::::: :::::::::::::::::::::::: \n\ + ::::::::::::: :::::::::: ::::::::: ::::::::::::: :::::::::: ::::::::: \n\ + ::::::::::::::::::::: :::::::::: ::::::::::::::::::::: :::::::::: \n\ + ::::::::::::::::::::: ::::::::: ::::::::::::::::::::: ::::::::: \n\ + ::::::::::::: :::::::::: ::::::::: ::::::::::::: :::::::::: ::::::::: \n\ +:::::::::::::: :::::::::::::::::::::::: :::::::::::::: :::::::::::::::::::::::: \n\ +::::::::::::: ::::::::::::::::::::: ::::::::::::: ::::::::::::::::::::: \n\ +:::::::::::: ::::::::::::::::::: :::::::::::: ::::::::::::::::::: \n\ +:::::::::: :::::::::::::::: :::::::::: :::::::::::::::: ' diff --git a/ironfish-cli/src/index.ts b/ironfish-cli/src/index.ts new file mode 100644 index 0000000000..72a07e4759 --- /dev/null +++ b/ironfish-cli/src/index.ts @@ -0,0 +1,4 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +export { run } from '@oclif/command' diff --git a/ironfish-cli/src/json-colorizer.d.ts b/ironfish-cli/src/json-colorizer.d.ts new file mode 100644 index 0000000000..e4e4324f2b --- /dev/null +++ b/ironfish-cli/src/json-colorizer.d.ts @@ -0,0 +1,26 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/* eslint-disable @typescript-eslint/no-explicit-any */ + +declare module 'json-colorizer' { + export type ColorizeOptions = { + pretty?: boolean + colors: Partial<{ + BRACE: string + BRACKET: string + COLON: string + COMMA: string + STRING_KEY: string + STRING_LITERAL: string + NUMBER_LITERAL: string + BOOLEAN_LITERAL: string + NULL_LITERAL: string + }> + } + function colorize(data: any, options?: ColorizeOptions): string + function colorize(string: string, options?: ColorizeOptions): string + + export default colorize +} diff --git a/ironfish-cli/src/utils/editor.ts b/ironfish-cli/src/utils/editor.ts new file mode 100644 index 0000000000..cafb2c1a4d --- /dev/null +++ b/ironfish-cli/src/utils/editor.ts @@ -0,0 +1,26 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Config, Assert } from 'ironfish' +import { spawn } from 'child_process' + +export function launchEditor(file: string, config?: Config): Promise { + let editor = process.env.EDITOR + + if (!editor && config) { + editor = config.get('editor') + } + + if (!editor) { + throw new Error( + `you must set the EDITOR environment variable or 'editor' in the ironfish config`, + ) + } + + return new Promise((resolve, reject) => { + Assert.isNotUndefined(editor) + const process = spawn(editor, [file], { stdio: 'inherit' }) + process.on('exit', (code) => resolve(code)) + process.on('error', (error) => reject(error)) + }) +} diff --git a/ironfish-cli/src/utils/index.ts b/ironfish-cli/src/utils/index.ts new file mode 100644 index 0000000000..f94093dde8 --- /dev/null +++ b/ironfish-cli/src/utils/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './types' +export * from './editor' +export * from './rpc' diff --git a/ironfish-cli/src/utils/rpc.ts b/ironfish-cli/src/utils/rpc.ts new file mode 100644 index 0000000000..52c3eb83e1 --- /dev/null +++ b/ironfish-cli/src/utils/rpc.ts @@ -0,0 +1,11 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { isResponseUserError, RequestError } from 'ironfish' + +export function hasUserResponseError(error: unknown): error is RequestError { + return ( + error instanceof RequestError && !!error.response && isResponseUserError(error.response) + ) +} diff --git a/ironfish-cli/src/utils/types.ts b/ironfish-cli/src/utils/types.ts new file mode 100644 index 0000000000..64313e9a6a --- /dev/null +++ b/ironfish-cli/src/utils/types.ts @@ -0,0 +1,11 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export type PartialRecursive = { + [P in keyof T]?: T[P] extends (infer U)[] + ? PartialRecursive[] + : T[P] extends Record + ? PartialRecursive + : T[P] +} diff --git a/ironfish-cli/src/wrtc.d.ts b/ironfish-cli/src/wrtc.d.ts new file mode 100644 index 0000000000..ee4b55452d --- /dev/null +++ b/ironfish-cli/src/wrtc.d.ts @@ -0,0 +1,24 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +declare module 'wrtc' { + // TODO: node-webrtc is supposed to be spec-compliant, but the + // typescript types may not match the browser implementations. 
+ export const MediaStream: MediaStream + export const MediaStreamTrack: MediaStreamTrack + export const RTCDataChannel: RTCDataChannel + export const RTCDataChannelEvent: RTCDataChannelEvent + export const RTCDtlsTransport: RTCDtlsTransport + export const RTCIceCandidate: RTCIceCandidate + export const RTCIceTransport: RTCIceTransport + export const RTCPeerConnection: RTCPeerConnection + export const RTCPeerConnectionIceEvent: RTCPeerConnectionIceEvent + export const RTCRtpReceiver: RTCRtpReceiver + export const RTCRtpSender: RTCRtpSender + export const RTCRtpTransceiver: RTCRtpTransceiver + export const RTCSctpTransport: RTCSctpTransport + export const RTCSessionDescription: RTCSessionDescription + export const getUserMedia: (constraints?: MediaStreamConstraints) => Promise + export const mediaDevices: MediaDevices +} diff --git a/ironfish-cli/start.js b/ironfish-cli/start.js new file mode 100644 index 0000000000..9790f37292 --- /dev/null +++ b/ironfish-cli/start.js @@ -0,0 +1,11 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +const execSync = require('child_process').execSync + +const arg = process.argv.slice(2) || '' + +execSync(`tsc-watch --build --onSuccess "yarn run start:js ${arg.join(' ')}"`, { + stdio: [0, 1, 2], +}) diff --git a/ironfish-cli/tsconfig.eslint.json b/ironfish-cli/tsconfig.eslint.json new file mode 100644 index 0000000000..71cce3ca8a --- /dev/null +++ b/ironfish-cli/tsconfig.eslint.json @@ -0,0 +1,3 @@ +{ + "extends": "./tsconfig.json", +} diff --git a/ironfish-cli/tsconfig.json b/ironfish-cli/tsconfig.json new file mode 100644 index 0000000000..5ce44b1b8b --- /dev/null +++ b/ironfish-cli/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../config/tsconfig.base.json", + "compilerOptions": { + "outDir": "build", + "rootDir": "src", + "tsBuildInfoFile": "./build/tsconfig.tsbuildinfo" + }, + "include": ["src"], + "references": [ + { "path": "../ironfish" }, + ] +} diff --git a/ironfish-cli/tsconfig.test.json b/ironfish-cli/tsconfig.test.json new file mode 100644 index 0000000000..33d207ff42 --- /dev/null +++ b/ironfish-cli/tsconfig.test.json @@ -0,0 +1,8 @@ +{ + "extends": "../config/tsconfig.base.json", + "compilerOptions": { + "noEmit": true + }, + "include": [], + "references": [{ "path": "../ironfish" }] +} diff --git a/ironfish-http-api/.eslintrc.js b/ironfish-http-api/.eslintrc.js new file mode 100644 index 0000000000..f2fb3ae1e8 --- /dev/null +++ b/ironfish-http-api/.eslintrc.js @@ -0,0 +1,6 @@ +module.exports = { + extends: ['ironfish'], + parserOptions: { + tsconfigRootDir: __dirname, + }, +} diff --git a/ironfish-http-api/.gitignore b/ironfish-http-api/.gitignore new file mode 100644 index 0000000000..9f6fd7e9a2 --- /dev/null +++ b/ironfish-http-api/.gitignore @@ -0,0 +1,2 @@ +combined.log +error.log diff --git a/ironfish-http-api/.prettierrc.js b/ironfish-http-api/.prettierrc.js new file mode 100644 index 0000000000..1ad9c111e4 --- /dev/null +++ b/ironfish-http-api/.prettierrc.js @@ -0,0 +1 @@ +module.exports = 'eslint-config-ironfish/prettierrc' diff 
--git a/ironfish-http-api/Dockerfile b/ironfish-http-api/Dockerfile new file mode 100644 index 0000000000..f91cbc2ee3 --- /dev/null +++ b/ironfish-http-api/Dockerfile @@ -0,0 +1,22 @@ +FROM node:14.16.0 as build +ENV PATH="/root/.cargo/bin:${PATH}" + +COPY ./ ./ + +RUN \ + apt-get update && \ + apt-get install rsync -y && \ + curl https://sh.rustup.rs -sSf | sh -s -- -y && \ + cargo install wasm-pack && \ + ./ironfish-http-api/scripts/build.sh + +FROM node:14.16.0 +EXPOSE 8000:8000 + +WORKDIR /usr/src +COPY --from=build /ironfish-http-api/build.api/ironfish-http-api ./app + +ENV NODE_ENV production +WORKDIR /usr/src/app +ENTRYPOINT ["yarn"] +CMD ["start"] diff --git a/ironfish-http-api/README.md b/ironfish-http-api/README.md new file mode 100644 index 0000000000..a66af69582 --- /dev/null +++ b/ironfish-http-api/README.md @@ -0,0 +1,96 @@ +# ironfish-http-api + +[![codecov](https://codecov.io/gh/iron-fish/ironfish/branch/master/graph/badge.svg?token=PCSVEVEW5V&flag=ironfish-http-api)](https://codecov.io/gh/iron-fish/ironfish) + +API to support: + +- faucet +- Iron Fish Node telemetry + +The API uses a queuing system (graphile-worker) to get the different faucet requests. The queue is currently only executing one job at a time. 
+ +## Documentation + +Run dev environment and access `http://localhost:8000/docs/` + +## Installation +```sh +yarn +``` + +## Database +Depends on Postgres +```sh +brew install postgresql +brew services start postgresql + +createdb faucet; + +psql +CREATE USER postgres; +grant all privileges on database faucet to postgres; +ALTER SCHEMA public OWNER to faucet; +``` + +## Development +To start the api: +```sh +yarn +yarn dev +``` + +Start an Iron Fish node with +```sh +ironfish start --rpc.tcp --rpc.tpc-port=8021 +``` + +To start processing the queue: +```sh +yarn start:worker +``` + +## Production + +```sh +yarn +yarn build +yarn start +``` + +## Updating or creating a new API endpoint + +The repository is using OpenAPI 3.0 + +When updating or adding a new endpoint: + +- Edit the openapi.yml [online](https://editor.swagger.io/) + or offline using Swagger editor +- Save the yml file in `config/openapi.yml` +- Export the file in JSON and save it on `config/openapi.json` +- run `yarn api:types` + +## Collecting metrics with the influxdb endpoint + +- Download influxdb and run './influxd' +- Visit [localhost:8086](http://localhost:8086) and follow the setup instructions + - Suggested org name: ironfish + - Suggested initial bucket: devnet + - Hit configure later when you get to the welcome screen + - We don't need telegraf +- Visit [Tokens](http://localhost:8086/orgs/3f00366dda9a52d3/load-data/tokens) + - or Click Data, then Tokens + - Copy your token to clipboard +- Copy the example.env file to .env and edit it with the appropriate values +- In three terminals run: + - http-api: `yarn dev` + - ironfish-cli: `yarn start start` + - ironfish-cli: `yarn start miners:start` +- Visit Data Explorer in influxdb and explore the data +- As one example, paste this query into the Script Editor to show a simple graph: + ```flux + from(bucket: "devnet") + |> range(start: v.timeRangeStart, stop: v.timeRangeStop) + |> filter(fn: (r) => r["_measurement"] == "minedBlock") + |> 
filter(fn: (r) => r["_field"] == "difficulty") + ``` + diff --git a/ironfish-http-api/example.env b/ironfish-http-api/example.env new file mode 100644 index 0000000000..6db21933b4 --- /dev/null +++ b/ironfish-http-api/example.env @@ -0,0 +1,3 @@ +INFLUX_DB_TOKEN='EbhQ5Z9-q9wTmuxshNwKFJzUYA5d2XQLGTRPQN5nFHTDF5KN8d_44tWaqfjBhqBEkdrPlZ0vNEWWaYJsKbIFvg==' +INFLUX_DB_ORG='ironfish' +INFLUX_DB_BUCKET='devnet' diff --git a/ironfish-http-api/jest.config.js b/ironfish-http-api/jest.config.js new file mode 100644 index 0000000000..b9faa197f1 --- /dev/null +++ b/ironfish-http-api/jest.config.js @@ -0,0 +1,9 @@ +const base = require('../config/jest.config.base') +const pkg = require('./package.json') + +module.exports = { + ...base, + testEnvironment: '../config/jestNodeEnvironment', + watchPlugins: ['../config/jestWatchPlugin'], + displayName: pkg.name, +} diff --git a/ironfish-http-api/package.json b/ironfish-http-api/package.json new file mode 100644 index 0000000000..673a537233 --- /dev/null +++ b/ironfish-http-api/package.json @@ -0,0 +1,60 @@ +{ + "name": "ironfish-http-api", + "version": "0.1.0", + "description": "HTTP server to connect to an Iron Fish Node", + "private": true, + "author": "Iron Fish (https://ironfish.network)", + "license": "MPL-2.0", + "scripts": { + "build": "tsc -b", + "dev": "nodemon --watch src --exec yarn build:start -e ts", + "build:start": "tsc -b && yarn start", + "start": "node -r dotenv/config ./build/src/index.js", + "start:worker": "node -r dotenv/config ./build/src/tasks/worker.js", + "lint": "tsc -b && tsc -b tsconfig.test.json && eslint --ext .ts,.tsx,.js,.jsx src/", + "lint:fix": "tsc -b && tsc -b tsconfig.test.json && eslint --ext .ts,.tsx,.js,.jsx src/ --fix", + "test": "tsc -b tsconfig.test.json && jest", + "test:watch": "tsc -b tsconfig.test.json && jest --watch", + "api:types": "dtsgen src/config/openapi.json -o src/types/openapi.d.ts" + }, + "devDependencies": { + "@types/express": "^4.17.9", + "@types/express-openapi": "^1.9.0", 
+ "@types/node": "^14.14.11", + "@types/supertest": "2.0.10", + "@types/swagger-ui-express": "^4.1.2", + "@types/winston": "^2.4.4", + "dotenv": "8.2.0", + "dtsgenerator": "^3.3.1", + "eslint-config-ironfish": "*", + "jest": "^26.4.2", + "nodemon": "^2.0.6", + "supertest": "6.0.1", + "ts-jest": "^26.4.0", + "typescript": "^4.1.2" + }, + "resolutions": { + "node-forge": "0.10.0", + "object-path": "^0.11.4" + }, + "homepage": "https://github.com/iron-fish/ironfish", + "keywords": [ + "http", + "api" + ], + "repository": "iron-fish/ironfish", + "dependencies": { + "@influxdata/influxdb-client": "1.9.0", + "@types/validator": "^13.1.1", + "connect": "^3.7.0", + "express": "^4.17.1", + "express-openapi-validator": "^4.9.0", + "graphile-worker": "0.9.0", + "ironfish": "*", + "swagger-routes-express": "^3.2.1", + "swagger-ui-express": "^4.1.5", + "validator": "^13.5.1", + "winston": "^3.3.3" + }, + "types": "build/index.d.ts" +} diff --git a/ironfish-http-api/scripts/build-docker.sh b/ironfish-http-api/scripts/build-docker.sh new file mode 100755 index 0000000000..ae3e683bcd --- /dev/null +++ b/ironfish-http-api/scripts/build-docker.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" +cd ../.. + +echo "Building Docker Image" +cp .gitignore .dockerignore + +docker build . \ + --progress plain \ + --tag ironfish-http-api:latest \ + --file ironfish-http-api/Dockerfile diff --git a/ironfish-http-api/scripts/build.sh b/ironfish-http-api/scripts/build.sh new file mode 100755 index 0000000000..08535bf8c7 --- /dev/null +++ b/ironfish-http-api/scripts/build.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" +cd ../../ + +if ! 
command -v rsync &> /dev/null; then + echo "rsync is not installed but is required" + exit 1 +fi + +echo "Building WASM" +( cd ironfish-wasm && yarn run build:node ) + +echo "Installing from lockfile" +yarn --non-interactive --frozen-lockfile --ignore-scripts + +echo "Building Iron Fish HTTP API project" +cd ironfish-http-api +yarn build + +echo "Outputting build to $PWD/build.api" +rm -rf build.api +mkdir build.api + +echo "Packing API" +yarn pack -f ./build.api/packaged.tar.gz +cd build.api +tar zxvf packaged.tar.gz + +cd package +echo "Copying build" +cp -R ../../build ./ + +echo "Copying node_modules" +rsync -L -avrq --exclude='ironfish-http-api' ../../../node_modules ./ + +echo "Packaging build into ironfish-http-api.tar.gz" +cd .. +mv package ironfish-http-api +tar -cf ironfish-http-api.tar.gz ironfish-http-api \ No newline at end of file diff --git a/ironfish-http-api/scripts/deploy-docker.sh b/ironfish-http-api/scripts/deploy-docker.sh new file mode 100755 index 0000000000..42729203d3 --- /dev/null +++ b/ironfish-http-api/scripts/deploy-docker.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" + +if [ -z "${AWS_BLOCK_API_REGISTRY_URL-}" ]; then + echo "Set AWS_BLOCK_API_REGISTRY_URL before running deploy-docker.sh" + exit 1 +fi + +docker tag ironfish-http-api:latest ${AWS_BLOCK_API_REGISTRY_URL}:latest +docker push ${AWS_BLOCK_API_REGISTRY_URL}:latest diff --git a/ironfish-http-api/src/config/config.ts b/ironfish-http-api/src/config/config.ts new file mode 100644 index 0000000000..de235228d0 --- /dev/null +++ b/ironfish-http-api/src/config/config.ts @@ -0,0 +1,32 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +/** + * Server specific config + * */ +export const SERVER_PORT = 8080 + +/** + * Database specific config + * */ +export const DATABASE_HOST = process.env.DATABASE_HOST || 'localhost' +export const DATABASE_PORT = process.env.DATABASE_PORT || 5432 +export const DATABASE_USERNAME = process.env.DATABASE_USERNAME || 'postgres' +export const DATABASE_PASSWORD = process.env.DATABASE_PASSWORD || '' +export const DATABASE_BASE = process.env.DATABASE_BASE || 'faucet' +export const DATABASE_CONNECTION_STRING = `postgres://${DATABASE_USERNAME}:${DATABASE_PASSWORD}@${DATABASE_HOST}:${DATABASE_PORT}/${DATABASE_BASE}` + +/** + * RPC Config + * */ +export const RPC_MODE = (process.env.RPC_MODE as 'tcp' | 'ipc') || 'ipc' +export const RPC_HOST = process.env.RPC_HOST || '0.0.0.0' +export const RPC_PORT = process.env.RPC_PORT || 8021 + +/** + * Faucet Config + * */ +export const FAUCET_AMOUNT = process.env.FAUCET_AMOUNT || 1000 +export const FAUCET_FEE = process.env.FAUCET_FEE || 1 +export const FAUCET_ACCOUNT_NAME = process.env.ACCOUNT_NAME || 'IronFishFaucetAccount' diff --git a/ironfish-http-api/src/config/index.ts b/ironfish-http-api/src/config/index.ts new file mode 100644 index 0000000000..5041263862 --- /dev/null +++ b/ironfish-http-api/src/config/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export * from './config' diff --git a/ironfish-http-api/src/config/openapi.json b/ironfish-http-api/src/config/openapi.json new file mode 100644 index 0000000000..e10a01a55f --- /dev/null +++ b/ironfish-http-api/src/config/openapi.json @@ -0,0 +1,185 @@ +{ + "openapi": "3.0.3", + "info": { + "title": "Iron Fish API", + "description": "Backend API for Faucet and Block explorer", + "license": { + "name": "MPL 2.0", + "url": "https://www.mozilla.org/en-US/MPL/2.0/" + }, + "version": "1.0.0" + }, + "externalDocs": { + "description": "Find out more about Iron Fish", + "url": "https://ironfish.network" + }, + "servers": [ + { + "url": "/api/v1" + } + ], + "tags": [ + { + "name": "Faucet", + "description": "Faucet APIs" + }, + { + "name": "Analytics", + "description": "Analytics APIs for the Block Explorer" + }, + { + "name": "Telemetry", + "description": "Telemetry APIs for metrics collection" + } + ], + "paths": { + "/getFunds": { + "post": { + "description": "Send coins to the caller address", + "tags": [ + "Faucet" + ], + "operationId": "getFunds", + "parameters": [ + { + "name": "email", + "required": false, + "in": "query", + "description": "The email of the user", + "allowEmptyValue": true, + "schema": { + "type": "string" + } + }, + { + "name": "publicKey", + "required": true, + "in": "query", + "description": "The public key of the user's account", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Success", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetFundsResponse" + } + } + } + } + } + } + }, + "/writeTelemetry": { + "post": { + "description": "Write a list of time-series metrics to the database", + "tags": [ + "Telemetry" + ], + "operationId": "writeTelemetry", + "requestBody": { + "description": "List of metrics to record", + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/WriteTelemetryRequest" + } + } + } + }, + 
"responses": { + "200": { + "description": "Telemetry successfully received" + } + } + } + } + }, + "components": { + "schemas": { + "GetFundsResponse": { + "type": "object", + "additionalProperties": false, + "required": [ + "message" + ], + "properties": { + "message": { + "type": "string" + } + } + }, + "WriteTelemetryRequest": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "required": [ + "name", + "timestamp", + "fields" + ], + "properties": { + "name": { + "type": "string", + "description": "Identifier for the metric", + "pattern": "^[a-zA-Z][a-zA-Z0-9]+$" + }, + "timestamp": { + "description": "Time when the metric was recorded", + "type": "string", + "format": "date-time" + }, + "tags": { + "type": "object", + "description": "Optional collection of properties to identify the metric", + "additionalProperties": { + "type": "string" + } + }, + "fields": { + "description": "List of values associated with a specific recording of that metric", + "minItems": 1, + "type": "array", + "items": { + "description": "Name and a strongly typed value for the type of data being recorded. 
Only one typed value can be specified per field.", + "type": "object", + "additionalProperties": false, + "required": [ + "name" + ], + "maxProperties": 2, + "minProperties": 2, + "properties": { + "name": { + "description": "The name of the field being recorded.", + "pattern": "^[a-zA-Z][a-zA-Z0-9]+$", + "type": "string" + }, + "string": { + "type": "string" + }, + "boolean": { + "type": "boolean" + }, + "float": { + "type": "number" + }, + "integer": { + "type": "integer" + } + } + } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/ironfish-http-api/src/config/openapi.yml b/ironfish-http-api/src/config/openapi.yml new file mode 100644 index 0000000000..e2600c5315 --- /dev/null +++ b/ironfish-http-api/src/config/openapi.yml @@ -0,0 +1,130 @@ +openapi: 3.0.3 +info: + title: Iron Fish API + description: Backend API for Faucet and Block explorer + license: + name: MPL 2.0 + url: https://www.mozilla.org/en-US/MPL/2.0/ + version: 1.0.0 +externalDocs: + description: Find out more about Iron Fish + url: https://ironfish.network +servers: +- url: /api/v1 + +tags: + - name: Faucet + description: Faucet APIs + - name: Analytics + description: Analytics APIs for the Block Explorer + - name: Telemetry + description: Telemetry APIs for metrics collection + +paths: + /getFunds: + post: + description: Send coins to the caller address + tags: + - Faucet + operationId: getFunds + parameters: + - name: email + required: false + in: query + allowEmptyValue: true + description: The email of the user + schema: + type: string + - name: publicKey + required: true + in: query + description: The public key of the user's account + schema: + type: string + responses: + 200: + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/GetFundsResponse' + + /writeTelemetry: + post: + description: Write a list of time-series metrics to the database + tags: + - Telemetry + operationId: writeTelemetry + requestBody: + description: List of 
metrics to record + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/WriteTelemetryRequest' + responses: + 200: + description: Telemetry successfully received + + +components: + schemas: + GetFundsResponse: + type: object + additionalProperties: false + required: + - message + properties: + message: + type: string + + WriteTelemetryRequest: + type: array + items: + type: object + additionalProperties: false + required: + - name + - timestamp + - fields + properties: + name: + type: string + description: Identifier for the metric + pattern: '^[a-zA-Z][a-zA-Z0-9]+$' + timestamp: + description: Time when the metric was recorded + type: string + format: date-time + tags: + type: object + description: Optional collection of properties to identify the metric + additionalProperties: + type: string + fields: + description: List of values associated with a specific recording of that metric + minItems: 1 + type: array + items: + description: + Name and a strongly typed value for the type of + data being recorded. Only one typed value + can be specified per field. + type: object + additionalProperties: false + required: + - name + maxProperties: 2 + minProperties: 2 + properties: + name: + description: The name of the field being recorded. + type: string + string: + type: string + boolean: + type: boolean + float: + type: number + integer: + type: integer diff --git a/ironfish-http-api/src/controllers/Faucet.test.ts b/ironfish-http-api/src/controllers/Faucet.test.ts new file mode 100644 index 0000000000..745e86af59 --- /dev/null +++ b/ironfish-http-api/src/controllers/Faucet.test.ts @@ -0,0 +1,45 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import request from 'supertest' +import { Express } from 'express-serve-static-core' + +const NodeFileProvider = jest.fn() +// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access +NodeFileProvider.prototype.init = jest.fn() + +const quickAddJob = jest.fn().mockReturnValue(true) +jest.mock('graphile-worker', () => ({ + quickAddJob, +})) + +import { Server } from '../server/server' + +describe('POST /getFunds', () => { + let server: Express + beforeAll(() => { + server = new Server().app + }) + it('should return 200 and a valid response', async () => { + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-call + const result: { body: { message: string }; status: number } = await request(server).post( + `/api/v1/getFunds?publicKey=myPublicKey&email=johannjohann%40ironfish.network`, + ) + + expect(result.status).toEqual(200) + expect(result.body.message).toEqual( + 'Added johannjohann@ironfish.network to our newsletter. Faucet request successfully added to the queue for key: myPublicKey.', + ) + }) + + it('should return 200 with missing email', async () => { + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-call + const result: { body: { message: string }; status: number } = await request(server).post( + `/api/v1/getFunds?publicKey=myPublicKey&email=`, + ) + expect(result.status).toEqual(200) + expect(result.body.message).toEqual( + 'Faucet request successfully added to the queue for key: myPublicKey.', + ) + }) +}) diff --git a/ironfish-http-api/src/controllers/Faucet.ts b/ironfish-http-api/src/controllers/Faucet.ts new file mode 100644 index 0000000000..17b6b13d5f --- /dev/null +++ b/ironfish-http-api/src/controllers/Faucet.ts @@ -0,0 +1,28 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Request, Response } from 'express' +import { FaucetJob } from '../jobs/FaucetJob' +import { Logger } from '../utils/logger' + +export async function getFunds(request: Request, response: Response): Promise { + const qs = (request.query as unknown) as Paths.GetFunds.QueryParameters + + const { email, publicKey } = qs + + try { + const job = await FaucetJob(publicKey, email) + Logger.debug(`Created job: ${job.task_identifier}, id: ${job.id}`) + } catch (error: unknown) { + response.status(500) + if (typeof error === 'object' && error && 'toString' in error) { + response.send(error.toString()) + } + return response + } + + const keyMessage = `Faucet request successfully added to the queue for key: ${publicKey}.` + const message = email ? `Added ${email} to our newsletter. ${keyMessage}` : keyMessage + + return response.json({ message }) +} diff --git a/ironfish-http-api/src/controllers/Telemetry.test.ts b/ironfish-http-api/src/controllers/Telemetry.test.ts new file mode 100644 index 0000000000..77f0e6a8fd --- /dev/null +++ b/ironfish-http-api/src/controllers/Telemetry.test.ts @@ -0,0 +1,58 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import request from 'supertest' +import { Express } from 'express-serve-static-core' + +import { Server } from '../server/server' + +const influxWriter = jest.fn() +const PointMock = { + stringField: jest.fn(), + intField: jest.fn(), + floatField: jest.fn(), + booleanField: jest.fn(), + tag: jest.fn(), + timestamp: jest.fn(), +} +jest.mock('../utils/logger') +jest.mock('@influxdata/influxdb-client', () => { + return { + InfluxDB: jest.fn().mockImplementation(() => { + return { + getWriteApi: jest.fn().mockImplementation(() => { + return { writePoints: influxWriter } + }), + } + }), + Point: jest.fn().mockImplementation(() => { + return PointMock + }), + } +}) + +describe('POST /writeTelemetry', () => { + let server: Express + beforeAll(() => { + server = new Server().app + }) + + it('should return 200 with valid input', async () => { + const result = await request(server) + .post('/api/v1/writeTelemetry') + .send([ + { + name: 'finallyOver', + timestamp: new Date('2020-12-31T23:59:59.999Z'), + fields: [{ name: 'betterNow', boolean: true }], + }, + ]) + expect(result.status).toEqual(200) + expect(PointMock.booleanField).toHaveBeenCalledWith('betterNow', true) + expect(PointMock.tag).not.toHaveBeenCalled() + expect(PointMock.timestamp).toHaveBeenCalledTimes(1) + expect(PointMock.timestamp.mock.calls[0]).toEqual([new Date('2020-12-31T23:59:59.999Z')]) + expect(influxWriter).toHaveBeenCalledTimes(1) + }) +}) diff --git a/ironfish-http-api/src/controllers/Telemetry.ts b/ironfish-http-api/src/controllers/Telemetry.ts new file mode 100644 index 0000000000..f9e8515ea0 --- /dev/null +++ b/ironfish-http-api/src/controllers/Telemetry.ts @@ -0,0 +1,65 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { Request, Response } from 'express' +import { Logger } from '../utils/logger' +import { InfluxDB, Point } from '@influxdata/influxdb-client' + +// Requires three environment variables to be set: +// INFLUX_DB_TOKEN, INFLUX_DB_ORG, and INFLUX_DB_BUCKET +// See example.env for an example. + +const influxClient = new InfluxDB({ + url: 'http://localhost:8086', + token: process.env.INFLUX_DB_TOKEN, +}) + +async function writeTelemetryController( + metrics: Components.Schemas.WriteTelemetryRequest, +): Promise { + Logger.debug('Received Metrics: ', metrics) + + const points = metrics.map((metric) => { + let point = new Point(metric.name) + + const timestamp = new Date(metric.timestamp) + point.timestamp(timestamp) + + if (metric.tags) { + for (const [key, value] of Object.entries(metric.tags)) { + point = point.tag(key, value) + } + } + + for (const field of metric.fields) { + // This is clumsy because openapi doesn't permit overloading types + if (field.string !== undefined) { + point = point.stringField(field.name, field.string) + } else if (field.integer !== undefined) { + point = point.intField(field.name, field.integer) + } else if (field.float !== undefined) { + point = point.floatField(field.name, field.float) + } else if (field.boolean !== undefined) { + point = point.booleanField(field.name, field.boolean) + } + } + + return point + }) + + const influxWriter = influxClient.getWriteApi( + process.env.INFLUX_DB_ORG || '', + process.env.INFLUX_DB_BUCKET || '', + ) + influxWriter.writePoints(points) + + return Promise.resolve() +} + +export async function writeTelemetry(request: Request, response: Response): Promise { + const body = (request.body as unknown) as Components.Schemas.WriteTelemetryRequest + await writeTelemetryController(body) + response.sendStatus(200) + return response +} diff --git a/ironfish-http-api/src/controllers/index.ts b/ironfish-http-api/src/controllers/index.ts new file mode 100644 index 0000000000..74b18f625e --- /dev/null +++ 
b/ironfish-http-api/src/controllers/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './Faucet' +export * from './Telemetry' diff --git a/ironfish-http-api/src/index.ts b/ironfish-http-api/src/index.ts new file mode 100644 index 0000000000..022de48b1b --- /dev/null +++ b/ironfish-http-api/src/index.ts @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Server } from './server/server' +import { Logger } from './utils/logger' + +const PORT = 8000 + +const server = new Server() + +server + .open(PORT) + .then(() => { + Logger.info(`Listening on http://localhost:${PORT}`) + }) + .catch((err: string) => { + Logger.error(`Error: ${err}`) + }) diff --git a/ironfish-http-api/src/jobs/FaucetJob.ts b/ironfish-http-api/src/jobs/FaucetJob.ts new file mode 100644 index 0000000000..9e63a2edf6 --- /dev/null +++ b/ironfish-http-api/src/jobs/FaucetJob.ts @@ -0,0 +1,15 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { DATABASE_CONNECTION_STRING } from '../config' + +import { Job, quickAddJob } from 'graphile-worker' +export const JOB_NAME = 'getFundsTask' + +export async function FaucetJob(publicKey: string, email: string | undefined): Promise { + return await quickAddJob({ connectionString: DATABASE_CONNECTION_STRING }, JOB_NAME, { + publicKey, + email, + }) +} diff --git a/ironfish-http-api/src/middleware/errorHandler.test.ts b/ironfish-http-api/src/middleware/errorHandler.test.ts new file mode 100644 index 0000000000..487ac49ad3 --- /dev/null +++ b/ironfish-http-api/src/middleware/errorHandler.test.ts @@ -0,0 +1,33 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { errorHandler } from './errorHandler' +import { Request, Response } from 'express' + +const mockResponse = () => { + const res = {} as Response + res.status = jest.fn().mockReturnValue(res) + res.json = jest.fn().mockReturnValue(res) + return res +} + +describe('errorHandler middleware', () => { + it('should return the right error response', () => { + const response = mockResponse() + const error = { status: 401, message: 'not authorized' } + errorHandler(error, {} as Request, response, jest.fn()) + expect(response.json).toHaveBeenCalledWith({ + error: { + message: 'not authorized', + type: 'request_validation', + }, + }) + }) + + it('should not handle the unexpected error', () => { + const next = jest.fn() + const error = { message: 'not authorized' } + errorHandler(error, {} as Request, mockResponse(), next) + expect(next).toHaveBeenCalledWith(error) + }) +}) diff --git a/ironfish-http-api/src/middleware/errorHandler.ts b/ironfish-http-api/src/middleware/errorHandler.ts new file mode 100644 index 0000000000..9e9b418291 --- /dev/null +++ b/ironfish-http-api/src/middleware/errorHandler.ts @@ -0,0 +1,24 @@ +/* This Source Code Form is 
subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Request, Response, NextFunction } from 'express' + +import { RequestError, isRouteErrorType } from '../types/RouteError' + +export const errorHandler = ( + error: RequestError, + req: Request, + res: Response, + next: NextFunction, +): void => { + if (isRouteErrorType(error)) { + res.status(error.status).json({ + error: { + type: 'request_validation', + message: error.message, + }, + }) + return + } + next(error) +} diff --git a/ironfish-http-api/src/rpc/rpc.ts b/ironfish-http-api/src/rpc/rpc.ts new file mode 100644 index 0000000000..3103c36d33 --- /dev/null +++ b/ironfish-http-api/src/rpc/rpc.ts @@ -0,0 +1,29 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ConfigOptions, IronfishSdk, NodeFileProvider } from 'ironfish' +import { RPC_HOST, RPC_MODE, RPC_PORT } from '../config' + +export class RPCClient { + sdk: IronfishSdk + + private constructor(sdk: IronfishSdk) { + this.sdk = sdk + } + static async init(): Promise { + const fileSystem = new NodeFileProvider() + await fileSystem.init() + + const configOverrides: Partial = {} + configOverrides.logLevel = '*:verbose' + configOverrides.enableRpcTcp = RPC_MODE === 'tcp' + configOverrides.rpcTcpHost = RPC_HOST + configOverrides.rpcTcpPort = Number(RPC_PORT) + + const sdk = await IronfishSdk.init({ + configOverrides: configOverrides, + }) + + return new RPCClient(sdk) + } +} diff --git a/ironfish-http-api/src/server/server.ts b/ironfish-http-api/src/server/server.ts new file mode 100644 index 0000000000..212333d32d --- /dev/null +++ b/ironfish-http-api/src/server/server.ts @@ -0,0 +1,87 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import express from 'express' +import * as OpenApiValidator from 'express-openapi-validator' +import { Express } from 'express-serve-static-core' +import { connector } from 'swagger-routes-express' +import swaggerUi from 'swagger-ui-express' +import bodyParser from 'body-parser' + +import { errorHandler } from '../middleware/errorHandler' +import OpenAPIDefinition from '../config/openapi.json' +import * as api from '../controllers' +import { Logger } from '../utils/logger' +import http from 'http' + +export class Server { + app: Express + httpServer: http.Server | null = null + isOpen = false + openPromise: Promise | null = null + + constructor() { + const app = express() + + app.use(bodyParser.json()) + + // Setup API validator + const validatorOptions = { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-explicit-any + apiSpec: OpenAPIDefinition, + validateRequests: true, + validateResponses: true, + } + // Route for health check + // TODO - return a real health check system + app.get('/healthcheck', (req, res) => res.end()) + // Route for api documentation + app.use('/docs', swaggerUi.serve, swaggerUi.setup(OpenAPIDefinition)) + app.use(OpenApiValidator.middleware(validatorOptions)) + app.use(errorHandler) + + connector(api, validatorOptions.apiSpec)(app) + + this.app = app + } + + async open(port: number): Promise { + this.isOpen = true + + this.openPromise = new Promise((resolve, reject) => { + const server = this.app.listen(port, (err?: unknown) => { + if (err) { + reject(err) + return + } + + this.httpServer = server + resolve() + }) + }) + + await this.openPromise + } + + async close(): Promise { + if (!this.isOpen) return + this.isOpen = false + await this.openPromise + + Logger.info('App server is starting shutdown') + + const httpServer = this.httpServer + + if (httpServer) { + await new Promise((resolve, reject) => { + httpServer.close((err: unknown) => { + if (err) reject(err) + else resolve() + }) + }) + 
} + + Logger.info('App server is no longer open for connections') + } +} diff --git a/ironfish-http-api/src/tasks/FaucetTask.ts b/ironfish-http-api/src/tasks/FaucetTask.ts new file mode 100644 index 0000000000..ad101f9276 --- /dev/null +++ b/ironfish-http-api/src/tasks/FaucetTask.ts @@ -0,0 +1,88 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { JobHelpers } from 'graphile-worker' + +import { RPCClient } from '../rpc/rpc' +import { FAUCET_AMOUNT, FAUCET_ACCOUNT_NAME, FAUCET_FEE } from '../config' + +const MEMO = 'Welcome to Iron Fish!' +const MAX_ATTEMPT = 3 + +interface FaucetPayload { + publicKey: string + email?: string +} +interface RPCError { + codeMessage: string +} + +function sleep(timeMs: number): Promise { + return new Promise((resolve) => setTimeout(resolve, timeMs)) +} + +function isFaucetPayload(payload: unknown): payload is FaucetPayload { + return typeof payload === 'object' && payload !== null && 'publicKey' in payload +} + +function isRPCError(error: unknown): error is RPCError { + return typeof error === 'object' && error !== null && 'codeMessage' in error +} + +export async function getFundsTask(payload: unknown, helpers: JobHelpers): Promise { + if (!isFaucetPayload(payload)) { + return + } + + const { publicKey } = payload + + helpers.logger.info(`Payment to ${publicKey} - processing`) + + const rpc = await RPCClient.init() + const connected = await rpc.sdk.client.tryConnect() + + if (!connected) { + throw new Error('Connection to RPC failed') + } + + helpers.logger.info(`Connected to RPC`) + + // When a transaction is sent, it might take a few seconds for the node to be ready to spend + // This will wait until the balance is > 0 again and then send the transaction + // If after ~1 minute, the balance is still 0, fail the task + let attempt = 0 + for (;;) { + if (attempt > 
MAX_ATTEMPT) { + throw new Error(`Not enough money on the faucet`) + } + + const balance = await rpc.sdk.client.getAccountBalance() + + if (balance && Number(balance.content.confirmedBalance) > 0) { + helpers.logger.info(`Faucet's balance is NOICE`) + + break + } + + helpers.logger.info(`Faucet's balance is currently 0 - waiting on the balance to update`) + await sleep(2000) + attempt += 1 + } + + try { + await rpc.sdk.client.sendTransaction({ + amount: FAUCET_AMOUNT.toString(), + fromAccountName: FAUCET_ACCOUNT_NAME, + memo: MEMO, + toPublicKey: publicKey, + transactionFee: BigInt(FAUCET_FEE).toString(), + }) + } catch (error: unknown) { + if (isRPCError(error)) { + throw new Error(`Sending transaction failed ${error.codeMessage}`) + } + throw new Error(`Sending transaction failed`) + } + + helpers.logger.info(`Payment to ${publicKey} - done`) +} diff --git a/ironfish-http-api/src/tasks/worker.ts b/ironfish-http-api/src/tasks/worker.ts new file mode 100644 index 0000000000..014c82c670 --- /dev/null +++ b/ironfish-http-api/src/tasks/worker.ts @@ -0,0 +1,29 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { run } from 'graphile-worker' + +import { Logger } from '../utils/logger' + +import { DATABASE_CONNECTION_STRING } from '../config' +import { getFundsTask } from './FaucetTask' + +async function main() { + const runner = await run({ + connectionString: DATABASE_CONNECTION_STRING, + concurrency: 1, + // Install signal handlers for graceful shutdown on SIGINT, SIGTERM, etc + noHandleSignals: false, + pollInterval: 1000, + taskList: { + getFundsTask, + }, + }) + + await runner.promise +} + +main().catch((err) => { + Logger.error(err) + process.exit(1) +}) diff --git a/ironfish-http-api/src/types/RouteError.ts b/ironfish-http-api/src/types/RouteError.ts new file mode 100644 index 0000000000..db5f24fe96 --- /dev/null +++ b/ironfish-http-api/src/types/RouteError.ts @@ -0,0 +1,15 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export type RequestError = Record +export type RouteError = { + status: number + message?: string +} + +export const isRouteErrorType = (error: RequestError): error is RouteError => + error != null && + 'status' in error && + typeof error.status === 'number' && + (!('message' in error) || typeof error.message === 'string') diff --git a/ironfish-http-api/src/types/openapi.d.ts b/ironfish-http-api/src/types/openapi.d.ts new file mode 100644 index 0000000000..64f4265d6c --- /dev/null +++ b/ironfish-http-api/src/types/openapi.d.ts @@ -0,0 +1,58 @@ +declare namespace Components { + namespace Schemas { + export interface GetFundsResponse { + message: string; + } + export type WriteTelemetryRequest = { + /** + * Identifier for the metric + */ + name: string; // ^[a-zA-Z][a-zA-Z0-9]+$ + /** + * Time when the metric was recorded + */ + timestamp: string; // date-time + /** + * Optional collection of properties to identify the metric + */ + tags?: { + [name: string]: string; + }; + /** + * List of values associated with a specific recording of that metric + */ + fields: { + /** + * The name of the field being recorded. 
+ */ + name: string; // ^[a-zA-Z][a-zA-Z0-9]+$ + string?: string; + boolean?: boolean; + float?: number; + integer?: number; + }[]; + }[]; + } +} +declare namespace Paths { + namespace GetFunds { + namespace Parameters { + export type Email = string; + export type PublicKey = string; + } + export interface QueryParameters { + email?: Parameters.Email; + publicKey: Parameters.PublicKey; + } + namespace Responses { + export type $200 = Components.Schemas.GetFundsResponse; + } + } + namespace WriteTelemetry { + export type RequestBody = Components.Schemas.WriteTelemetryRequest; + namespace Responses { + export interface $200 { + } + } + } +} diff --git a/ironfish-http-api/src/types/swagger-routes-express.d.ts b/ironfish-http-api/src/types/swagger-routes-express.d.ts new file mode 100644 index 0000000000..f2ca553ac0 --- /dev/null +++ b/ironfish-http-api/src/types/swagger-routes-express.d.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +declare module 'swagger-routes-express' diff --git a/ironfish-http-api/src/utils/logger.ts b/ironfish-http-api/src/utils/logger.ts new file mode 100644 index 0000000000..2dabceba4e --- /dev/null +++ b/ironfish-http-api/src/utils/logger.ts @@ -0,0 +1,20 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { transports, createLogger, format } from 'winston' + +/** + * Logger system using winston + * Allows to write errors to error.log and other log to combined.log + */ +export const Logger = createLogger({ + level: 'debug', + format: format.combine(format.timestamp(), format.json()), + defaultMeta: { service: 'user-service' }, + transports: [ + new transports.Console({ format: format.simple() }), + new transports.File({ filename: 'error.log', level: 'error' }), + new transports.File({ filename: 'combined.log' }), + ], +}) diff --git a/ironfish-http-api/tsconfig.eslint.json b/ironfish-http-api/tsconfig.eslint.json new file mode 100644 index 0000000000..a8d4317b49 --- /dev/null +++ b/ironfish-http-api/tsconfig.eslint.json @@ -0,0 +1,4 @@ +{ + "extends": "./tsconfig.json", + "exclude": [] +} diff --git a/ironfish-http-api/tsconfig.json b/ironfish-http-api/tsconfig.json new file mode 100644 index 0000000000..c4b8312310 --- /dev/null +++ b/ironfish-http-api/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../config/tsconfig.base.json", + "compilerOptions": { + "outDir": "build", + "resolveJsonModule": true + }, + "include": ["src", "src/config/*.json"], + "exclude": ["src/**/*.test.*"], + "references": [ + { "path": "../ironfish" }, + ] +} diff --git a/ironfish-http-api/tsconfig.test.json b/ironfish-http-api/tsconfig.test.json new file mode 100644 index 0000000000..33d207ff42 --- /dev/null +++ b/ironfish-http-api/tsconfig.test.json @@ -0,0 +1,8 @@ +{ + "extends": "../config/tsconfig.base.json", + "compilerOptions": { + "noEmit": true + }, + "include": [], + "references": [{ "path": "../ironfish" }] +} diff --git a/ironfish-rosetta-api/.eslintrc.js b/ironfish-rosetta-api/.eslintrc.js new file mode 100644 index 0000000000..13ddcf16b1 --- /dev/null +++ b/ironfish-rosetta-api/.eslintrc.js @@ -0,0 +1,21 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +module.exports = { + extends: ['ironfish'], + parserOptions: { + tsconfigRootDir: __dirname, + }, + overrides: [ + { + // this rules are disabled for auto generated files from openapigenerator + files: ['*/types/model/*.ts'], + rules: { + '@typescript-eslint/ban-types': 'off', + '@typescript-eslint/no-unused-vars': 'off', + '@typescript-eslint/explicit-module-boundary-types': 'off', + }, + }, + ], +} diff --git a/ironfish-rosetta-api/.gitignore b/ironfish-rosetta-api/.gitignore new file mode 100644 index 0000000000..7b39c10bbe --- /dev/null +++ b/ironfish-rosetta-api/.gitignore @@ -0,0 +1,8 @@ +combined.log +error.log +rosetta-cli +rosetta-specifications +src/types/.openapi-generator +src/types/git_push.sh +src/types/.npmignore +src/types/.openapi-generator-ignore \ No newline at end of file diff --git a/ironfish-rosetta-api/.prettierrc.js b/ironfish-rosetta-api/.prettierrc.js new file mode 100644 index 0000000000..1ad9c111e4 --- /dev/null +++ b/ironfish-rosetta-api/.prettierrc.js @@ -0,0 +1 @@ +module.exports = 'eslint-config-ironfish/prettierrc' diff --git a/ironfish-rosetta-api/Dockerfile b/ironfish-rosetta-api/Dockerfile new file mode 100644 index 0000000000..5c21791f46 --- /dev/null +++ b/ironfish-rosetta-api/Dockerfile @@ -0,0 +1,22 @@ +FROM node:14.16.0 as build +ENV PATH="/root/.cargo/bin:${PATH}" + +COPY ./ ./ + +RUN \ + apt-get update && \ + apt-get install rsync -y && \ + curl https://sh.rustup.rs -sSf | sh -s -- -y && \ + cargo install wasm-pack && \ + ./ironfish-rosetta-api/scripts/build.sh + +FROM node:14.16.0 +EXPOSE 8080:8080 + +WORKDIR /usr/src +COPY --from=build /ironfish-rosetta-api/build.rosetta/ironfish-rosetta-api ./app + +ENV NODE_ENV production +WORKDIR /usr/src/app +ENTRYPOINT ["yarn"] +CMD ["start"] diff --git a/ironfish-rosetta-api/README.md b/ironfish-rosetta-api/README.md new file mode 100644 index 
0000000000..d5d8501957
--- /dev/null
+++ b/ironfish-rosetta-api/README.md
@@ -0,0 +1,92 @@
+# ironfish-rosetta-api
+
+[![codecov](https://codecov.io/gh/iron-fish/ironfish/branch/master/graph/badge.svg?token=PCSVEVEW5V&flag=ironfish-rosetta-api)](https://codecov.io/gh/iron-fish/ironfish)
+
+API used for the Iron Fish Block Explorer.
+
+The architecture is as follows:
+
+One instance:
+Database <- Syncer -> Iron Fish node
+
+Other instance:
+API -> Database
+
+
+The block explorer client connects to the API. It allows scaling the API and the database, while still needing only one Iron Fish node and one Syncer.
+
+## Installation
+```sh
+yarn
+```
+
+## Database
+Depends on Postgres
+```sh
+brew install postgresql
+brew services start postgresql
+
+createdb rosetta;
+
+psql
+CREATE USER postgres;
+grant all privileges on database rosetta to postgres;
+ALTER SCHEMA public OWNER to postgres;
+```
+
+### Run migration
+```sh
+# Create a migration
+yarn run migrate create my migration
+# Run the migration
+yarn run migrate up
+# Rollback
+yarn run migrate down
+```
+
+## Documentation
+Run dev environment and access `http://localhost:8000/docs/`
+
+## Development
+```
+brew services start postgresql
+yarn dev
+```
+
+## Production
+Starting the API:
+```
+yarn
+yarn build
+yarn start
+```
+
+Starting the Syncer:
+```
+yarn
+yarn build
+yarn start:syncer
+```
+
+Start an Iron Fish node with
+```sh
+ironfish start --rpc.tcp --rpc.tcp-port=8021
+```
+
+# Updating or creating a new API endpoint
+The repository is using OpenAPI 3.0 from the Coinbase Rosetta specs.
Find the latest version [here](https://github.com/coinbase/rosetta-specifications) + +Copy the specs in the root +`cp -rf ../node_modules/rosetta-specifications ./rosetta-specifications` + +Run `make gen` in `./rosetta-specifications` + +Update the type file: +- run `yarn api:types` + +# Testing the Rosetta integration +Install Rosetta CLI https://github.com/coinbase/rosetta-cli + +Run: +- `rosetta-cli view:networks` to see the networks +... \ No newline at end of file diff --git a/ironfish-rosetta-api/jest.config.js b/ironfish-rosetta-api/jest.config.js new file mode 100644 index 0000000000..b9faa197f1 --- /dev/null +++ b/ironfish-rosetta-api/jest.config.js @@ -0,0 +1,9 @@ +const base = require('../config/jest.config.base') +const pkg = require('./package.json') + +module.exports = { + ...base, + testEnvironment: '../config/jestNodeEnvironment', + watchPlugins: ['../config/jestWatchPlugin'], + displayName: pkg.name, +} diff --git a/ironfish-rosetta-api/openapitools.json b/ironfish-rosetta-api/openapitools.json new file mode 100644 index 0000000000..5ef9056080 --- /dev/null +++ b/ironfish-rosetta-api/openapitools.json @@ -0,0 +1,7 @@ +{ + "$schema": "node_modules/@openapitools/openapi-generator-cli/config.schema.json", + "spaces": 2, + "generator-cli": { + "version": "5.0.0-beta3" + } +} diff --git a/ironfish-rosetta-api/ormconfig.ts b/ironfish-rosetta-api/ormconfig.ts new file mode 100644 index 0000000000..b52d8ebf21 --- /dev/null +++ b/ironfish-rosetta-api/ormconfig.ts @@ -0,0 +1,23 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { ConnectionOptions } from 'typeorm' +import { + DATABASE_HOST, + DATABASE_PASSWORD, + DATABASE_PORT, + DATABASE_USERNAME, + DATABASE_BASE, +} from './src/config' + +const config: ConnectionOptions = { + type: 'postgres', + host: DATABASE_HOST, + port: Number(DATABASE_PORT), + username: DATABASE_USERNAME, + password: DATABASE_PASSWORD, + database: DATABASE_BASE, +} + +export default config diff --git a/ironfish-rosetta-api/package.json b/ironfish-rosetta-api/package.json new file mode 100644 index 0000000000..e934d661d9 --- /dev/null +++ b/ironfish-rosetta-api/package.json @@ -0,0 +1,66 @@ +{ + "name": "ironfish-rosetta-api", + "version": "0.1.0", + "description": "HTTP server to connect to an Iron Fish Node", + "private": true, + "author": "Iron Fish (https://ironfish.network)", + "license": "MPL-2.0", + "scripts": { + "build": "tsc -b", + "dev": "CLIENT_HOST=http://localhost:3000 nodemon --watch src --exec yarn build:start -e ts", + "build:start": "tsc -b && yarn start", + "start": "node ./build/src/index.js", + "start:syncer": "node ./build/src/syncer.js", + "lint": "tsc -b && tsc -b tsconfig.test.json && eslint --ext .ts,.tsx,.js,.jsx src/", + "lint:fix": "tsc -b && tsc -b tsconfig.test.json && eslint --ext .ts,.tsx,.js,.jsx src/ --fix", + "test": "tsc -b tsconfig.test.json && jest", + "test:watch": "tsc -b tsconfig.test.json && jest --watch", + "api:types": "openapi-generator-cli generate -i ./rosetta-specifications/api.json -g typescript-axios --additional-properties=stringEnums=true,supportsES6=true -o ./src/types && yarn run lint:fix", + "typeorm": "node --require ts-node/register ../node_modules/.bin/typeorm" + }, + "devDependencies": { + "@openapitools/openapi-generator-cli": "2.1.10", + "@types/cors": "2.8.9", + "@types/express": "^4.17.9", + "@types/express-openapi": "^1.9.0", + "@types/node": "^14.14.11", + "@types/supertest": "2.0.10", + "@types/swagger-ui-express": "^4.1.2", + "@types/winston": "^2.4.4", + "eslint-config-ironfish": "*", 
+ "jest": "^26.4.2", + "nodemon": "^2.0.6", + "rosetta-specifications": "https://github.com/coinbase/rosetta-specifications.git", + "supertest": "6.0.1", + "ts-jest": "^26.4.0", + "ts-node": "9.1.1", + "typescript": "^4.1.2" + }, + "resolutions": { + "node-forge": "0.10.0", + "object-path": "^0.11.4" + }, + "homepage": "https://github.com/iron-fish/ironfish", + "keywords": [ + "http", + "api" + ], + "repository": "iron-fish/ironfish", + "dependencies": { + "@types/validator": "^13.1.1", + "body-parser": "1.19.0", + "connect": "^3.7.0", + "cors": "2.8.5", + "express": "^4.17.1", + "express-healthcheck": "0.1.0", + "express-openapi-validator": "^4.9.0", + "pg": "8.5.1", + "reflect-metadata": "0.1.13", + "swagger-routes-express": "^3.2.1", + "swagger-ui-express": "^4.1.5", + "typeorm": "0.2.29", + "validator": "^13.5.1", + "winston": "^3.3.3" + }, + "types": "build/index.d.ts" +} diff --git a/ironfish-rosetta-api/scripts/build-docker.sh b/ironfish-rosetta-api/scripts/build-docker.sh new file mode 100755 index 0000000000..fecb1d0185 --- /dev/null +++ b/ironfish-rosetta-api/scripts/build-docker.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" +cd ../.. + +echo "Building Docker Image" +cp .gitignore .dockerignore + +docker build . \ + --progress plain \ + --tag ironfish-rosetta-api:latest \ + --file ironfish-rosetta-api/Dockerfile diff --git a/ironfish-rosetta-api/scripts/build.sh b/ironfish-rosetta-api/scripts/build.sh new file mode 100755 index 0000000000..40656a4673 --- /dev/null +++ b/ironfish-rosetta-api/scripts/build.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" +cd ../../ + +if ! 
command -v rsync &> /dev/null; then + echo "rsync is not installed but is required" + exit 1 +fi + +echo "Building WASM" +( cd ironfish-wasm && yarn run build:node ) + +echo "Installing from lockfile" +yarn --non-interactive --frozen-lockfile --ignore-scripts + +echo "Building Rosetta project" +cd ironfish-rosetta-api +yarn build + +echo "Outputting build to $PWD/build.rosetta" +rm -rf build.rosetta +mkdir build.rosetta + +echo "Packing Rosetta" +yarn pack -f ./build.rosetta/packaged.tar.gz +cd build.rosetta +tar zxvf packaged.tar.gz + +cd package +echo "Copying build" +cp -R ../../build ./ + +echo "Copying node_modules" +rsync -L -avrq --exclude='ironfish-rosetta-api' ../../../node_modules ./ + +echo "Packaging build into ironfish-rosetta-api.tar.gz" +cd .. +mv package ironfish-rosetta-api +tar -cf ironfish-rosetta-api.tar.gz ironfish-rosetta-api \ No newline at end of file diff --git a/ironfish-rosetta-api/scripts/deploy-docker.sh b/ironfish-rosetta-api/scripts/deploy-docker.sh new file mode 100755 index 0000000000..dc4fb37d2b --- /dev/null +++ b/ironfish-rosetta-api/scripts/deploy-docker.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash +set -euo pipefail +cd "$(dirname "$0")" + +if [ -z "${AWS_BLOCK_EXPLORER_REGISTRY_URL-}" ]; then + echo "Set AWS_BLOCK_EXPLORER_REGISTRY_URL before running deploy-docker.sh" + exit 1 +fi + +docker tag ironfish-rosetta-api:latest ${AWS_BLOCK_EXPLORER_REGISTRY_URL}:latest +docker push ${AWS_BLOCK_EXPLORER_REGISTRY_URL}:latest diff --git a/ironfish-rosetta-api/src/config/config.ts b/ironfish-rosetta-api/src/config/config.ts new file mode 100644 index 0000000000..7172386606 --- /dev/null +++ b/ironfish-rosetta-api/src/config/config.ts @@ -0,0 +1,38 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { NetworkIdentifier } from '../types' + +/** + * Blockchains specific config + * */ +export const BlockchainName = 'Iron Fish' +export const NetworkStage = process.env.IRF_NETWORK || 'testnet' + +export const networkIdentifier: NetworkIdentifier = { + blockchain: BlockchainName, + network: NetworkStage, +} + +/** + * Server specific config + * */ +export const SERVER_PORT = 8080 +export const API_HOST = process.env.CLIENT_HOST || 'https://explorer.ironfish.network' + +/** + * Database specific config + * */ +export const DATABASE_HOST = process.env.DATABASE_HOST || 'localhost' +export const DATABASE_PORT = process.env.DATABASE_PORT || 5432 +export const DATABASE_USERNAME = process.env.DATABASE_USERNAME || 'postgres' +export const DATABASE_PASSWORD = process.env.DATABASE_PASSWORD || '' +export const DATABASE_BASE = process.env.DATABASE_BASE || 'rosetta' + +/** + * RPC Config + * */ +export const RPC_MODE = (process.env.RPC_MODE as 'tcp' | 'ipc') || 'tcp' +export const RPC_HOST = process.env.RPC_HOST || '0.0.0.0' +export const RPC_PORT = process.env.RPC_PORT || 8021 diff --git a/ironfish-rosetta-api/src/config/database.ts b/ironfish-rosetta-api/src/config/database.ts new file mode 100644 index 0000000000..fb742eaa17 --- /dev/null +++ b/ironfish-rosetta-api/src/config/database.ts @@ -0,0 +1,15 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import 'reflect-metadata' +import { createConnection } from 'typeorm' + +import ormConfig from '../../ormconfig' +import { Block, Config, Transaction } from '../entity' + +export const connection = createConnection({ + ...ormConfig, + entities: [Block, Config, Transaction], + synchronize: true, + logging: ['error'], +}) diff --git a/ironfish-rosetta-api/src/config/index.ts b/ironfish-rosetta-api/src/config/index.ts new file mode 100644 index 0000000000..f76033e4d3 --- /dev/null +++ b/ironfish-rosetta-api/src/config/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './config' +export * from './routes' diff --git a/ironfish-rosetta-api/src/config/openapi.ts b/ironfish-rosetta-api/src/config/openapi.ts new file mode 100644 index 0000000000..25a905047c --- /dev/null +++ b/ironfish-rosetta-api/src/config/openapi.ts @@ -0,0 +1,125 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +// Read the Open API definition directly from the Rosetta specs Github +import RosettaAPIDefinition from 'rosetta-specifications/api.json' + +const searchBlockEndpointPath = { + '/search/blocks': { + post: { + summary: '[INDEXER] Search for Blocks', + description: + '`/search/blocks` allows the caller to search for blocks that meet certain conditions. 
', + operationId: 'searchBlocks', + tags: ['Search'], + requestBody: { + required: true, + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/SearchBlocksRequest', + }, + }, + }, + }, + responses: { + '200': { + description: 'Expected response to a valid request', + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/SearchBlocksResponse', + }, + }, + }, + }, + '500': { + description: 'unexpected error', + content: { + 'application/json': { + schema: { + $ref: '#/components/schemas/Error', + }, + }, + }, + }, + }, + }, + }, +} + +const searchBlockEndpointComponents = { + SearchBlocksRequest: { + description: 'SearchBlocksRequest is used to search for blocks.\n', + type: 'object', + required: ['network_identifier'], + properties: { + network_identifier: { + $ref: '#/components/schemas/NetworkIdentifier', + }, + operator: { + $ref: '#/components/schemas/Operator', + }, + seek: { + description: 'seek parameter to offset the pagination at a previous block sequence.', + type: 'integer', + format: 'int64', + minimum: 0, + example: 5, + }, + limit: { + description: + 'limit is the maximum number of blocks to return in one call. The implementation\nmay return <= limit blocks.\n', + type: 'integer', + format: 'int64', + minimum: 0, + maximum: 25, + example: 5, + }, + query: { + description: 'query to filter blocks on hash or sequence\n', + type: 'string', + }, + }, + }, + SearchBlocksResponse: { + description: + 'SearchBlocksResponse contains an ordered collection of Blocks\nthat match the query in SearchBlocksRequest. These Blocks\nare sorted from most recent block to oldest block.\n', + type: 'object', + required: ['blocks'], + properties: { + blocks: { + type: 'array', + description: 'blocks is an array of Block sorted by most recent BlockIdentifier.', + items: { + $ref: '#/components/schemas/Block', + }, + }, + next_offset: { + description: + 'next_offset is the next offset to use when paginating through\nblock results. 
If this field is not populated, there are\nno more blocks to query.\n', + type: 'integer', + format: 'int64', + minimum: 0, + example: 5, + }, + }, + }, +} + +// Add new endpoints to the definition +export const OpenAPIDefinition = { + ...RosettaAPIDefinition, + paths: { + ...RosettaAPIDefinition.paths, + ...searchBlockEndpointPath, + }, + components: { + ...RosettaAPIDefinition.components, + schemas: { + ...RosettaAPIDefinition.components.schemas, + ...searchBlockEndpointComponents, + }, + }, +} diff --git a/ironfish-rosetta-api/src/config/routes.ts b/ironfish-rosetta-api/src/config/routes.ts new file mode 100644 index 0000000000..2a57f04932 --- /dev/null +++ b/ironfish-rosetta-api/src/config/routes.ts @@ -0,0 +1,14 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export enum Routes { + BLOCK = '/block', + BLOCK_TRANSACTION = '/block/transaction', + DOCUMENTATION = '/docs', + NETWORK_LIST = '/network/list', + NETWORK_STATUS = '/network/status', + SEARCH_BLOCKS = '/search/blocks', + SEARCH_TRANSACTIONS = '/search/transactions', + HEALTH_CHECK = '/healthcheck', +} diff --git a/ironfish-rosetta-api/src/controllers/Block.ts b/ironfish-rosetta-api/src/controllers/Block.ts new file mode 100644 index 0000000000..c596277196 --- /dev/null +++ b/ironfish-rosetta-api/src/controllers/Block.ts @@ -0,0 +1,14 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { Request, Response } from 'express' +import { HandleRequest } from './Controller' + +export const block = async (request: Request, response: Response): Promise => { + await HandleRequest(request, response) +} + +export const blockTransaction = async (request: Request, response: Response): Promise => { + await HandleRequest(request, response) +} diff --git a/ironfish-rosetta-api/src/controllers/Controller.test.ts b/ironfish-rosetta-api/src/controllers/Controller.test.ts new file mode 100644 index 0000000000..7d52aa8e20 --- /dev/null +++ b/ironfish-rosetta-api/src/controllers/Controller.test.ts @@ -0,0 +1,69 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { + SendResponse, + ResponsePayload, + SendError, + ResponseError, + SuccessResponse, +} from './Controller' +import { Response } from 'express' + +describe('Base Controller', () => { + const response = {} as Response + + beforeEach(() => { + response.status = jest.fn() + response.setHeader = jest.fn() + response.json = jest.fn() + response.send = jest.fn() + response.end = jest.fn() + }) + + describe('SuccessResponse', () => { + it('should return the right object', () => { + const payload = { key: 'value' } + expect(SuccessResponse(payload)).toEqual({ + body: payload, + status: 200, + }) + }) + }) + describe('SendError', () => { + it('should return the right error', () => { + const error: ResponseError = { + error: 'error message', + message: 'message', + retriable: true, + status: 500, + } + + SendError(response, error) + + expect(response.status).toHaveBeenCalledWith(500) + expect(response.json).toHaveBeenCalledWith({ + error: 'message', + }) + }) + }) + + describe('SendResponse', () => { + it('should return JSON', () => { + const payload: ResponsePayload = { body: { key: 'value' }, status: 200 } + SendResponse(response, payload) 
+ + expect(response.status).toHaveBeenCalledWith(200) + expect(response.json).toHaveBeenCalledWith({ key: 'value' }) + }) + + it('should return plain text', () => { + const payload: ResponsePayload = { body: 'test test', status: 200 } + SendResponse(response, payload) + + expect(response.status).toHaveBeenCalledWith(200) + expect(response.json).toHaveBeenCalledTimes(0) + expect(response.send).toHaveBeenCalledWith('test test') + }) + }) +}) diff --git a/ironfish-rosetta-api/src/controllers/Controller.ts b/ironfish-rosetta-api/src/controllers/Controller.ts new file mode 100644 index 0000000000..7b9daa618f --- /dev/null +++ b/ironfish-rosetta-api/src/controllers/Controller.ts @@ -0,0 +1,58 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Request, Response } from 'express' +import { requestHandler } from '../middleware/requestHandler' +import { AppRouteHandlers } from '../server/server' + +type ResponseStatusSuccess = 200 +type ResponseStatusError = 400 | 500 +type ResponseBody = Record | string + +export type ResponsePayload = { + body: ResponseBody + status: ResponseStatusSuccess +} + +export type ResponseError = { + status: ResponseStatusError + error: string + message: string + retriable: boolean +} + +export const SendResponse = (response: Response, payload: ResponsePayload): void => { + response.status(payload.status || 200) + response.setHeader('content-type', 'application/json') + + if (payload.body instanceof Object) { + response.json(payload.body) + } else { + response.send(payload.body) + response.end() + } +} + +export const SendError = (response: Response, error: ResponseError): void => { + response.status(error.status || 500) + response.json({ error: error.message }) + response.end() +} + +export const SuccessResponse = (payload: Record): ResponsePayload => { + return { body: 
payload, status: 200 } +} + +export const HandleRequest = async (request: Request, response: Response): Promise => { + try { + const app = request.app as AppRouteHandlers + + const responseHandler = await requestHandler(app, request, response) + const responsePayload = SuccessResponse(responseHandler) + + SendResponse(response, responsePayload) + } catch (error) { + SendError(response, error) + } +} diff --git a/ironfish-rosetta-api/src/controllers/Network.ts b/ironfish-rosetta-api/src/controllers/Network.ts new file mode 100644 index 0000000000..a1d6f43f05 --- /dev/null +++ b/ironfish-rosetta-api/src/controllers/Network.ts @@ -0,0 +1,17 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Request, Response } from 'express' +import { HandleRequest } from './Controller' + +export const networkList = async (request: Request, response: Response): Promise => { + await HandleRequest(request, response) +} + +export const networkOptions = async (request: Request, response: Response): Promise => { + await HandleRequest(request, response) +} + +export const networkStatus = async (request: Request, response: Response): Promise => { + await HandleRequest(request, response) +} diff --git a/ironfish-rosetta-api/src/controllers/Search.ts b/ironfish-rosetta-api/src/controllers/Search.ts new file mode 100644 index 0000000000..ca77b1ae4a --- /dev/null +++ b/ironfish-rosetta-api/src/controllers/Search.ts @@ -0,0 +1,17 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { Request, Response } from 'express' +import { HandleRequest } from './Controller' + +export const searchBlocks = async (request: Request, response: Response): Promise => { + await HandleRequest(request, response) +} + +export const searchTransactions = async ( + request: Request, + response: Response, +): Promise => { + await HandleRequest(request, response) +} diff --git a/ironfish-rosetta-api/src/controllers/index.ts b/ironfish-rosetta-api/src/controllers/index.ts new file mode 100644 index 0000000000..f8f1e3ae1f --- /dev/null +++ b/ironfish-rosetta-api/src/controllers/index.ts @@ -0,0 +1,7 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './Network' +export * from './Block' +export * from './Search' diff --git a/ironfish-rosetta-api/src/entity/Block.ts b/ironfish-rosetta-api/src/entity/Block.ts new file mode 100644 index 0000000000..c28b449d3a --- /dev/null +++ b/ironfish-rosetta-api/src/entity/Block.ts @@ -0,0 +1,39 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Entity, PrimaryColumn, Column, Index, OneToMany } from 'typeorm' +import { Hash, Timestamp } from './SharedColumnType' +import { Transaction } from './Transaction' +import { bigint } from './ValueTransformer' + +@Entity() +export class Block { + @PrimaryColumn(Hash) + hash!: string + + @Index() + @Column({ + type: 'bigint', + transformer: bigint, + }) + sequence!: number + + @Column() + previousBlockHash!: string + previousBlock?: Block + + @Column() + difficulty!: number + + @Column() + size!: number + + @Column(Timestamp) + timestamp!: number + + @Column() + transactionsCount!: number + + @OneToMany(() => Transaction, (transaction) => transaction.block) + transactions!: Transaction[] +} diff --git a/ironfish-rosetta-api/src/entity/Config.ts b/ironfish-rosetta-api/src/entity/Config.ts new file mode 100644 index 0000000000..22270e7c11 --- /dev/null +++ b/ironfish-rosetta-api/src/entity/Config.ts @@ -0,0 +1,16 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Entity, PrimaryColumn, Column } from 'typeorm' + +/** + * Key value store to store informations about the indexer and syncer state + * */ +@Entity() +export class Config { + @PrimaryColumn() + key!: string + + @Column() + value!: string +} diff --git a/ironfish-rosetta-api/src/entity/SharedColumnType.ts b/ironfish-rosetta-api/src/entity/SharedColumnType.ts new file mode 100644 index 0000000000..8497010698 --- /dev/null +++ b/ironfish-rosetta-api/src/entity/SharedColumnType.ts @@ -0,0 +1,16 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ColumnOptions } from 'typeorm' +import { timestamp } from './ValueTransformer' + +// Hash stored as hex output +export const Hash: ColumnOptions = { + length: 64, + type: 'varchar', +} + +export const Timestamp: ColumnOptions = { + type: 'timestamptz', + transformer: timestamp, +} diff --git a/ironfish-rosetta-api/src/entity/Transaction.ts b/ironfish-rosetta-api/src/entity/Transaction.ts new file mode 100644 index 0000000000..5cdc26c620 --- /dev/null +++ b/ironfish-rosetta-api/src/entity/Transaction.ts @@ -0,0 +1,43 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Entity, PrimaryColumn, Column, Index, ManyToOne } from 'typeorm' +import { Hash, Timestamp } from './SharedColumnType' +import { Block } from './Block' +import { bigint } from './ValueTransformer' + +export interface Note { + commitment: string +} + +export interface Spend { + nullifier: string +} + +@Entity() +export class Transaction { + @PrimaryColumn(Hash) + hash!: string + + @Column({ + type: 'bigint', + transformer: bigint, + }) + fee!: number + + @Column() + size!: number + + @Column(Timestamp) + timestamp!: number + + @Index() + @ManyToOne(() => Block, (block) => block.transactions, { onDelete: 'CASCADE' }) + block!: Block + + @Column('jsonb') + notes!: Note[] + + @Column('jsonb') + spends!: Spend[] +} diff --git a/ironfish-rosetta-api/src/entity/ValueTransformer.ts b/ironfish-rosetta-api/src/entity/ValueTransformer.ts new file mode 100644 index 0000000000..ec09eff146 --- /dev/null +++ b/ironfish-rosetta-api/src/entity/ValueTransformer.ts @@ -0,0 +1,16 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ValueTransformer } from 'typeorm' + +export const bigint: ValueTransformer = { + to: (entityValue: number) => entityValue, + from: (databaseValue: string): number => parseInt(databaseValue, 10), +} + +export const timestamp: ValueTransformer = { + to: (entityValue: number) => { + return new Date(entityValue).toISOString() + }, + from: (databaseValue: string): number => new Date(databaseValue).getTime(), +} diff --git a/ironfish-rosetta-api/src/entity/index.ts b/ironfish-rosetta-api/src/entity/index.ts new file mode 100644 index 0000000000..6af2bed9ba --- /dev/null +++ b/ironfish-rosetta-api/src/entity/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './Block' +export * from './Config' +export * from './Transaction' diff --git a/ironfish-rosetta-api/src/errors/SyncerError.ts b/ironfish-rosetta-api/src/errors/SyncerError.ts new file mode 100644 index 0000000000..9f8319c0a9 --- /dev/null +++ b/ironfish-rosetta-api/src/errors/SyncerError.ts @@ -0,0 +1,10 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export class SyncerError extends Error { + constructor(message: string) { + super(message) + this.name = 'SyncerError' + } +} diff --git a/ironfish-rosetta-api/src/errors/index.ts b/ironfish-rosetta-api/src/errors/index.ts new file mode 100644 index 0000000000..d408068b72 --- /dev/null +++ b/ironfish-rosetta-api/src/errors/index.ts @@ -0,0 +1,4 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +export * from './SyncerError' diff --git a/ironfish-rosetta-api/src/index.ts b/ironfish-rosetta-api/src/index.ts new file mode 100644 index 0000000000..60a033ae67 --- /dev/null +++ b/ironfish-rosetta-api/src/index.ts @@ -0,0 +1,43 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Routes, SERVER_PORT } from './config' +import { Server } from './server/server' +import { Logger } from './utils/logger' +import { + NetworkList, + NetworkStatus, + Block, + BlockTransaction, + SearchBlocks, + SearchTransactions, +} from './services' +import { connection } from './config/database' + +const server = new Server() + +server + .open(SERVER_PORT) + .then(() => { + Logger.info(`Listening on http://localhost:${SERVER_PORT}`) + }) + .catch((err: string) => { + Logger.error(`Error: ${err}`) + }) + +// Attach services +server.register(Routes.NETWORK_LIST, NetworkList) +server.register(Routes.NETWORK_STATUS, NetworkStatus) +server.register(Routes.BLOCK, Block) +server.register(Routes.BLOCK_TRANSACTION, BlockTransaction) +server.register(Routes.SEARCH_BLOCKS, SearchBlocks) +server.register(Routes.SEARCH_TRANSACTIONS, SearchTransactions) + +const init = async () => { + await connection +} + +init().catch((error) => { + Logger.error(error) +}) diff --git a/ironfish-rosetta-api/src/indexer/Indexer.ts b/ironfish-rosetta-api/src/indexer/Indexer.ts new file mode 100644 index 0000000000..aef37b6d4a --- /dev/null +++ b/ironfish-rosetta-api/src/indexer/Indexer.ts @@ -0,0 +1,145 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import 'reflect-metadata' +import { Repository } from 'typeorm' +import { Config, Block, Transaction, Note, Spend } from '../entity' +import { getManager } from 'typeorm' +import { Transaction as TransactionAPIType } from '../types' +import { GetBlockResponse } from 'ironfish' + +export type IndexerConfigOptions = { + lastBlockHash: string | null +} + +/** + * Wrapper around the database to manage the different database entities: + * config: key value system to store the state of indexer/syncer + * blocks: index every blocks + * transactions: index every transaction for each blocks + * */ +export class Indexer { + indexer: Indexer | null = null + blockRepository: Repository + configRepository: Repository + transactionsRepository: Repository + + config: IndexerConfigOptions = { + lastBlockHash: null, + } + + constructor() { + // Each entity has its own repository which handles all operations with its entity. + // When dealing with entities, Repositories are more convenient to use than EntityManagers: + this.configRepository = getManager().getRepository(Config) + this.blockRepository = getManager().getRepository(Block) + this.transactionsRepository = getManager().getRepository(Transaction) + } + + async init(): Promise { + const indexer = new Indexer() + await indexer.loadConfig() + + return indexer + } + + async setConfig(key: keyof IndexerConfigOptions, value: string): Promise { + if (!(key in this.config)) { + throw 'Invalid key' + } + + const config = await this.configRepository.findOne({ key }) + if (!config) { + throw 'Key not found' + } + config.value = value + await this.configRepository.save(config) + } + + async loadConfig(): Promise { + const configs = await this.configRepository.find() + + if (!configs || configs.length <= 0) { + return + } + + for (const config of configs) { + if (config.key in this.config) { + this.config = { + ...this.config, + [config.key]: config.value, + } + } + } + } + + async getBlock(sequence?: number, hash?: string): 
Promise { + const blockData = await this.blockRepository.findOne({ + where: { + hash, + sequence: sequence, + }, + }) + + return blockData || null + } + + async deleteAtSequence(sequence: number): Promise { + await this.blockRepository.delete({ sequence: sequence }) + } + + async deleteAllFromSequence(sequence: number): Promise { + await this.blockRepository + .createQueryBuilder() + .delete() + .where('sequence > :sequence', { sequence: sequence }) + .execute() + } + + async addBlock(block: GetBlockResponse): Promise { + const metadata = block.metadata as { size: number; difficulty: number } + + const blockToInsert = new Block() + blockToInsert.hash = block.blockIdentifier.hash + blockToInsert.sequence = Number(block.blockIdentifier.index) + blockToInsert.previousBlockHash = block.parentBlockIdentifier.hash + blockToInsert.size = metadata.size || 0 + blockToInsert.difficulty = metadata.difficulty || 0 + blockToInsert.timestamp = block.timestamp + blockToInsert.transactionsCount = block.transactions.length + + const blockData = await this.blockRepository.save(blockToInsert) + + await this.addTransactions(blockData, block.transactions) + + return blockData + } + + async addTransactions( + blockData: Block, + transactions: TransactionAPIType[], + ): Promise { + const transactionsToInsert: Transaction[] = transactions.map((transaction) => { + const metadata = transaction.metadata as { + size: number + fee: number + timestamp: number + notes: Note[] + spends: Spend[] + } + + return { + hash: transaction.transaction_identifier.hash, + fee: metadata.fee || 0, + size: metadata.size || 0, + timestamp: blockData.timestamp, + block: blockData, + notes: metadata.notes, + spends: metadata.spends, + } as Transaction + }) + + return await this.transactionsRepository.save(transactionsToInsert) + } +} diff --git a/ironfish-rosetta-api/src/indexer/index.ts b/ironfish-rosetta-api/src/indexer/index.ts new file mode 100644 index 0000000000..5139ee8d86 --- /dev/null +++ 
b/ironfish-rosetta-api/src/indexer/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './Indexer' diff --git a/ironfish-rosetta-api/src/middleware/errorHandler.test.ts b/ironfish-rosetta-api/src/middleware/errorHandler.test.ts new file mode 100644 index 0000000000..487ac49ad3 --- /dev/null +++ b/ironfish-rosetta-api/src/middleware/errorHandler.test.ts @@ -0,0 +1,33 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { errorHandler } from './errorHandler' +import { Request, Response } from 'express' + +const mockResponse = () => { + const res = {} as Response + res.status = jest.fn().mockReturnValue(res) + res.json = jest.fn().mockReturnValue(res) + return res +} + +describe('errorHandler middleware', () => { + it('should return the right error response', () => { + const response = mockResponse() + const error = { status: 401, message: 'not authorized' } + errorHandler(error, {} as Request, response, jest.fn()) + expect(response.json).toHaveBeenCalledWith({ + error: { + message: 'not authorized', + type: 'request_validation', + }, + }) + }) + + it('should not handle the unexpected error', () => { + const next = jest.fn() + const error = { message: 'not authorized' } + errorHandler(error, {} as Request, mockResponse(), next) + expect(next).toHaveBeenCalledWith(error) + }) +}) diff --git a/ironfish-rosetta-api/src/middleware/errorHandler.ts b/ironfish-rosetta-api/src/middleware/errorHandler.ts new file mode 100644 index 0000000000..9e9b418291 --- /dev/null +++ b/ironfish-rosetta-api/src/middleware/errorHandler.ts @@ -0,0 +1,24 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * 
License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Request, Response, NextFunction } from 'express' + +import { RequestError, isRouteErrorType } from '../types/RouteError' + +export const errorHandler = ( + error: RequestError, + req: Request, + res: Response, + next: NextFunction, +): void => { + if (isRouteErrorType(error)) { + res.status(error.status).json({ + error: { + type: 'request_validation', + message: error.message, + }, + }) + return + } + next(error) +} diff --git a/ironfish-rosetta-api/src/middleware/index.ts b/ironfish-rosetta-api/src/middleware/index.ts new file mode 100644 index 0000000000..9642187b53 --- /dev/null +++ b/ironfish-rosetta-api/src/middleware/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './requestHandler' diff --git a/ironfish-rosetta-api/src/middleware/requestHandler.test.ts b/ironfish-rosetta-api/src/middleware/requestHandler.test.ts new file mode 100644 index 0000000000..91685f90bb --- /dev/null +++ b/ironfish-rosetta-api/src/middleware/requestHandler.test.ts @@ -0,0 +1,39 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { requestHandler } from './requestHandler' +import { Request, Response } from 'express' +import { AppRouteHandlers, RouteHandlerMap } from '../server/server' +import { Routes } from '../config/routes' + +const spyService = jest.fn() + +const request = {} as Request +const response = {} as Response + +const app = {} as AppRouteHandlers + +app.routeHandlers = {} as RouteHandlerMap +app.routeHandlers[Routes.NETWORK_STATUS] = { service: spyService } + +describe('requestHandler middleware', () => { + it('should throw an error if the path is not found', async () => { + await expect(requestHandler(app, request, response)).rejects.toThrow('No route path found') + }) + + it('should call the right service', async () => { + const requestDefined = {} as Request + requestDefined.body = { key: 'value' } + requestDefined.route = { + path: Routes.NETWORK_STATUS, + } + + await requestHandler(app, requestDefined, response) + + expect(spyService).toHaveBeenCalledWith({ + params: requestDefined.body, + request: requestDefined, + response, + }) + }) +}) diff --git a/ironfish-rosetta-api/src/middleware/requestHandler.ts b/ironfish-rosetta-api/src/middleware/requestHandler.ts new file mode 100644 index 0000000000..1a59465a18 --- /dev/null +++ b/ironfish-rosetta-api/src/middleware/requestHandler.ts @@ -0,0 +1,43 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { Request, Response } from 'express' + +import { Routes } from '../config' +import { AppRouteHandlers } from '../server' + +type RequestBodyType = Record +export type RequestHandlerParams = { + params: RequestBodyType + request?: Request + response?: Response +} + +export const requestHandler = async ( + app: AppRouteHandlers, + request: Request, + response: Response, +): Promise> => { + const routeHandlers = app.routeHandlers + const route = request.route as Record + if (!route || !route.path) { + throw new Error(`No route path found`) + } + + const path = route.path as Routes + const routeHandler = routeHandlers[path] + if (!routeHandler || !routeHandler.service) { + throw new Error(`Service for ${path} is not yet implemented`) + } + + const requestParams: RequestHandlerParams = { + params: request.body as RequestBodyType, + request, + response, + } + + // Todo - add type guards for the different response + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return await routeHandler.service(requestParams) +} diff --git a/ironfish-rosetta-api/src/repository/BlockRepository.ts b/ironfish-rosetta-api/src/repository/BlockRepository.ts new file mode 100644 index 0000000000..37bc73a740 --- /dev/null +++ b/ironfish-rosetta-api/src/repository/BlockRepository.ts @@ -0,0 +1,48 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { EntityRepository, Repository } from 'typeorm' +import { Block } from '../entity' + +@EntityRepository(Block) +export class BlockRepository extends Repository { + getFindWhereParams(hash?: string, sequence?: number): { hash?: string; sequence?: number } { + let where = null + if (hash) { + where = { hash } + } + + if (sequence) { + where = { sequence } + } + + if (!where) { + throw 'Missing hash or sequence param' + } + + return where + } + async findWithInstances(hash?: string, sequence?: number): Promise { + const where = this.getFindWhereParams(hash, sequence) + const block = await this.createQueryBuilder('block') + .leftJoinAndMapOne( + 'block.previousBlock', + 'block', + 'previousBlock', + 'block.previousBlockHash = previousBlock.hash', + ) + .leftJoinAndSelect('block.transactions', 'transaction') + .where(where) + .getOne() + return block || null + } + + async getWithInstances(hash?: string, sequence?: number): Promise { + const block = await this.findWithInstances(hash, sequence) + if (!block) { + throw Error(`Block ${hash || ''} ${sequence || ''} not found`) + } + return block + } +} diff --git a/ironfish-rosetta-api/src/repository/TransactionRepository.ts b/ironfish-rosetta-api/src/repository/TransactionRepository.ts new file mode 100644 index 0000000000..64a804441f --- /dev/null +++ b/ironfish-rosetta-api/src/repository/TransactionRepository.ts @@ -0,0 +1,45 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { EntityRepository, Like, Repository } from 'typeorm' +import { Transaction } from '../entity' + +const FULL_JOINS = ['block'] + +@EntityRepository(Transaction) +export class TransactionRepository extends Repository { + async findWithInstances( + transactionHash?: string, + blockHash?: string, + ): Promise { + const transaction = await this.findOne({ + where: { + hash: transactionHash, + block: { hash: blockHash }, + }, + relations: FULL_JOINS, + }) + return transaction || null + } + + async getWithInstances(transactionHash?: string, blockHash?: string): Promise { + return await this.findOneOrFail({ + where: { + hash: transactionHash, + block: { hash: blockHash }, + }, + relations: FULL_JOINS, + }) + } + + async findByHashWithInstances(hash: string, limit: number): Promise { + return ( + (await this.find({ + where: { hash: Like(`%${hash}%`) }, + take: limit, + relations: FULL_JOINS, + })) || [] + ) + } +} diff --git a/ironfish-rosetta-api/src/repository/index.ts b/ironfish-rosetta-api/src/repository/index.ts new file mode 100644 index 0000000000..b250ee7a12 --- /dev/null +++ b/ironfish-rosetta-api/src/repository/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './BlockRepository' +export * from './TransactionRepository' diff --git a/ironfish-rosetta-api/src/rpc/index.ts b/ironfish-rosetta-api/src/rpc/index.ts new file mode 100644 index 0000000000..1e2f0b1c2d --- /dev/null +++ b/ironfish-rosetta-api/src/rpc/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export * from './rpc' diff --git a/ironfish-rosetta-api/src/rpc/rpc.ts b/ironfish-rosetta-api/src/rpc/rpc.ts new file mode 100644 index 0000000000..af805b63bb --- /dev/null +++ b/ironfish-rosetta-api/src/rpc/rpc.ts @@ -0,0 +1,30 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { ConfigOptions, IronfishSdk, NodeFileProvider } from 'ironfish' +import { RPC_HOST, RPC_MODE, RPC_PORT } from '../config' + +export class RPCClient { + sdk: IronfishSdk + + private constructor(sdk: IronfishSdk) { + this.sdk = sdk + } + static async init(): Promise { + const fileSystem = new NodeFileProvider() + await fileSystem.init() + + const configOverrides: Partial = {} + + configOverrides.logLevel = '*:verbose' + configOverrides.enableRpcTcp = RPC_MODE === 'tcp' + configOverrides.rpcTcpHost = RPC_HOST + configOverrides.rpcTcpPort = Number(RPC_PORT) + + const sdk = await IronfishSdk.init({ + configOverrides: configOverrides, + }) + + return new RPCClient(sdk) + } +} diff --git a/ironfish-rosetta-api/src/server/index.ts b/ironfish-rosetta-api/src/server/index.ts new file mode 100644 index 0000000000..c3713736c1 --- /dev/null +++ b/ironfish-rosetta-api/src/server/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './server' diff --git a/ironfish-rosetta-api/src/server/server.ts b/ironfish-rosetta-api/src/server/server.ts new file mode 100644 index 0000000000..2c5750574e --- /dev/null +++ b/ironfish-rosetta-api/src/server/server.ts @@ -0,0 +1,119 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import express from 'express' +import * as OpenApiValidator from 'express-openapi-validator' +import { Express } from 'express-serve-static-core' +import { connector } from 'swagger-routes-express' +import swaggerUi from 'swagger-ui-express' +import http from 'http' +import bodyParser from 'body-parser' +import { OpenAPIDefinition } from '../config/openapi' + +import { errorHandler } from '../middleware/errorHandler' +import * as api from '../controllers' +import { Logger } from '../utils/logger' +import { Routes } from '../config/routes' +import cors from 'cors' +import { API_HOST } from '../config' + +// eslint-disable-next-line @typescript-eslint/ban-types +export type RouteHandlerMap = Record + +export interface AppRouteHandlers extends Express { + routeHandlers: RouteHandlerMap +} + +const options: cors.CorsOptions = { + allowedHeaders: ['Origin', 'X-Requested-With', 'Content-Type', 'Accept', 'X-Access-Token'], + credentials: true, + methods: 'GET,HEAD,OPTIONS,PUT,PATCH,POST,DELETE', + origin: API_HOST, + preflightContinue: false, +} + +export class Server { + app: AppRouteHandlers + httpServer: http.Server | null = null + isOpen = false + openPromise: Promise | null = null + + constructor() { + const app = express() as AppRouteHandlers + const corsOptions = cors(options) + app.use(corsOptions) + + app.use(bodyParser.json()) + app.use(bodyParser.text()) + app.use(bodyParser.urlencoded({ extended: false })) + + // Setup API validator + const validatorOptions = { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment,@typescript-eslint/no-explicit-any + apiSpec: OpenAPIDefinition, + validateRequests: true, + validateResponses: true, + additionalProperties: false, + } + + // Route for health check + // TODO - return a real health check system + app.get(Routes.HEALTH_CHECK, (req, res) => res.end()) + // Route for api documentation + 
app.use(Routes.DOCUMENTATION, swaggerUi.serve, swaggerUi.setup(OpenAPIDefinition)) + app.use(OpenApiValidator.middleware(validatorOptions)) + app.use(errorHandler) + + app.options('*', corsOptions) + + connector(api, validatorOptions.apiSpec)(app) + + this.app = app + this.app.routeHandlers = {} as RouteHandlerMap + } + + async open(port: number): Promise { + this.isOpen = true + + this.openPromise = new Promise((resolve, reject) => { + const server = this.app.listen(port, (err?: unknown) => { + if (err) { + reject(err) + return + } + + this.httpServer = server + resolve() + }) + }) + + await this.openPromise + } + + // eslint-disable-next-line @typescript-eslint/ban-types + register(route: Routes, service: Function): void { + this.app.routeHandlers[route] = { service } + } + + async close(): Promise { + if (!this.isOpen) return + this.isOpen = false + await this.openPromise + + Logger.info('App server is starting shutdown') + + const httpServer = this.httpServer + + if (httpServer) { + await new Promise((resolve, reject) => { + httpServer.close((err: unknown) => { + if (err) reject(err) + else resolve() + }) + }) + } + + Logger.info('App server is no longer open for connections') + } +} diff --git a/ironfish-rosetta-api/src/services/Block.test.ts b/ironfish-rosetta-api/src/services/Block.test.ts new file mode 100644 index 0000000000..46246b1dc5 --- /dev/null +++ b/ironfish-rosetta-api/src/services/Block.test.ts @@ -0,0 +1,89 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { networkIdentifier as networkIdentifierConfig } from '../config' +import { RequestHandlerParams } from '../middleware' +import { BlockIdentifier, BlockRequest, NetworkIdentifier } from '../types' +import { mocked } from 'ts-jest/utils' +import * as typeorm from 'typeorm' + +const findWithInstances = jest.fn().mockReturnValue(null) +jest.mock('../repository/BlockRepository', () => ({ + findWithInstances, +})) +jest.mock('typeorm', () => { + const moduleMock = jest.requireActual('typeorm') + return { + ...moduleMock, + getCustomRepository: jest.fn().mockReturnValue({ findWithInstances }), + } +}) + +import { Block } from './Block' + +describe('Blocks service', () => { + const getRequestHander = ( + blockIdentifier: BlockIdentifier, + networkIdentifier: NetworkIdentifier, + ): RequestHandlerParams => ({ + params: { + network_identifier: networkIdentifier, + block_identifier: blockIdentifier, + }, + }) + + it('fails without the right network identifier', async () => { + await expect( + Block(getRequestHander({} as BlockIdentifier, {} as NetworkIdentifier)), + ).rejects.toThrow('Network identifier is not valid') + }) + + it('fails without the right block identifier', async () => { + await expect( + Block(getRequestHander({} as BlockIdentifier, networkIdentifierConfig)), + ).rejects.toThrow('Block identifier is not valid') + }) + + it('fails if block does not exists', async () => { + await expect( + Block(getRequestHander({ hash: 'abcd' } as BlockIdentifier, networkIdentifierConfig)), + ).rejects.toThrow('Block data not found') + }) + + describe('with a block returned', () => { + beforeEach(() => { + mocked(findWithInstances).mockReturnValue({ + hash: 'hash2', + sequence: 2, + transactions: [], + previousBlock: { + hash: 'hash1', + sequence: 1, + }, + timestamp: 123, + metadata: {}, + }) + }) + + it('returns the right response', async () => { + const response = await Block( + getRequestHander({ hash: 'abcd' } as BlockIdentifier, networkIdentifierConfig), + ) 
+ expect(response).toEqual({ + block: { + block_identifier: { + hash: 'hash2', + index: 2, + }, + parent_block_identifier: { + hash: 'hash1', + index: 1, + }, + timestamp: 123, + metadata: {}, + transactions: [], + }, + }) + }) + }) +}) diff --git a/ironfish-rosetta-api/src/services/Block.ts b/ironfish-rosetta-api/src/services/Block.ts new file mode 100644 index 0000000000..9854ec7b6d --- /dev/null +++ b/ironfish-rosetta-api/src/services/Block.ts @@ -0,0 +1,68 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { BlockRequest, BlockResponse, Transaction } from '../types' +import { RequestHandlerParams } from '../middleware' +import { getCustomRepository } from 'typeorm' +import { isValidNetworkIdentifier } from '../utils/networkIdentifierUtil' +import { BlockRepository } from '../repository/BlockRepository' + +export const Block = async ( + requestParams: RequestHandlerParams, +): Promise => { + const { params } = requestParams + const { block_identifier: blockIdentifier, network_identifier: networkIdentifier } = params + + // Verify network identifier + if (!isValidNetworkIdentifier(networkIdentifier)) + throw new Error(`Network identifier is not valid`) + + // Verify partial blockIdentifier + if (!blockIdentifier.hash && !blockIdentifier.index) + throw new Error(`Block identifier is not valid`) + + const blockRepository = getCustomRepository(BlockRepository) + + const blockData = await blockRepository.findWithInstances( + blockIdentifier.hash, + blockIdentifier.index, + ) + + if (blockData === null) throw new Error(`Block data not found`) + + const transactions: Transaction[] = blockData.transactions.map((transaction) => ({ + transaction_identifier: { + hash: transaction.hash, + }, + operations: [], + metadata: { + notes: transaction.notes, + spends: transaction.spends, + size: transaction.size, 
+ fee: transaction.fee, + isMinerFee: transaction.fee < 0 && blockData.sequence > 1, + }, + })) + + const response: BlockResponse = { + block: { + block_identifier: { + index: blockData.sequence, + hash: blockData.hash, + }, + parent_block_identifier: { + index: blockData.previousBlock?.sequence || 0, + hash: blockData.previousBlock?.hash || '', + }, + timestamp: Number(blockData.timestamp), + transactions, + metadata: { + size: blockData.size, + difficulty: blockData.difficulty, + }, + }, + } + + return Promise.resolve(response) +} diff --git a/ironfish-rosetta-api/src/services/BlockTransaction.test.ts b/ironfish-rosetta-api/src/services/BlockTransaction.test.ts new file mode 100644 index 0000000000..75f9a1d62f --- /dev/null +++ b/ironfish-rosetta-api/src/services/BlockTransaction.test.ts @@ -0,0 +1,87 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { mocked } from 'ts-jest/utils' +import * as typeorm from 'typeorm' + +import { networkIdentifier as networkIdentifierConfig } from '../config' +import { BlockIdentifier, NetworkIdentifier, TransactionIdentifier } from '../types' + +const findWithInstances = jest.fn().mockReturnValue(null) +jest.mock('../repository/TransactionRepository', () => ({ + findWithInstances, +})) +jest.mock('typeorm', () => { + const moduleMock = jest.requireActual('typeorm') + return { + ...moduleMock, + getCustomRepository: jest.fn().mockReturnValue({ findWithInstances }), + } +}) + +import { BlockTransaction } from './BlockTransaction' + +describe('Block Transaction service', () => { + it('fails without the right network identifier', async () => { + await expect( + BlockTransaction({ + params: { + transaction_identifier: {} as TransactionIdentifier, + block_identifier: {} as BlockIdentifier, + network_identifier: {} as NetworkIdentifier, + }, + }), + ).rejects.toThrow('Network identifier is not valid') + }) + + it('fails if transaction does not exists', async () => { + await expect( + BlockTransaction({ + params: { + transaction_identifier: { hash: 'abcd' }, + block_identifier: { hash: 'abcd', index: 2 }, + network_identifier: networkIdentifierConfig, + }, + }), + ).rejects.toThrow('Transaction data not found') + }) + + describe('with a transaction returned', () => { + beforeEach(() => { + mocked(findWithInstances).mockReturnValue({ + block: { timestamp: Date.now() }, + hash: 'B89726C5FA28FBB7B928F9697015616850618B5F5085E02DC08A98246003D144', + notes: [ + { + commitment: '468b79919960c8c5505be558e0f7d7353639dc3de8ea35c441e9e820b904bf6c', + }, + { + commitment: '6364fed24976a6b5c3f2e15a595786805f70375fe38489f8464f8a98c6957f00', + }, + ], + spends: [ + { + nullifier: '42BC2C20C1B31C2E38A65A6A27204B3DC86B4ED11C4EFDC3D9E933CCADE385DD', + }, + ], + size: 5005, + fee: 0, + }) + }) + + it('returns the right response', async () => { + const response = await BlockTransaction({ + 
params: { + transaction_identifier: { + hash: 'B89726C5FA28FBB7B928F9697015616850618B5F5085E02DC08A98246003D144', + }, + block_identifier: { hash: 'abcd', index: 2 }, + network_identifier: networkIdentifierConfig, + }, + }) + expect(response.transaction.transaction_identifier.hash).toEqual( + 'B89726C5FA28FBB7B928F9697015616850618B5F5085E02DC08A98246003D144', + ) + }) + }) +}) diff --git a/ironfish-rosetta-api/src/services/BlockTransaction.ts b/ironfish-rosetta-api/src/services/BlockTransaction.ts new file mode 100644 index 0000000000..c678281edd --- /dev/null +++ b/ironfish-rosetta-api/src/services/BlockTransaction.ts @@ -0,0 +1,51 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { BlockTransactionRequest, BlockTransactionResponse } from '../types' +import { RequestHandlerParams } from '../middleware' +import { isValidNetworkIdentifier } from '../utils/networkIdentifierUtil' +import { getCustomRepository } from 'typeorm' +import { TransactionRepository } from '../repository/TransactionRepository' + +export const BlockTransaction = async ( + requestParams: RequestHandlerParams, +): Promise => { + const { params } = requestParams + const { + block_identifier: blockIdentifier, + network_identifier: networkIdentifier, + transaction_identifier: transactionIdentifier, + } = params + + // Verify network identifier + if (!isValidNetworkIdentifier(networkIdentifier)) + throw new Error(`Network identifier is not valid`) + + const transactionRepository = getCustomRepository(TransactionRepository) + const transactionData = await transactionRepository.findWithInstances( + transactionIdentifier.hash, + blockIdentifier.hash, + ) + + if (!transactionData) throw new Error(`Transaction data not found`) + + const response: BlockTransactionResponse = { + transaction: { + transaction_identifier: { + hash: 
transactionData.hash, + }, + operations: [], + metadata: { + timestamp: transactionData.block.timestamp, + notes: transactionData.notes, + spends: transactionData.spends, + size: transactionData.size, + fee: transactionData.fee, + isMinerFee: transactionData.fee < 0 && transactionData.block.sequence > 1, + }, + }, + } + + return Promise.resolve(response) +} diff --git a/ironfish-rosetta-api/src/services/Network.test.ts b/ironfish-rosetta-api/src/services/Network.test.ts new file mode 100644 index 0000000000..38ef35548b --- /dev/null +++ b/ironfish-rosetta-api/src/services/Network.test.ts @@ -0,0 +1,118 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Request, Response } from 'express' + +const NodeFileProvider = jest.fn() +// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access +const getPeers = jest.fn().mockReturnValue({ + content: { peers: [{ identity: '123' }, { identity: '12345' }] }, +}) +const getChainInfo = jest.fn() +// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access +NodeFileProvider.prototype.init = jest.fn() +jest.mock('ironfish', () => ({ + NodeFileProvider, + IronfishSdk: { + init: jest.fn().mockImplementation(() => ({ + client: { + connect: jest.fn(), + getChainInfo, + getPeers, + }, + })), + }, +})) + +import { NetworkList, NetworkStatus } from './Network' +import { networkIdentifier as networkIdentifierConfig } from '../config' +import { NetworkRequest, NetworkIdentifier } from '../types' +import { RequestHandlerParams } from '../middleware' + +describe('Network service', () => { + describe('NetworkList', () => { + it('returns the right NetworkList', async () => { + const networkList = await NetworkList() + expect(networkList).toEqual({ + network_identifiers: [{ blockchain: 'Iron Fish', network: 'testnet' }], + }) + }) + }) + + 
describe('NetworkStatus service', () => { + const request = (jest.fn() as unknown) as Request + const response = (jest.fn() as unknown) as Response + + const getRequestHander = ( + networkIdentifier: NetworkIdentifier, + ): RequestHandlerParams => ({ + params: { + network_identifier: networkIdentifier, + }, + request, + response, + }) + + it('fails without the right network identifier', async () => { + await expect(NetworkStatus(getRequestHander({} as NetworkIdentifier))).rejects.toThrow( + 'Network identifier is not valid', + ) + }) + + it('throws an error without a response from the node', async () => { + await expect(NetworkStatus(getRequestHander(networkIdentifierConfig))).rejects.toThrow( + 'Chain info data not found', + ) + }) + + describe('With a response from the node', () => { + beforeAll(() => { + getChainInfo.mockReturnValue({ + content: { + currentBlockIdentifier: { + index: '2', + hash: 'abcd', + }, + genesisBlockIdentifier: { + index: '1', + hash: 'abc', + }, + oldestBlockIdentifier: { + index: '3', + hash: 'abcde', + }, + currentBlockTimestamp: 1234, + }, + }) + }) + + it('returns the right response', async () => { + const response = await NetworkStatus(getRequestHander(networkIdentifierConfig)) + expect(response).toEqual({ + current_block_identifier: { + hash: 'abcd', + index: 2, + }, + current_block_timestamp: 1234, + genesis_block_identifier: { + hash: 'abc', + index: 1, + }, + oldest_block_identifier: { + hash: 'abcde', + index: 3, + }, + peers: [ + { + peer_id: '123', + }, + { + peer_id: '12345', + }, + ], + }) + }) + }) + }) +}) diff --git a/ironfish-rosetta-api/src/services/Network.ts b/ironfish-rosetta-api/src/services/Network.ts new file mode 100644 index 0000000000..80a4a142e2 --- /dev/null +++ b/ironfish-rosetta-api/src/services/Network.ts @@ -0,0 +1,70 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { GetChainInfoResponse, ResponseEnded } from 'ironfish' + +import { networkIdentifier } from '../config' +import { RequestHandlerParams } from '../middleware/requestHandler' +import { NetworkRequest, NetworkListResponse, NetworkStatusResponse, Peer } from '../types' +import { RPCClient } from '../rpc' +import { isValidNetworkIdentifier } from '../utils/networkIdentifierUtil' + +export const NetworkList = async (): Promise => { + const response = { + network_identifiers: [networkIdentifier], + } + + return Promise.resolve(response) +} + +export const NetworkStatus = async ( + requestParams: RequestHandlerParams, +): Promise => { + const { params } = requestParams + const { network_identifier: networkIdentifier } = params + + // Verify network identifier + if (!isValidNetworkIdentifier(networkIdentifier)) + throw new Error(`Network identifier is not valid`) + + const rpc = await RPCClient.init() + await rpc.sdk.client.connect() + + const chainInfo: ResponseEnded = await rpc.sdk.client.getChainInfo({}) + + if (!chainInfo || !chainInfo.content) { + throw new Error(`Chain info data not found`) + } + + const peers = await rpc.sdk.client.getPeers() + const peersResponse: Array = [] + + if (peers.content.peers && Array.isArray(peers.content.peers)) { + peers.content.peers.forEach((peer) => { + if (!peer.identity) return + + peersResponse.push({ + peer_id: peer.identity, + }) + }) + } + + const response: NetworkStatusResponse = { + current_block_identifier: { + index: parseInt(chainInfo.content.currentBlockIdentifier.index), + hash: chainInfo.content.currentBlockIdentifier.hash, + }, + current_block_timestamp: chainInfo.content.currentBlockTimestamp, + genesis_block_identifier: { + index: parseInt(chainInfo.content.genesisBlockIdentifier.index), + hash: chainInfo.content.genesisBlockIdentifier.hash, + }, + oldest_block_identifier: { + index: 
parseInt(chainInfo.content.oldestBlockIdentifier.index), + hash: chainInfo.content.oldestBlockIdentifier.hash, + }, + peers: peersResponse, + } + + return response +} diff --git a/ironfish-rosetta-api/src/services/Search.test.ts b/ironfish-rosetta-api/src/services/Search.test.ts new file mode 100644 index 0000000000..11380ba1ba --- /dev/null +++ b/ironfish-rosetta-api/src/services/Search.test.ts @@ -0,0 +1,113 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { networkIdentifier as networkIdentifierConfig } from '../config' +import { NetworkIdentifier } from '../types' +import { mocked } from 'ts-jest/utils' +import * as typeorm from 'typeorm' + +const find = jest.fn().mockReturnValue(null) +jest.mock('../repository/BlockRepository', () => ({ + find, +})) +const findTransactions = jest.fn().mockReturnValue(null) +jest.mock('../repository/TransactionRepository', () => ({ + find: findTransactions, +})) +jest.mock('typeorm', () => { + const moduleMock = jest.requireActual('typeorm') + return { + ...moduleMock, + getCustomRepository: jest.fn().mockReturnValue({ find }), + } +}) + +import { SearchBlocks } from './Search' + +describe('SearchBlocks', () => { + it('fails without the right network identifier', async () => { + await expect( + SearchBlocks({ + params: { + network_identifier: {} as NetworkIdentifier, + }, + }), + ).rejects.toThrow('Network identifier is not valid') + }) + + describe('with blocks returned', () => { + beforeEach(() => { + find.mockReset() + mocked(find).mockReturnValue([ + { + hash: 'hash2', + sequence: 2, + transactions: [], + previousBlock: { + hash: 'hash1', + sequence: 1, + }, + timestamp: 123, + metadata: {}, + }, + ]) + }) + + it('returns the latest blocks', async () => { + const response = await SearchBlocks({ + params: { + limit: 10, + network_identifier: 
networkIdentifierConfig, + }, + }) + expect(find).toBeCalledWith({ order: { sequence: 'DESC' }, take: 10, where: [{}] }) + expect(response.blocks.length).toEqual(1) + expect(response.next_offset).toEqual(2) + }) + + it('filters by hash when string', async () => { + await SearchBlocks({ + params: { + limit: 10, + query: 'abcd', + network_identifier: networkIdentifierConfig, + }, + }) + expect(find).toBeCalledWith({ + order: { sequence: 'DESC' }, + take: 10, + where: [ + { + hash: { + _getSql: undefined, + _multipleParameters: false, + _objectLiteralParameters: undefined, + _type: 'like', + _useParameter: true, + _value: '%abcd%', + }, + }, + ], + }) + }) + }) + + it('filters by sequence when number', async () => { + await SearchBlocks({ + params: { + limit: 10, + query: '12', + network_identifier: networkIdentifierConfig, + }, + }) + expect(find).toBeCalledWith({ + order: { sequence: 'DESC' }, + take: 10, + where: [ + { + sequence: 12, + }, + ], + }) + }) +}) diff --git a/ironfish-rosetta-api/src/services/Search.ts b/ironfish-rosetta-api/src/services/Search.ts new file mode 100644 index 0000000000..58f79428df --- /dev/null +++ b/ironfish-rosetta-api/src/services/Search.ts @@ -0,0 +1,127 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { + SearchBlocksRequest, + SearchBlocksResponse, + SearchTransactionsRequest, + SearchTransactionsResponse, +} from '../types' +import { RequestHandlerParams } from '../middleware' +import { getCustomRepository, LessThan, Like } from 'typeorm' +import { isValidNetworkIdentifier } from '../utils/networkIdentifierUtil' +import { BlockRepository } from '../repository/BlockRepository' +import { TransactionRepository } from '../repository/TransactionRepository' + +export const SearchBlocks = async ( + requestParams: RequestHandlerParams, +): Promise => { + const { params } = requestParams + const { query, limit, seek, network_identifier: networkIdentifier } = params + + // Verify network identifier + if (!isValidNetworkIdentifier(networkIdentifier)) + throw new Error(`Network identifier is not valid`) + + // Search filters: + // - by hash if the query has 4+ characters + // - by sequence if the query is a number + const where = [] + // we can't just use offset / limit since we are adding new blocks every 15s + const seekSequence = seek && seek > 0 ? 
{ sequence: LessThan(seek) } : {} + if (query && query.length > 3) { + where.push({ ...seekSequence, hash: Like('%' + query + '%') }) + } + if (!Number.isNaN(Number(query))) { + where.push({ sequence: Number(query) }) + } + if (where.length <= 0) { + where.push(seekSequence) + } + + const blockRepository = getCustomRepository(BlockRepository) + const blocksData = await blockRepository.find({ + where, + order: { sequence: 'DESC' }, + take: limit, + }) + + const blocks = blocksData.map((block) => ({ + block_identifier: { + index: block.sequence, + hash: block.hash, + }, + parent_block_identifier: { index: 0, hash: '' }, + transactions: [], + timestamp: Number(block.timestamp), + metadata: { + size: block.size, + difficulty: block.difficulty, + transactionsCount: block.transactionsCount, + }, + })) + + const nextOffset = blocks[blocks.length - 1]?.block_identifier.index + + const response: SearchBlocksResponse = { + blocks, + next_offset: nextOffset > 1 ? nextOffset : undefined, + } + + return Promise.resolve(response) +} + +export const SearchTransactions = async ( + requestParams: RequestHandlerParams, +): Promise => { + const { params } = requestParams + const { + transaction_identifier: transactionIdentifier, + network_identifier: networkIdentifier, + limit, + } = params + + // Verify network identifier + if (!isValidNetworkIdentifier(networkIdentifier)) + throw new Error(`Network identifier is not valid`) + + if (!transactionIdentifier) throw new Error(`Transaction identifier is not valid`) + + const { hash } = transactionIdentifier + + if (!hash && hash.length <= 3) throw new Error(`Transaction identifier hash is not valid`) + + const transactionRepository = getCustomRepository(TransactionRepository) + const transactionsData = await transactionRepository.findByHashWithInstances(hash, limit || 5) + + const transactions = transactionsData.map((transaction) => { + return { + block_identifier: { + index: transaction.block.sequence, + hash: transaction.block.hash, + 
}, + transaction: { + transaction_identifier: { + hash: transaction.hash, + }, + operations: [], + metadata: { + timestamp: transaction.block.timestamp, + notes: transaction.notes, + spends: transaction.spends, + size: transaction.size, + fee: transaction.fee, + isMinerFee: transaction.fee < 0 && transaction.block.sequence > 1, + }, + }, + } + }) + + const response: SearchTransactionsResponse = { + transactions, + total_count: transactions.length, + } + + return Promise.resolve(response) +} diff --git a/ironfish-rosetta-api/src/services/index.ts b/ironfish-rosetta-api/src/services/index.ts new file mode 100644 index 0000000000..a94d8fdf56 --- /dev/null +++ b/ironfish-rosetta-api/src/services/index.ts @@ -0,0 +1,8 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './Network' +export * from './Block' +export * from './BlockTransaction' +export * from './Search' diff --git a/ironfish-rosetta-api/src/syncer.ts b/ironfish-rosetta-api/src/syncer.ts new file mode 100644 index 0000000000..c5e7c93c54 --- /dev/null +++ b/ironfish-rosetta-api/src/syncer.ts @@ -0,0 +1,25 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { Logger } from './utils/logger' + +import { Syncer } from './syncer/' +import { connection } from './config/database' + +const SLEEP_BETWEEN_SYNC = 20000 + +const startSyncer = async () => { + await connection + + const syncer = await Syncer.new() + + for (;;) { + await syncer.start() + await new Promise((resolve) => setTimeout(resolve, SLEEP_BETWEEN_SYNC)) + } +} + +startSyncer().catch((error) => { + Logger.error(error) +}) diff --git a/ironfish-rosetta-api/src/syncer/Syncer.ts b/ironfish-rosetta-api/src/syncer/Syncer.ts new file mode 100644 index 0000000000..88ec2ecd16 --- /dev/null +++ b/ironfish-rosetta-api/src/syncer/Syncer.ts @@ -0,0 +1,149 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { SyncerError } from '../errors' +import { Indexer } from '../indexer' +import { RPCClient } from '../rpc' +import { BlockIdentifier } from '../types' +import { Logger } from '../utils/logger' + +type SyncerState = { type: 'STARTED' } | { type: 'STOPPED' } +/** + * Sync the Iron Fish chain blocks and pass it to the + * indexer to store it in the database. + * + * It queries the NetworkStatus endpoint of the node to get the genesis block + * and the heaviest block. It will then go backward from the heaviest block to + * the genesis block and update the database on the way. 
+ * */ +export class Syncer { + indexer: Indexer + rpc: RPCClient + + private _state: Readonly = { type: 'STOPPED' } + + get state(): Readonly { + return this._state + } + + setState(state: Readonly): void { + this._state = state + } + + constructor(indexer: Indexer, rpcClient: RPCClient) { + this.indexer = indexer + this.rpc = rpcClient + } + + static async new(): Promise { + Logger.debug('SYNCER NEW') + const rpc = await RPCClient.init() + + const indexer = await new Indexer().init() + + return new Syncer(indexer, rpc) + } + + async start(): Promise { + Logger.debug('SYNCER START') + if (this.isStarted()) return + + this.setState({ type: 'STARTED' }) + + if (!this.rpc.sdk.client.isConnected) { + const connected = await this.rpc.sdk.client.tryConnect() + + if (!connected) { + Logger.error('Not connected to a node') + this.stop() + return + } + } + + Logger.debug('Syncer connected') + + const networkStatus = await this.rpc.sdk.client.getChainInfo({}) + + // no latest block + if (!networkStatus || !networkStatus.content) { + this.stop() + return + } + + const heaviestBlock = { + index: Number(networkStatus.content.oldestBlockIdentifier.index), + hash: networkStatus.content.oldestBlockIdentifier.hash, + } + const heaviestTail = { + index: Number(networkStatus.content.genesisBlockIdentifier.index), + hash: networkStatus.content.genesisBlockIdentifier.hash, + } + try { + await this.sync(heaviestBlock, heaviestTail) + } catch (err) { + Logger.debug('Error while syncing', err) + } + + this.stop() + } + + isStarted(): boolean { + return this.state.type === 'STARTED' + } + + stop(): void { + this.setState({ type: 'STOPPED' }) + } + + // Sync in reverse order from heaviest head to tail + async sync(startBlock: BlockIdentifier, endBlock: BlockIdentifier): Promise { + Logger.debug(`Syncing from ${startBlock.index} to ${endBlock.index}`) + let blockIdentifier = startBlock + + // check if genesis is the same + const genesis = await this.indexer.getBlock(endBlock.index, 
endBlock.hash) + if (!genesis) { + Logger.debug(`Genesis changed - delete every block`) + await this.indexer.deleteAllFromSequence(0) + } + + Logger.debug(`Delete any block above ${startBlock.index}`) + await this.indexer.deleteAllFromSequence(startBlock.index) + + while (startBlock.index > endBlock.index) { + const isBlockExisting = await this.indexer.getBlock( + Number(blockIdentifier.index), + blockIdentifier.hash, + ) + if (isBlockExisting) { + Logger.debug( + `Reached an existing block ${String(isBlockExisting.sequence)} ${ + isBlockExisting.hash + }`, + ) + break + } + + const result = await this.rpc.sdk.client.getBlock(blockIdentifier) + + Logger.debug('Fetching ', blockIdentifier) + + if (!result || !result.content) { + throw new SyncerError( + `Cannot fetch block ${blockIdentifier.hash} ${blockIdentifier.index}`, + ) + } + + const block = result.content + + await this.indexer.deleteAtSequence(Number(block.blockIdentifier.index)) + await this.indexer.addBlock(block) + + blockIdentifier = { + hash: block.parentBlockIdentifier.hash, + index: Number(block.parentBlockIdentifier.index), + } + } + } +} diff --git a/ironfish-rosetta-api/src/syncer/index.ts b/ironfish-rosetta-api/src/syncer/index.ts new file mode 100644 index 0000000000..73b6357155 --- /dev/null +++ b/ironfish-rosetta-api/src/syncer/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export * from './Syncer' diff --git a/ironfish-rosetta-api/src/types/.gitignore b/ironfish-rosetta-api/src/types/.gitignore new file mode 100644 index 0000000000..149b576547 --- /dev/null +++ b/ironfish-rosetta-api/src/types/.gitignore @@ -0,0 +1,4 @@ +wwwroot/*.js +node_modules +typings +dist diff --git a/ironfish-rosetta-api/src/types/RouteError.ts b/ironfish-rosetta-api/src/types/RouteError.ts new file mode 100644 index 0000000000..db5f24fe96 --- /dev/null +++ b/ironfish-rosetta-api/src/types/RouteError.ts @@ -0,0 +1,15 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export type RequestError = Record +export type RouteError = { + status: number + message?: string +} + +export const isRouteErrorType = (error: RequestError): error is RouteError => + error != null && + 'status' in error && + typeof error.status === 'number' && + (!('message' in error) || typeof error.message === 'string') diff --git a/ironfish-rosetta-api/src/types/api.ts b/ironfish-rosetta-api/src/types/api.ts new file mode 100644 index 0000000000..91463368c9 --- /dev/null +++ b/ironfish-rosetta-api/src/types/api.ts @@ -0,0 +1,3729 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * Rosetta + * Build Once. Integrate Your Blockchain Everywhere. + * + * The version of the OpenAPI document: 1.4.9 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. 
+ */ + + +import { Configuration } from './configuration'; +import globalAxios, { AxiosPromise, AxiosInstance } from 'axios'; +// Some imports not used depending on template conditions +// @ts-ignore +import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base'; + +/** + * An AccountBalanceRequest is utilized to make a balance request on the /account/balance endpoint. If the block_identifier is populated, a historical balance query should be performed. + * @export + * @interface AccountBalanceRequest + */ +export interface AccountBalanceRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof AccountBalanceRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {AccountIdentifier} + * @memberof AccountBalanceRequest + */ + account_identifier: AccountIdentifier; + /** + * + * @type {PartialBlockIdentifier} + * @memberof AccountBalanceRequest + */ + block_identifier?: PartialBlockIdentifier; + /** + * In some cases, the caller may not want to retrieve all available balances for an AccountIdentifier. If the currencies field is populated, only balances for the specified currencies will be returned. If not populated, all available balances will be returned. + * @type {Array} + * @memberof AccountBalanceRequest + */ + currencies?: Array; +} +/** + * An AccountBalanceResponse is returned on the /account/balance endpoint. If an account has a balance for each AccountIdentifier describing it (ex: an ERC-20 token balance on a few smart contracts), an account balance request must be made with each AccountIdentifier. The `coins` field was removed and replaced by by `/account/coins` in `v1.4.7`. + * @export + * @interface AccountBalanceResponse + */ +export interface AccountBalanceResponse { + /** + * + * @type {BlockIdentifier} + * @memberof AccountBalanceResponse + */ + block_identifier: BlockIdentifier; + /** + * A single account may have a balance in multiple currencies. 
+ * @type {Array} + * @memberof AccountBalanceResponse + */ + balances: Array; + /** + * Account-based blockchains that utilize a nonce or sequence number should include that number in the metadata. This number could be unique to the identifier or global across the account address. + * @type {object} + * @memberof AccountBalanceResponse + */ + metadata?: object; +} +/** + * AccountCoinsRequest is utilized to make a request on the /account/coins endpoint. + * @export + * @interface AccountCoinsRequest + */ +export interface AccountCoinsRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof AccountCoinsRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {AccountIdentifier} + * @memberof AccountCoinsRequest + */ + account_identifier: AccountIdentifier; + /** + * Include state from the mempool when looking up an account\'s unspent coins. Note, using this functionality breaks any guarantee of idempotency. + * @type {boolean} + * @memberof AccountCoinsRequest + */ + include_mempool: boolean; + /** + * In some cases, the caller may not want to retrieve coins for all currencies for an AccountIdentifier. If the currencies field is populated, only coins for the specified currencies will be returned. If not populated, all unspent coins will be returned. + * @type {Array} + * @memberof AccountCoinsRequest + */ + currencies?: Array; +} +/** + * AccountCoinsResponse is returned on the /account/coins endpoint and includes all unspent Coins owned by an AccountIdentifier. + * @export + * @interface AccountCoinsResponse + */ +export interface AccountCoinsResponse { + /** + * + * @type {BlockIdentifier} + * @memberof AccountCoinsResponse + */ + block_identifier: BlockIdentifier; + /** + * If a blockchain is UTXO-based, all unspent Coins owned by an account_identifier should be returned alongside the balance. 
It is highly recommended to populate this field so that users of the Rosetta API implementation don\'t need to maintain their own indexer to track their UTXOs. + * @type {Array} + * @memberof AccountCoinsResponse + */ + coins: Array; + /** + * Account-based blockchains that utilize a nonce or sequence number should include that number in the metadata. This number could be unique to the identifier or global across the account address. + * @type {object} + * @memberof AccountCoinsResponse + */ + metadata?: object; +} +/** + * The account_identifier uniquely identifies an account within a network. All fields in the account_identifier are utilized to determine this uniqueness (including the metadata field, if populated). + * @export + * @interface AccountIdentifier + */ +export interface AccountIdentifier { + /** + * The address may be a cryptographic public key (or some encoding of it) or a provided username. + * @type {string} + * @memberof AccountIdentifier + */ + address: string; + /** + * + * @type {SubAccountIdentifier} + * @memberof AccountIdentifier + */ + sub_account?: SubAccountIdentifier; + /** + * Blockchains that utilize a username model (where the address is not a derivative of a cryptographic public key) should specify the public key(s) owned by the address in metadata. + * @type {object} + * @memberof AccountIdentifier + */ + metadata?: object; +} +/** + * Allow specifies supported Operation status, Operation types, and all possible error statuses. This Allow object is used by clients to validate the correctness of a Rosetta Server implementation. It is expected that these clients will error if they receive some response that contains any of the above information that is not specified here. + * @export + * @interface Allow + */ +export interface Allow { + /** + * All Operation.Status this implementation supports. Any status that is returned during parsing that is not listed here will cause client validation to error. 
+ * @type {Array} + * @memberof Allow + */ + operation_statuses: Array; + /** + * All Operation.Type this implementation supports. Any type that is returned during parsing that is not listed here will cause client validation to error. + * @type {Array} + * @memberof Allow + */ + operation_types: Array; + /** + * All Errors that this implementation could return. Any error that is returned during parsing that is not listed here will cause client validation to error. + * @type {Array} + * @memberof Allow + */ + errors: Array; + /** + * Any Rosetta implementation that supports querying the balance of an account at any height in the past should set this to true. + * @type {boolean} + * @memberof Allow + */ + historical_balance_lookup: boolean; + /** + * If populated, `timestamp_start_index` indicates the first block index where block timestamps are considered valid (i.e. all blocks less than `timestamp_start_index` could have invalid timestamps). This is useful when the genesis block (or blocks) of a network have timestamp 0. If not populated, block timestamps are assumed to be valid for all available blocks. + * @type {number} + * @memberof Allow + */ + timestamp_start_index?: number; + /** + * All methods that are supported by the /call endpoint. Communicating which parameters should be provided to /call is the responsibility of the implementer (this is en lieu of defining an entire type system and requiring the implementer to define that in Allow). + * @type {Array} + * @memberof Allow + */ + call_methods: Array; + /** + * BalanceExemptions is an array of BalanceExemption indicating which account balances could change without a corresponding Operation. BalanceExemptions should be used sparingly as they may introduce significant complexity for integrators that attempt to reconcile all account balance changes. 
If your implementation relies on any BalanceExemptions, you MUST implement historical balance lookup (the ability to query an account balance at any BlockIdentifier). + * @type {Array} + * @memberof Allow + */ + balance_exemptions: Array; + /** + * Any Rosetta implementation that can update an AccountIdentifier\'s unspent coins based on the contents of the mempool should populate this field as true. If false, requests to `/account/coins` that set `include_mempool` as true will be automatically rejected. + * @type {boolean} + * @memberof Allow + */ + mempool_coins: boolean; +} +/** + * Amount is some Value of a Currency. It is considered invalid to specify a Value without a Currency. + * @export + * @interface Amount + */ +export interface Amount { + /** + * Value of the transaction in atomic units represented as an arbitrary-sized signed integer. For example, 1 BTC would be represented by a value of 100000000. + * @type {string} + * @memberof Amount + */ + value: string; + /** + * + * @type {Currency} + * @memberof Amount + */ + currency: Currency; + /** + * + * @type {object} + * @memberof Amount + */ + metadata?: object; +} +/** + * BalanceExemption indicates that the balance for an exempt account could change without a corresponding Operation. This typically occurs with staking rewards, vesting balances, and Currencies with a dynamic supply. Currently, it is possible to exempt an account from strict reconciliation by SubAccountIdentifier.Address or by Currency. This means that any account with SubAccountIdentifier.Address would be exempt or any balance of a particular Currency would be exempt, respectively. BalanceExemptions should be used sparingly as they may introduce significant complexity for integrators that attempt to reconcile all account balance changes. If your implementation relies on any BalanceExemptions, you MUST implement historical balance lookup (the ability to query an account balance at any BlockIdentifier). 
+ * @export + * @interface BalanceExemption + */ +export interface BalanceExemption { + /** + * SubAccountAddress is the SubAccountIdentifier.Address that the BalanceExemption applies to (regardless of the value of SubAccountIdentifier.Metadata). + * @type {string} + * @memberof BalanceExemption + */ + sub_account_address?: string; + /** + * + * @type {Currency} + * @memberof BalanceExemption + */ + currency?: Currency; + /** + * + * @type {ExemptionType} + * @memberof BalanceExemption + */ + exemption_type?: ExemptionType; +} +/** + * Blocks contain an array of Transactions that occurred at a particular BlockIdentifier. A hard requirement for blocks returned by Rosetta implementations is that they MUST be _inalterable_: once a client has requested and received a block identified by a specific BlockIndentifier, all future calls for that same BlockIdentifier must return the same block contents. + * @export + * @interface Block + */ +export interface Block { + /** + * + * @type {BlockIdentifier} + * @memberof Block + */ + block_identifier: BlockIdentifier; + /** + * + * @type {BlockIdentifier} + * @memberof Block + */ + parent_block_identifier: BlockIdentifier; + /** + * The timestamp of the block in milliseconds since the Unix Epoch. The timestamp is stored in milliseconds because some blockchains produce blocks more often than once a second. + * @type {number} + * @memberof Block + */ + timestamp: number; + /** + * + * @type {Array} + * @memberof Block + */ + transactions: Array; + /** + * + * @type {object} + * @memberof Block + */ + metadata?: object; +} +/** + * BlockEvent represents the addition or removal of a BlockIdentifier from storage. Streaming BlockEvents allows lightweight clients to update their own state without needing to implement their own syncing logic. + * @export + * @interface BlockEvent + */ +export interface BlockEvent { + /** + * sequence is the unique identifier of a BlockEvent within the context of a NetworkIdentifier. 
+ * @type {number} + * @memberof BlockEvent + */ + sequence: number; + /** + * + * @type {BlockIdentifier} + * @memberof BlockEvent + */ + block_identifier: BlockIdentifier; + /** + * + * @type {BlockEventType} + * @memberof BlockEvent + */ + type: BlockEventType; +} +/** + * BlockEventType determines if a BlockEvent represents the addition or removal of a block. + * @export + * @enum {string} + */ +export enum BlockEventType { + Added = 'block_added', + Removed = 'block_removed' +} + +/** + * The block_identifier uniquely identifies a block in a particular network. + * @export + * @interface BlockIdentifier + */ +export interface BlockIdentifier { + /** + * This is also known as the block height. + * @type {number} + * @memberof BlockIdentifier + */ + index: number; + /** + * + * @type {string} + * @memberof BlockIdentifier + */ + hash: string; +} +/** + * A BlockRequest is utilized to make a block request on the /block endpoint. + * @export + * @interface BlockRequest + */ +export interface BlockRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof BlockRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {PartialBlockIdentifier} + * @memberof BlockRequest + */ + block_identifier: PartialBlockIdentifier; +} +/** + * A BlockResponse includes a fully-populated block or a partially-populated block with a list of other transactions to fetch (other_transactions). As a result of the consensus algorithm of some blockchains, blocks can be omitted (i.e. certain block indices can be skipped). If a query for one of these omitted indices is made, the response should not include a `Block` object. It is VERY important to note that blocks MUST still form a canonical, connected chain of blocks where each block has a unique index. In other words, the `PartialBlockIdentifier` of a block after an omitted block should reference the last non-omitted block. 
+ * @export + * @interface BlockResponse + */ +export interface BlockResponse { + /** + * + * @type {Block} + * @memberof BlockResponse + */ + block?: Block; + /** + * Some blockchains may require additional transactions to be fetched that weren\'t returned in the block response (ex: block only returns transaction hashes). For blockchains with a lot of transactions in each block, this can be very useful as consumers can concurrently fetch all transactions returned. + * @type {Array} + * @memberof BlockResponse + */ + other_transactions?: Array; +} +/** + * BlockTransaction contains a populated Transaction and the BlockIdentifier that contains it. + * @export + * @interface BlockTransaction + */ +export interface BlockTransaction { + /** + * + * @type {BlockIdentifier} + * @memberof BlockTransaction + */ + block_identifier: BlockIdentifier; + /** + * + * @type {Transaction} + * @memberof BlockTransaction + */ + transaction: Transaction; +} +/** + * A BlockTransactionRequest is used to fetch a Transaction included in a block that is not returned in a BlockResponse. + * @export + * @interface BlockTransactionRequest + */ +export interface BlockTransactionRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof BlockTransactionRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {BlockIdentifier} + * @memberof BlockTransactionRequest + */ + block_identifier: BlockIdentifier; + /** + * + * @type {TransactionIdentifier} + * @memberof BlockTransactionRequest + */ + transaction_identifier: TransactionIdentifier; +} +/** + * A BlockTransactionResponse contains information about a block transaction. + * @export + * @interface BlockTransactionResponse + */ +export interface BlockTransactionResponse { + /** + * + * @type {Transaction} + * @memberof BlockTransactionResponse + */ + transaction: Transaction; +} +/** + * CallRequest is the input to the `/call` endpoint. 
+ * @export + * @interface CallRequest + */ +export interface CallRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof CallRequest + */ + network_identifier: NetworkIdentifier; + /** + * Method is some network-specific procedure call. This method could map to a network-specific RPC endpoint, a method in an SDK generated from a smart contract, or some hybrid of the two. The implementation must define all available methods in the Allow object. However, it is up to the caller to determine which parameters to provide when invoking `/call`. + * @type {string} + * @memberof CallRequest + */ + method: string; + /** + * Parameters is some network-specific argument for a method. It is up to the caller to determine which parameters to provide when invoking `/call`. + * @type {object} + * @memberof CallRequest + */ + parameters: object; +} +/** + * CallResponse contains the result of a `/call` invocation. + * @export + * @interface CallResponse + */ +export interface CallResponse { + /** + * Result contains the result of the `/call` invocation. This result will not be inspected or interpreted by Rosetta tooling and is left to the caller to decode. + * @type {object} + * @memberof CallResponse + */ + result: object; + /** + * Idempotent indicates that if `/call` is invoked with the same CallRequest again, at any point in time, it will return the same CallResponse. Integrators may cache the CallResponse if this is set to true to avoid making unnecessary calls to the Rosetta implementation. For this reason, implementers should be very conservative about returning true here or they could cause issues for the caller. + * @type {boolean} + * @memberof CallResponse + */ + idempotent: boolean; +} +/** + * Coin contains its unique identifier and the amount it represents. 
+ * @export + * @interface Coin + */ +export interface Coin { + /** + * + * @type {CoinIdentifier} + * @memberof Coin + */ + coin_identifier: CoinIdentifier; + /** + * + * @type {Amount} + * @memberof Coin + */ + amount: Amount; +} +/** + * CoinActions are different state changes that a Coin can undergo. When a Coin is created, it is coin_created. When a Coin is spent, it is coin_spent. It is assumed that a single Coin cannot be created or spent more than once. + * @export + * @enum {string} + */ +export enum CoinAction { + Created = 'coin_created', + Spent = 'coin_spent' +} + +/** + * CoinChange is used to represent a change in state of some coin identified by a coin_identifier. This object is part of the Operation model and must be populated for UTXO-based blockchains. Coincidentally, this abstraction of UTXOs allows for supporting both account-based transfers and UTXO-based transfers on the same blockchain (when a transfer is account-based, don\'t populate this model). + * @export + * @interface CoinChange + */ +export interface CoinChange { + /** + * + * @type {CoinIdentifier} + * @memberof CoinChange + */ + coin_identifier: CoinIdentifier; + /** + * + * @type {CoinAction} + * @memberof CoinChange + */ + coin_action: CoinAction; +} +/** + * CoinIdentifier uniquely identifies a Coin. + * @export + * @interface CoinIdentifier + */ +export interface CoinIdentifier { + /** + * Identifier should be populated with a globally unique identifier of a Coin. In Bitcoin, this identifier would be transaction_hash:index. + * @type {string} + * @memberof CoinIdentifier + */ + identifier: string; +} +/** + * ConstructionCombineRequest is the input to the `/construction/combine` endpoint. It contains the unsigned transaction blob returned by `/construction/payloads` and all required signatures to create a network transaction. 
+ * @export + * @interface ConstructionCombineRequest + */ +export interface ConstructionCombineRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof ConstructionCombineRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {string} + * @memberof ConstructionCombineRequest + */ + unsigned_transaction: string; + /** + * + * @type {Array} + * @memberof ConstructionCombineRequest + */ + signatures: Array; +} +/** + * ConstructionCombineResponse is returned by `/construction/combine`. The network payload will be sent directly to the `construction/submit` endpoint. + * @export + * @interface ConstructionCombineResponse + */ +export interface ConstructionCombineResponse { + /** + * + * @type {string} + * @memberof ConstructionCombineResponse + */ + signed_transaction: string; +} +/** + * ConstructionDeriveRequest is passed to the `/construction/derive` endpoint. Network is provided in the request because some blockchains have different address formats for different networks. Metadata is provided in the request because some blockchains allow for multiple address types (i.e. different address for validators vs normal accounts). + * @export + * @interface ConstructionDeriveRequest + */ +export interface ConstructionDeriveRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof ConstructionDeriveRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {PublicKey} + * @memberof ConstructionDeriveRequest + */ + public_key: PublicKey; + /** + * + * @type {object} + * @memberof ConstructionDeriveRequest + */ + metadata?: object; +} +/** + * ConstructionDeriveResponse is returned by the `/construction/derive` endpoint. + * @export + * @interface ConstructionDeriveResponse + */ +export interface ConstructionDeriveResponse { + /** + * [DEPRECATED by `account_identifier` in `v1.4.4`] Address in network-specific format. 
+ * @type {string} + * @memberof ConstructionDeriveResponse + */ + address?: string; + /** + * + * @type {AccountIdentifier} + * @memberof ConstructionDeriveResponse + */ + account_identifier?: AccountIdentifier; + /** + * + * @type {object} + * @memberof ConstructionDeriveResponse + */ + metadata?: object; +} +/** + * ConstructionHashRequest is the input to the `/construction/hash` endpoint. + * @export + * @interface ConstructionHashRequest + */ +export interface ConstructionHashRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof ConstructionHashRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {string} + * @memberof ConstructionHashRequest + */ + signed_transaction: string; +} +/** + * A ConstructionMetadataRequest is utilized to get information required to construct a transaction. The Options object used to specify which metadata to return is left purposely unstructured to allow flexibility for implementers. Options is not required in the case that there is network-wide metadata of interest. Optionally, the request can also include an array of PublicKeys associated with the AccountIdentifiers returned in ConstructionPreprocessResponse. + * @export + * @interface ConstructionMetadataRequest + */ +export interface ConstructionMetadataRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof ConstructionMetadataRequest + */ + network_identifier: NetworkIdentifier; + /** + * Some blockchains require different metadata for different types of transaction construction (ex: delegation versus a transfer). Instead of requiring a blockchain node to return all possible types of metadata for construction (which may require multiple node fetches), the client can populate an options object to limit the metadata returned to only the subset required. 
+ * @type {object} + * @memberof ConstructionMetadataRequest + */ + options?: object; + /** + * + * @type {Array} + * @memberof ConstructionMetadataRequest + */ + public_keys?: Array; +} +/** + * The ConstructionMetadataResponse returns network-specific metadata used for transaction construction. Optionally, the implementer can return the suggested fee associated with the transaction being constructed. The caller may use this info to adjust the intent of the transaction or to create a transaction with a different account that can pay the suggested fee. Suggested fee is an array in case fee payment must occur in multiple currencies. + * @export + * @interface ConstructionMetadataResponse + */ +export interface ConstructionMetadataResponse { + /** + * + * @type {object} + * @memberof ConstructionMetadataResponse + */ + metadata: object; + /** + * + * @type {Array} + * @memberof ConstructionMetadataResponse + */ + suggested_fee?: Array; +} +/** + * ConstructionParseRequest is the input to the `/construction/parse` endpoint. It allows the caller to parse either an unsigned or signed transaction. + * @export + * @interface ConstructionParseRequest + */ +export interface ConstructionParseRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof ConstructionParseRequest + */ + network_identifier: NetworkIdentifier; + /** + * Signed is a boolean indicating whether the transaction is signed. + * @type {boolean} + * @memberof ConstructionParseRequest + */ + signed: boolean; + /** + * This must be either the unsigned transaction blob returned by `/construction/payloads` or the signed transaction blob returned by `/construction/combine`. + * @type {string} + * @memberof ConstructionParseRequest + */ + transaction: string; +} +/** + * ConstructionParseResponse contains an array of operations that occur in a transaction blob. This should match the array of operations provided to `/construction/preprocess` and `/construction/payloads`. 
+ * @export + * @interface ConstructionParseResponse + */ +export interface ConstructionParseResponse { + /** + * + * @type {Array} + * @memberof ConstructionParseResponse + */ + operations: Array; + /** + * [DEPRECATED by `account_identifier_signers` in `v1.4.4`] All signers (addresses) of a particular transaction. If the transaction is unsigned, it should be empty. + * @type {Array} + * @memberof ConstructionParseResponse + */ + signers?: Array; + /** + * + * @type {Array} + * @memberof ConstructionParseResponse + */ + account_identifier_signers?: Array; + /** + * + * @type {object} + * @memberof ConstructionParseResponse + */ + metadata?: object; +} +/** + * ConstructionPayloadsRequest is the request to `/construction/payloads`. It contains the network, a slice of operations, and arbitrary metadata that was returned by the call to `/construction/metadata`. Optionally, the request can also include an array of PublicKeys associated with the AccountIdentifiers returned in ConstructionPreprocessResponse. + * @export + * @interface ConstructionPayloadsRequest + */ +export interface ConstructionPayloadsRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof ConstructionPayloadsRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {Array} + * @memberof ConstructionPayloadsRequest + */ + operations: Array; + /** + * + * @type {object} + * @memberof ConstructionPayloadsRequest + */ + metadata?: object; + /** + * + * @type {Array} + * @memberof ConstructionPayloadsRequest + */ + public_keys?: Array; +} +/** + * ConstructionTransactionResponse is returned by `/construction/payloads`. It contains an unsigned transaction blob (that is usually needed to construct a network transaction from a collection of signatures) and an array of payloads that must be signed by the caller. 
+ * @export + * @interface ConstructionPayloadsResponse + */ +export interface ConstructionPayloadsResponse { + /** + * + * @type {string} + * @memberof ConstructionPayloadsResponse + */ + unsigned_transaction: string; + /** + * + * @type {Array} + * @memberof ConstructionPayloadsResponse + */ + payloads: Array; +} +/** + * ConstructionPreprocessRequest is passed to the `/construction/preprocess` endpoint so that a Rosetta implementation can determine which metadata it needs to request for construction. Metadata provided in this object should NEVER be a product of live data (i.e. the caller must follow some network-specific data fetching strategy outside of the Construction API to populate required Metadata). If live data is required for construction, it MUST be fetched in the call to `/construction/metadata`. The caller can provide a max fee they are willing to pay for a transaction. This is an array in the case fees must be paid in multiple currencies. The caller can also provide a suggested fee multiplier to indicate that the suggested fee should be scaled. This may be used to set higher fees for urgent transactions or to pay lower fees when there is less urgency. It is assumed that providing a very low multiplier (like 0.0001) will never lead to a transaction being created with a fee less than the minimum network fee (if applicable). In the case that the caller provides both a max fee and a suggested fee multiplier, the max fee will set an upper bound on the suggested fee (regardless of the multiplier provided). 
+ * @export + * @interface ConstructionPreprocessRequest + */ +export interface ConstructionPreprocessRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof ConstructionPreprocessRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {Array} + * @memberof ConstructionPreprocessRequest + */ + operations: Array; + /** + * + * @type {object} + * @memberof ConstructionPreprocessRequest + */ + metadata?: object; + /** + * + * @type {Array} + * @memberof ConstructionPreprocessRequest + */ + max_fee?: Array; + /** + * + * @type {number} + * @memberof ConstructionPreprocessRequest + */ + suggested_fee_multiplier?: number; +} +/** + * ConstructionPreprocessResponse contains `options` that will be sent unmodified to `/construction/metadata`. If it is not necessary to make a request to `/construction/metadata`, `options` should be omitted. Some blockchains require the PublicKey of particular AccountIdentifiers to construct a valid transaction. To fetch these PublicKeys, populate `required_public_keys` with the AccountIdentifiers associated with the desired PublicKeys. If it is not necessary to retrieve any PublicKeys for construction, `required_public_keys` should be omitted. + * @export + * @interface ConstructionPreprocessResponse + */ +export interface ConstructionPreprocessResponse { + /** + * The options that will be sent directly to `/construction/metadata` by the caller. + * @type {object} + * @memberof ConstructionPreprocessResponse + */ + options?: object; + /** + * + * @type {Array} + * @memberof ConstructionPreprocessResponse + */ + required_public_keys?: Array; +} +/** + * The transaction submission request includes a signed transaction. 
+ * @export + * @interface ConstructionSubmitRequest + */ +export interface ConstructionSubmitRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof ConstructionSubmitRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {string} + * @memberof ConstructionSubmitRequest + */ + signed_transaction: string; +} +/** + * Currency is composed of a canonical Symbol and Decimals. This Decimals value is used to convert an Amount.Value from atomic units (Satoshis) to standard units (Bitcoins). + * @export + * @interface Currency + */ +export interface Currency { + /** + * Canonical symbol associated with a currency. + * @type {string} + * @memberof Currency + */ + symbol: string; + /** + * Number of decimal places in the standard unit representation of the amount. For example, BTC has 8 decimals. Note that it is not possible to represent the value of some currency in atomic units that is not base 10. + * @type {number} + * @memberof Currency + */ + decimals: number; + /** + * Any additional information related to the currency itself. For example, it would be useful to populate this object with the contract address of an ERC-20 token. + * @type {object} + * @memberof Currency + */ + metadata?: object; +} +/** + * CurveType is the type of cryptographic curve associated with a PublicKey. 
* secp256k1: SEC compressed - `33 bytes` (https://secg.org/sec1-v2.pdf#subsubsection.2.3.3) * secp256r1: SEC compressed - `33 bytes` (https://secg.org/sec1-v2.pdf#subsubsection.2.3.3) * edwards25519: `y (255-bits) || x-sign-bit (1-bit)` - `32 bytes` (https://ed25519.cr.yp.to/ed25519-20110926.pdf) * tweedle: 1st pk : Fq.t (32 bytes) || 2nd pk : Fq.t (32 bytes) (https://github.com/CodaProtocol/coda/blob/develop/rfcs/0038-rosetta-construction-api.md#marshal-keys) + * @export + * @enum {string} + */ +export enum CurveType { + Secp256k1 = 'secp256k1', + Secp256r1 = 'secp256r1', + Edwards25519 = 'edwards25519', + Tweedle = 'tweedle' +} + +/** + * EventsBlocksRequest is utilized to fetch a sequence of BlockEvents indicating which blocks were added and removed from storage to reach the current state. + * @export + * @interface EventsBlocksRequest + */ +export interface EventsBlocksRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof EventsBlocksRequest + */ + network_identifier: NetworkIdentifier; + /** + * offset is the offset into the event stream to sync events from. If this field is not populated, we return the limit events backwards from tip. If this is set to 0, we start from the beginning. + * @type {number} + * @memberof EventsBlocksRequest + */ + offset?: number; + /** + * limit is the maximum number of events to fetch in one call. The implementation may return <= limit events. + * @type {number} + * @memberof EventsBlocksRequest + */ + limit?: number; +} +/** + * EventsBlocksResponse contains an ordered collection of BlockEvents and the max retrievable sequence. + * @export + * @interface EventsBlocksResponse + */ +export interface EventsBlocksResponse { + /** + * max_sequence is the maximum available sequence number to fetch. + * @type {number} + * @memberof EventsBlocksResponse + */ + max_sequence: number; + /** + * events is an array of BlockEvents indicating the order to add and remove blocks to maintain a canonical view of blockchain state. 
Lightweight clients can use this event stream to update state without implementing their own block syncing logic. + * @type {Array} + * @memberof EventsBlocksResponse + */ + events: Array; +} +/** + * ExemptionType is used to indicate if the live balance for an account subject to a BalanceExemption could increase above, decrease below, or equal the computed balance. * greater_or_equal: The live balance may increase above or equal the computed balance. This typically occurs with staking rewards that accrue on each block. * less_or_equal: The live balance may decrease below or equal the computed balance. This typically occurs as balance moves from locked to spendable on a vesting account. * dynamic: The live balance may increase above, decrease below, or equal the computed balance. This typically occurs with tokens that have a dynamic supply. + * @export + * @enum {string} + */ +export enum ExemptionType { + GreaterOrEqual = 'greater_or_equal', + LessOrEqual = 'less_or_equal', + Dynamic = 'dynamic' +} + +/** + * A MempoolResponse contains all transaction identifiers in the mempool for a particular network_identifier. + * @export + * @interface MempoolResponse + */ +export interface MempoolResponse { + /** + * + * @type {Array} + * @memberof MempoolResponse + */ + transaction_identifiers: Array; +} +/** + * A MempoolTransactionRequest is utilized to retrieve a transaction from the mempool. + * @export + * @interface MempoolTransactionRequest + */ +export interface MempoolTransactionRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof MempoolTransactionRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {TransactionIdentifier} + * @memberof MempoolTransactionRequest + */ + transaction_identifier: TransactionIdentifier; +} +/** + * A MempoolTransactionResponse contains an estimate of a mempool transaction. It may not be possible to know the full impact of a transaction in the mempool (ex: fee paid). 
+ * @export + * @interface MempoolTransactionResponse + */ +export interface MempoolTransactionResponse { + /** + * + * @type {Transaction} + * @memberof MempoolTransactionResponse + */ + transaction: Transaction; + /** + * + * @type {object} + * @memberof MempoolTransactionResponse + */ + metadata?: object; +} +/** + * A MetadataRequest is utilized in any request where the only argument is optional metadata. + * @export + * @interface MetadataRequest + */ +export interface MetadataRequest { + /** + * + * @type {object} + * @memberof MetadataRequest + */ + metadata?: object; +} +/** + * Instead of utilizing HTTP status codes to describe node errors (which often do not have a good analog), rich errors are returned using this object. Both the code and message fields can be individually used to correctly identify an error. Implementations MUST use unique values for both fields. + * @export + * @interface ModelError + */ +export interface ModelError { + /** + * Code is a network-specific error code. If desired, this code can be equivalent to an HTTP status code. + * @type {number} + * @memberof ModelError + */ + code: number; + /** + * Message is a network-specific error message. The message MUST NOT change for a given code. In particular, this means that any contextual information should be included in the details field. + * @type {string} + * @memberof ModelError + */ + message: string; + /** + * Description allows the implementer to optionally provide additional information about an error. In many cases, the content of this field will be a copy-and-paste from existing developer documentation. Description can ONLY be populated with generic information about a particular type of error. It MUST NOT be populated with information about a particular instantiation of an error (use `details` for this). 
Whereas the content of Error.Message should stay stable across releases, the content of Error.Description will likely change across releases (as implementers improve error documentation). For this reason, the content in this field is not part of any type assertion (unlike Error.Message). + * @type {string} + * @memberof ModelError + */ + description?: string; + /** + * An error is retriable if the same request may succeed if submitted again. + * @type {boolean} + * @memberof ModelError + */ + retriable: boolean; + /** + * Often times it is useful to return context specific to the request that caused the error (i.e. a sample of the stack trace or impacted account) in addition to the standard error message. + * @type {object} + * @memberof ModelError + */ + details?: object; +} +/** + * The network_identifier specifies which network a particular object is associated with. + * @export + * @interface NetworkIdentifier + */ +export interface NetworkIdentifier { + /** + * + * @type {string} + * @memberof NetworkIdentifier + */ + blockchain: string; + /** + * If a blockchain has a specific chain-id or network identifier, it should go in this field. It is up to the client to determine which network-specific identifier is mainnet or testnet. + * @type {string} + * @memberof NetworkIdentifier + */ + network: string; + /** + * + * @type {SubNetworkIdentifier} + * @memberof NetworkIdentifier + */ + sub_network_identifier?: SubNetworkIdentifier; +} +/** + * A NetworkListResponse contains all NetworkIdentifiers that the node can serve information for. + * @export + * @interface NetworkListResponse + */ +export interface NetworkListResponse { + /** + * + * @type {Array} + * @memberof NetworkListResponse + */ + network_identifiers: Array; +} +/** + * NetworkOptionsResponse contains information about the versioning of the node and the allowed operation statuses, operation types, and errors. 
+ * @export + * @interface NetworkOptionsResponse + */ +export interface NetworkOptionsResponse { + /** + * + * @type {Version} + * @memberof NetworkOptionsResponse + */ + version: Version; + /** + * + * @type {Allow} + * @memberof NetworkOptionsResponse + */ + allow: Allow; +} +/** + * A NetworkRequest is utilized to retrieve some data specific exclusively to a NetworkIdentifier. + * @export + * @interface NetworkRequest + */ +export interface NetworkRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof NetworkRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {object} + * @memberof NetworkRequest + */ + metadata?: object; +} +/** + * NetworkStatusResponse contains basic information about the node\'s view of a blockchain network. It is assumed that any BlockIdentifier.Index less than or equal to CurrentBlockIdentifier.Index can be queried. If a Rosetta implementation prunes historical state, it should populate the optional `oldest_block_identifier` field with the oldest block available to query. If this is not populated, it is assumed that the `genesis_block_identifier` is the oldest queryable block. If a Rosetta implementation performs some pre-sync before it is possible to query blocks, sync_status should be populated so that clients can still monitor healthiness. Without this field, it may appear that the implementation is stuck syncing and needs to be terminated. + * @export + * @interface NetworkStatusResponse + */ +export interface NetworkStatusResponse { + /** + * + * @type {BlockIdentifier} + * @memberof NetworkStatusResponse + */ + current_block_identifier: BlockIdentifier; + /** + * The timestamp of the block in milliseconds since the Unix Epoch. The timestamp is stored in milliseconds because some blockchains produce blocks more often than once a second. 
+ * @type {number} + * @memberof NetworkStatusResponse + */ + current_block_timestamp: number; + /** + * + * @type {BlockIdentifier} + * @memberof NetworkStatusResponse + */ + genesis_block_identifier: BlockIdentifier; + /** + * + * @type {BlockIdentifier} + * @memberof NetworkStatusResponse + */ + oldest_block_identifier?: BlockIdentifier; + /** + * + * @type {SyncStatus} + * @memberof NetworkStatusResponse + */ + sync_status?: SyncStatus; + /** + * + * @type {Array} + * @memberof NetworkStatusResponse + */ + peers: Array; +} +/** + * Operations contain all balance-changing information within a transaction. They are always one-sided (only affect 1 AccountIdentifier) and can succeed or fail independently from a Transaction. Operations are used both to represent on-chain data (Data API) and to construct new transactions (Construction API), creating a standard interface for reading and writing to blockchains. + * @export + * @interface Operation + */ +export interface Operation { + /** + * + * @type {OperationIdentifier} + * @memberof Operation + */ + operation_identifier: OperationIdentifier; + /** + * Restrict referenced related_operations to identifier indices < the current operation_identifier.index. This ensures there exists a clear DAG-structure of relations. Since operations are one-sided, one could imagine relating operations in a single transfer or linking operations in a call tree. + * @type {Array} + * @memberof Operation + */ + related_operations?: Array; + /** + * Type is the network-specific type of the operation. Ensure that any type that can be returned here is also specified in the NetworkOptionsResponse. This can be very useful to downstream consumers that parse all block data. + * @type {string} + * @memberof Operation + */ + type: string; + /** + * Status is the network-specific status of the operation. 
Status is not defined on the transaction object because blockchains with smart contracts may have transactions that partially apply (some operations are successful and some are not). Blockchains with atomic transactions (all operations succeed or all operations fail) will have the same status for each operation. On-chain operations (operations retrieved in the `/block` and `/block/transaction` endpoints) MUST have a populated status field (anything on-chain must have succeeded or failed). However, operations provided during transaction construction (often times called \"intent\" in the documentation) MUST NOT have a populated status field (operations yet to be included on-chain have not yet succeeded or failed). + * @type {string} + * @memberof Operation + */ + status?: string; + /** + * + * @type {AccountIdentifier} + * @memberof Operation + */ + account?: AccountIdentifier; + /** + * + * @type {Amount} + * @memberof Operation + */ + amount?: Amount; + /** + * + * @type {CoinChange} + * @memberof Operation + */ + coin_change?: CoinChange; + /** + * + * @type {object} + * @memberof Operation + */ + metadata?: object; +} +/** + * The operation_identifier uniquely identifies an operation within a transaction. + * @export + * @interface OperationIdentifier + */ +export interface OperationIdentifier { + /** + * The operation index is used to ensure each operation has a unique identifier within a transaction. This index is only relative to the transaction and NOT GLOBAL. The operations in each transaction should start from index 0. To clarify, there may not be any notion of an operation index in the blockchain being described. + * @type {number} + * @memberof OperationIdentifier + */ + index: number; + /** + * Some blockchains specify an operation index that is essential for client use. For example, Bitcoin uses a network_index to identify which UTXO was used in a transaction. 
network_index should not be populated if there is no notion of an operation index in a blockchain (typically most account-based blockchains). + * @type {number} + * @memberof OperationIdentifier + */ + network_index?: number; +} +/** + * OperationStatus is utilized to indicate which Operation status are considered successful. + * @export + * @interface OperationStatus + */ +export interface OperationStatus { + /** + * The status is the network-specific status of the operation. + * @type {string} + * @memberof OperationStatus + */ + status: string; + /** + * An Operation is considered successful if the Operation.Amount should affect the Operation.Account. Some blockchains (like Bitcoin) only include successful operations in blocks but other blockchains (like Ethereum) include unsuccessful operations that incur a fee. To reconcile the computed balance from the stream of Operations, it is critical to understand which Operation.Status indicate an Operation is successful and should affect an Account. + * @type {boolean} + * @memberof OperationStatus + */ + successful: boolean; +} +/** + * Operator is used by query-related endpoints to determine how to apply conditions. If this field is not populated, the default `and` value will be used. + * @export + * @enum {string} + */ +export enum Operator { + Or = 'or', + And = 'and' +} + +/** + * When fetching data by BlockIdentifier, it may be possible to only specify the index or hash. If neither property is specified, it is assumed that the client is making a request at the current block. + * @export + * @interface PartialBlockIdentifier + */ +export interface PartialBlockIdentifier { + /** + * + * @type {number} + * @memberof PartialBlockIdentifier + */ + index?: number; + /** + * + * @type {string} + * @memberof PartialBlockIdentifier + */ + hash?: string; +} +/** + * A Peer is a representation of a node\'s peer. 
+ * @export + * @interface Peer + */ +export interface Peer { + /** + * + * @type {string} + * @memberof Peer + */ + peer_id: string; + /** + * + * @type {object} + * @memberof Peer + */ + metadata?: object; +} +/** + * PublicKey contains a public key byte array for a particular CurveType encoded in hex. Note that there is no PrivateKey struct as this is NEVER the concern of an implementation. + * @export + * @interface PublicKey + */ +export interface PublicKey { + /** + * Hex-encoded public key bytes in the format specified by the CurveType. + * @type {string} + * @memberof PublicKey + */ + hex_bytes: string; + /** + * + * @type {CurveType} + * @memberof PublicKey + */ + curve_type: CurveType; +} +/** + * SearchTransactionsRequest is used to search for transactions matching a set of provided conditions in canonical blocks. + * @export + * @interface SearchTransactionsRequest + */ +export interface SearchTransactionsRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof SearchTransactionsRequest + */ + network_identifier: NetworkIdentifier; + /** + * + * @type {Operator} + * @memberof SearchTransactionsRequest + */ + operator?: Operator; + /** + * max_block is the largest block index to consider when searching for transactions. If this field is not populated, the current block is considered the max_block. If you do not specify a max_block, it is possible a newly synced block will interfere with paginated transaction queries (as the offset could become invalid with newly added rows). + * @type {number} + * @memberof SearchTransactionsRequest + */ + max_block?: number; + /** + * offset is the offset into the query result to start returning transactions. If any search conditions are changed, the query offset will change and you must restart your search iteration. + * @type {number} + * @memberof SearchTransactionsRequest + */ + offset?: number; + /** + * limit is the maximum number of transactions to return in one call. 
+ The implementation may return <= limit transactions. + * @type {number} + * @memberof SearchTransactionsRequest + */ + limit?: number; + /** + * + * @type {TransactionIdentifier} + * @memberof SearchTransactionsRequest + */ + transaction_identifier?: TransactionIdentifier; + /** + * + * @type {AccountIdentifier} + * @memberof SearchTransactionsRequest + */ + account_identifier?: AccountIdentifier; + /** + * + * @type {CoinIdentifier} + * @memberof SearchTransactionsRequest + */ + coin_identifier?: CoinIdentifier; + /** + * + * @type {Currency} + * @memberof SearchTransactionsRequest + */ + currency?: Currency; + /** + * status is the network-specific operation status. + * @type {string} + * @memberof SearchTransactionsRequest + */ + status?: string; + /** + * type is the network-specific operation type. + * @type {string} + * @memberof SearchTransactionsRequest + */ + type?: string; + /** + * address is AccountIdentifier.Address. This is used to get all transactions related to an AccountIdentifier.Address, regardless of SubAccountIdentifier. + * @type {string} + * @memberof SearchTransactionsRequest + */ + address?: string; + /** + * success is a synthetic condition populated by parsing network-specific operation statuses (using the mapping provided in `/network/options`). + * @type {boolean} + * @memberof SearchTransactionsRequest + */ + success?: boolean; +} +/** + * SearchTransactionsResponse contains an ordered collection of BlockTransactions that match the query in SearchTransactionsRequest. These BlockTransactions are sorted from most recent block to oldest block. + * @export + * @interface SearchTransactionsResponse + */ +export interface SearchTransactionsResponse { + /** + * transactions is an array of BlockTransactions sorted by most recent BlockIdentifier (meaning that transactions in recent blocks appear first). If there are many transactions for a particular search, transactions may not contain all matching transactions. 
It is up to the caller to paginate these transactions using the max_block field. + * @type {Array} + * @memberof SearchTransactionsResponse + */ + transactions: Array; + /** + * total_count is the number of results for a given search. Callers typically use this value to concurrently fetch results by offset or to display a virtual page number associated with results. + * @type {number} + * @memberof SearchTransactionsResponse + */ + total_count: number; + /** + * next_offset is the next offset to use when paginating through transaction results. If this field is not populated, there are no more transactions to query. + * @type {number} + * @memberof SearchTransactionsResponse + */ + next_offset?: number; +} +/** + * Signature contains the payload that was signed, the public keys of the keypairs used to produce the signature, the signature (encoded in hex), and the SignatureType. PublicKey is often times not known during construction of the signing payloads but may be needed to combine signatures properly. + * @export + * @interface Signature + */ +export interface Signature { + /** + * + * @type {SigningPayload} + * @memberof Signature + */ + signing_payload: SigningPayload; + /** + * + * @type {PublicKey} + * @memberof Signature + */ + public_key: PublicKey; + /** + * + * @type {SignatureType} + * @memberof Signature + */ + signature_type: SignatureType; + /** + * + * @type {string} + * @memberof Signature + */ + hex_bytes: string; +} +/** + * SignatureType is the type of a cryptographic signature. * ecdsa: `r (32-bytes) || s (32-bytes)` - `64 bytes` * ecdsa_recovery: `r (32-bytes) || s (32-bytes) || v (1-byte)` - `65 bytes` * ed25519: `R (32-byte) || s (32-bytes)` - `64 bytes` * schnorr_1: `r (32-bytes) || s (32-bytes)` - `64 bytes` (schnorr signature implemented by Zilliqa where both `r` and `s` are scalars encoded as `32-bytes` values, most significant byte first.) 
* schnorr_poseidon: `r (32-bytes) || s (32-bytes)` where s = Hash(1st pk || 2nd pk || r) - `64 bytes` (schnorr signature w/ Poseidon hash function implemented by O(1) Labs where both `r` and `s` are scalars encoded as `32-bytes` values, least significant byte first. https://github.com/CodaProtocol/signer-reference/blob/master/schnorr.ml ) + * @export + * @enum {string} + */ +export enum SignatureType { + Ecdsa = 'ecdsa', + EcdsaRecovery = 'ecdsa_recovery', + Ed25519 = 'ed25519', + Schnorr1 = 'schnorr_1', + SchnorrPoseidon = 'schnorr_poseidon' +} + +/** + * SigningPayload is signed by the client with the keypair associated with an AccountIdentifier using the specified SignatureType. SignatureType can be optionally populated if there is a restriction on the signature scheme that can be used to sign the payload. + * @export + * @interface SigningPayload + */ +export interface SigningPayload { + /** + * [DEPRECATED by `account_identifier` in `v1.4.4`] The network-specific address of the account that should sign the payload. + * @type {string} + * @memberof SigningPayload + */ + address?: string; + /** + * + * @type {AccountIdentifier} + * @memberof SigningPayload + */ + account_identifier?: AccountIdentifier; + /** + * + * @type {string} + * @memberof SigningPayload + */ + hex_bytes: string; + /** + * + * @type {SignatureType} + * @memberof SigningPayload + */ + signature_type?: SignatureType; +} +/** + * An account may have state specific to a contract address (ERC-20 token) and/or a stake (delegated balance). The sub_account_identifier should specify which state (if applicable) an account instantiation refers to. + * @export + * @interface SubAccountIdentifier + */ +export interface SubAccountIdentifier { + /** + * The SubAccount address may be a cryptographic value or some other identifier (ex: bonded) that uniquely specifies a SubAccount. 
+ * @type {string} + * @memberof SubAccountIdentifier + */ + address: string; + /** + * If the SubAccount address is not sufficient to uniquely specify a SubAccount, any other identifying information can be stored here. It is important to note that two SubAccounts with identical addresses but differing metadata will not be considered equal by clients. + * @type {object} + * @memberof SubAccountIdentifier + */ + metadata?: object; +} +/** + * In blockchains with sharded state, the SubNetworkIdentifier is required to query some object on a specific shard. This identifier is optional for all non-sharded blockchains. + * @export + * @interface SubNetworkIdentifier + */ +export interface SubNetworkIdentifier { + /** + * + * @type {string} + * @memberof SubNetworkIdentifier + */ + network: string; + /** + * + * @type {object} + * @memberof SubNetworkIdentifier + */ + metadata?: object; +} +/** + * SyncStatus is used to provide additional context about an implementation\'s sync status. This object is often used by implementations to indicate healthiness when block data cannot be queried until some sync phase completes or cannot be determined by comparing the timestamp of the most recent block with the current time. + * @export + * @interface SyncStatus + */ +export interface SyncStatus { + /** + * CurrentIndex is the index of the last synced block in the current stage. This is a separate field from current_block_identifier in NetworkStatusResponse because blocks with indices up to and including the current_index may not yet be queryable by the caller. To reiterate, all indices up to and including current_block_identifier in NetworkStatusResponse must be queryable via the /block endpoint (excluding indices less than oldest_block_identifier). + * @type {number} + * @memberof SyncStatus + */ + current_index?: number; + /** + * TargetIndex is the index of the block that the implementation is attempting to sync to in the current stage. 
+ * @type {number} + * @memberof SyncStatus + */ + target_index?: number; + /** + * Stage is the phase of the sync process. + * @type {string} + * @memberof SyncStatus + */ + stage?: string; + /** + * synced is a boolean that indicates if an implementation has synced up to the most recent block. If this field is not populated, the caller should rely on a traditional tip timestamp comparison to determine if an implementation is synced. This field is particularly useful for quiescent blockchains (blocks only produced when there are pending transactions). In these blockchains, the most recent block could have a timestamp far behind the current time but the node could be healthy and at tip. + * @type {boolean} + * @memberof SyncStatus + */ + synced?: boolean; +} +/** + * Transactions contain an array of Operations that are attributable to the same TransactionIdentifier. + * @export + * @interface Transaction + */ +export interface Transaction { + /** + * + * @type {TransactionIdentifier} + * @memberof Transaction + */ + transaction_identifier: TransactionIdentifier; + /** + * + * @type {Array} + * @memberof Transaction + */ + operations: Array; + /** + * Transactions that are related to other transactions (like a cross-shard transaction) should include the transaction_identifier of these transactions in the metadata. + * @type {object} + * @memberof Transaction + */ + metadata?: object; +} +/** + * The transaction_identifier uniquely identifies a transaction in a particular network and block or in the mempool. + * @export + * @interface TransactionIdentifier + */ +export interface TransactionIdentifier { + /** + * Any transactions that are attributable only to a block (ex: a block event) should use the hash of the block as the identifier. 
+ * @type {string} + * @memberof TransactionIdentifier + */ + hash: string; +} +/** + * TransactionIdentifierResponse contains the transaction_identifier of a transaction that was submitted to either `/construction/hash` or `/construction/submit`. + * @export + * @interface TransactionIdentifierResponse + */ +export interface TransactionIdentifierResponse { + /** + * + * @type {TransactionIdentifier} + * @memberof TransactionIdentifierResponse + */ + transaction_identifier: TransactionIdentifier; + /** + * + * @type {object} + * @memberof TransactionIdentifierResponse + */ + metadata?: object; +} +/** + * The Version object is utilized to inform the client of the versions of different components of the Rosetta implementation. + * @export + * @interface Version + */ +export interface Version { + /** + * The rosetta_version is the version of the Rosetta interface the implementation adheres to. This can be useful for clients looking to reliably parse responses. + * @type {string} + * @memberof Version + */ + rosetta_version: string; + /** + * The node_version is the canonical version of the node runtime. This can help clients manage deployments. + * @type {string} + * @memberof Version + */ + node_version: string; + /** + * When a middleware server is used to adhere to the Rosetta interface, it should return its version here. This can help clients manage deployments. + * @type {string} + * @memberof Version + */ + middleware_version?: string; + /** + * Any other information that may be useful about versioning of dependent services should be returned here. + * @type {object} + * @memberof Version + */ + metadata?: object; +} + +/** + * AccountApi - axios parameter creator + * @export + */ +export const AccountApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * Get an array of all AccountBalances for an AccountIdentifier and the BlockIdentifier at which the balance lookup was performed. 
The BlockIdentifier must always be returned because some consumers of account balance data need to know specifically at which block the balance was calculated to compare balances they compute from operations with the balance returned by the node. It is important to note that making a balance request for an account without populating the SubAccountIdentifier should not result in the balance of all possible SubAccountIdentifiers being returned. Rather, it should result in the balance pertaining to no SubAccountIdentifiers being returned (sometimes called the liquid balance). To get all balances associated with an account, it may be necessary to perform multiple balance requests with unique AccountIdentifiers. It is also possible to perform a historical balance lookup (if the server supports it) by passing in an optional BlockIdentifier. + * @summary Get an Account\'s Balance + * @param {AccountBalanceRequest} accountBalanceRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + accountBalance: async (accountBalanceRequest: AccountBalanceRequest, options: any = {}): Promise => { + // verify required parameter 'accountBalanceRequest' is not null or undefined + if (accountBalanceRequest === null || accountBalanceRequest === undefined) { + throw new RequiredError('accountBalanceRequest','Required parameter accountBalanceRequest was null or undefined when calling accountBalance.'); + } + const localVarPath = `/account/balance`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof accountBalanceRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(accountBalanceRequest !== undefined ? accountBalanceRequest : {}) + : (accountBalanceRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * Get an array of all unspent coins for an AccountIdentifier and the BlockIdentifier at which the lookup was performed. If your implementation does not support coins (i.e. it is for an account-based blockchain), you do not need to implement this endpoint. If you implementation does support coins (i.e. it is fro a UTXO-based blockchain), you MUST also complete the `/account/balance` endpoint. 
It is important to note that making a coins request for an account without populating the SubAccountIdentifier should not result in the coins of all possible SubAccountIdentifiers being returned. Rather, it should result in the coins pertaining to no SubAccountIdentifiers being returned. To get all coins associated with an account, it may be necessary to perform multiple coin requests with unique AccountIdentifiers. Optionally, an implementation may choose to support updating an AccountIdentifier\'s unspent coins based on the contents of the mempool. Note, using this functionality breaks any guarantee of idempotency. + * @summary Get an Account\'s Unspent Coins + * @param {AccountCoinsRequest} accountCoinsRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + accountCoins: async (accountCoinsRequest: AccountCoinsRequest, options: any = {}): Promise => { + // verify required parameter 'accountCoinsRequest' is not null or undefined + if (accountCoinsRequest === null || accountCoinsRequest === undefined) { + throw new RequiredError('accountCoinsRequest','Required parameter accountCoinsRequest was null or undefined when calling accountCoins.'); + } + const localVarPath = `/account/coins`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof accountCoinsRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(accountCoinsRequest !== undefined ? accountCoinsRequest : {}) + : (accountCoinsRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * AccountApi - functional programming interface + * @export + */ +export const AccountApiFp = function(configuration?: Configuration) { + return { + /** + * Get an array of all AccountBalances for an AccountIdentifier and the BlockIdentifier at which the balance lookup was performed. 
The BlockIdentifier must always be returned because some consumers of account balance data need to know specifically at which block the balance was calculated to compare balances they compute from operations with the balance returned by the node. It is important to note that making a balance request for an account without populating the SubAccountIdentifier should not result in the balance of all possible SubAccountIdentifiers being returned. Rather, it should result in the balance pertaining to no SubAccountIdentifiers being returned (sometimes called the liquid balance). To get all balances associated with an account, it may be necessary to perform multiple balance requests with unique AccountIdentifiers. It is also possible to perform a historical balance lookup (if the server supports it) by passing in an optional BlockIdentifier. + * @summary Get an Account\'s Balance + * @param {AccountBalanceRequest} accountBalanceRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async accountBalance(accountBalanceRequest: AccountBalanceRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await AccountApiAxiosParamCreator(configuration).accountBalance(accountBalanceRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * Get an array of all unspent coins for an AccountIdentifier and the BlockIdentifier at which the lookup was performed. If your implementation does not support coins (i.e. it is for an account-based blockchain), you do not need to implement this endpoint. If you implementation does support coins (i.e. it is fro a UTXO-based blockchain), you MUST also complete the `/account/balance` endpoint. 
It is important to note that making a coins request for an account without populating the SubAccountIdentifier should not result in the coins of all possible SubAccountIdentifiers being returned. Rather, it should result in the coins pertaining to no SubAccountIdentifiers being returned. To get all coins associated with an account, it may be necessary to perform multiple coin requests with unique AccountIdentifiers. Optionally, an implementation may choose to support updating an AccountIdentifier\'s unspent coins based on the contents of the mempool. Note, using this functionality breaks any guarantee of idempotency. + * @summary Get an Account\'s Unspent Coins + * @param {AccountCoinsRequest} accountCoinsRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async accountCoins(accountCoinsRequest: AccountCoinsRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await AccountApiAxiosParamCreator(configuration).accountCoins(accountCoinsRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + } +}; + +/** + * AccountApi - factory interface + * @export + */ +export const AccountApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + return { + /** + * Get an array of all AccountBalances for an AccountIdentifier and the BlockIdentifier at which the balance lookup was performed. The BlockIdentifier must always be returned because some consumers of account balance data need to know specifically at which block the balance was calculated to compare balances they compute from operations with the balance returned by the node. 
It is important to note that making a balance request for an account without populating the SubAccountIdentifier should not result in the balance of all possible SubAccountIdentifiers being returned. Rather, it should result in the balance pertaining to no SubAccountIdentifiers being returned (sometimes called the liquid balance). To get all balances associated with an account, it may be necessary to perform multiple balance requests with unique AccountIdentifiers. It is also possible to perform a historical balance lookup (if the server supports it) by passing in an optional BlockIdentifier. + * @summary Get an Account\'s Balance + * @param {AccountBalanceRequest} accountBalanceRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + accountBalance(accountBalanceRequest: AccountBalanceRequest, options?: any): AxiosPromise { + return AccountApiFp(configuration).accountBalance(accountBalanceRequest, options).then((request) => request(axios, basePath)); + }, + /** + * Get an array of all unspent coins for an AccountIdentifier and the BlockIdentifier at which the lookup was performed. If your implementation does not support coins (i.e. it is for an account-based blockchain), you do not need to implement this endpoint. If you implementation does support coins (i.e. it is fro a UTXO-based blockchain), you MUST also complete the `/account/balance` endpoint. It is important to note that making a coins request for an account without populating the SubAccountIdentifier should not result in the coins of all possible SubAccountIdentifiers being returned. Rather, it should result in the coins pertaining to no SubAccountIdentifiers being returned. To get all coins associated with an account, it may be necessary to perform multiple coin requests with unique AccountIdentifiers. Optionally, an implementation may choose to support updating an AccountIdentifier\'s unspent coins based on the contents of the mempool. 
Note, using this functionality breaks any guarantee of idempotency. + * @summary Get an Account\'s Unspent Coins + * @param {AccountCoinsRequest} accountCoinsRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + accountCoins(accountCoinsRequest: AccountCoinsRequest, options?: any): AxiosPromise { + return AccountApiFp(configuration).accountCoins(accountCoinsRequest, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * AccountApi - object-oriented interface + * @export + * @class AccountApi + * @extends {BaseAPI} + */ +export class AccountApi extends BaseAPI { + /** + * Get an array of all AccountBalances for an AccountIdentifier and the BlockIdentifier at which the balance lookup was performed. The BlockIdentifier must always be returned because some consumers of account balance data need to know specifically at which block the balance was calculated to compare balances they compute from operations with the balance returned by the node. It is important to note that making a balance request for an account without populating the SubAccountIdentifier should not result in the balance of all possible SubAccountIdentifiers being returned. Rather, it should result in the balance pertaining to no SubAccountIdentifiers being returned (sometimes called the liquid balance). To get all balances associated with an account, it may be necessary to perform multiple balance requests with unique AccountIdentifiers. It is also possible to perform a historical balance lookup (if the server supports it) by passing in an optional BlockIdentifier. + * @summary Get an Account\'s Balance + * @param {AccountBalanceRequest} accountBalanceRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof AccountApi + */ + public accountBalance(accountBalanceRequest: AccountBalanceRequest, options?: any) { + return AccountApiFp(this.configuration).accountBalance(accountBalanceRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Get an array of all unspent coins for an AccountIdentifier and the BlockIdentifier at which the lookup was performed. If your implementation does not support coins (i.e. it is for an account-based blockchain), you do not need to implement this endpoint. If you implementation does support coins (i.e. it is fro a UTXO-based blockchain), you MUST also complete the `/account/balance` endpoint. It is important to note that making a coins request for an account without populating the SubAccountIdentifier should not result in the coins of all possible SubAccountIdentifiers being returned. Rather, it should result in the coins pertaining to no SubAccountIdentifiers being returned. To get all coins associated with an account, it may be necessary to perform multiple coin requests with unique AccountIdentifiers. Optionally, an implementation may choose to support updating an AccountIdentifier\'s unspent coins based on the contents of the mempool. Note, using this functionality breaks any guarantee of idempotency. + * @summary Get an Account\'s Unspent Coins + * @param {AccountCoinsRequest} accountCoinsRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof AccountApi + */ + public accountCoins(accountCoinsRequest: AccountCoinsRequest, options?: any) { + return AccountApiFp(this.configuration).accountCoins(accountCoinsRequest, options).then((request) => request(this.axios, this.basePath)); + } +} + + +/** + * BlockApi - axios parameter creator + * @export + */ +export const BlockApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * Get a block by its Block Identifier. 
If transactions are returned in the same call to the node as fetching the block, the response should include these transactions in the Block object. If not, an array of Transaction Identifiers should be returned so /block/transaction fetches can be done to get all transaction information. When requesting a block by the hash component of the BlockIdentifier, this request MUST be idempotent: repeated invocations for the same hash-identified block must return the exact same block contents. No such restriction is imposed when requesting a block by height, given that a chain reorg event might cause the specific block at height `n` to be set to a different one. + * @summary Get a Block + * @param {BlockRequest} blockRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + block: async (blockRequest: BlockRequest, options: any = {}): Promise => { + // verify required parameter 'blockRequest' is not null or undefined + if (blockRequest === null || blockRequest === undefined) { + throw new RequiredError('blockRequest','Required parameter blockRequest was null or undefined when calling block.'); + } + const localVarPath = `/block`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof blockRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(blockRequest !== undefined ? blockRequest : {}) + : (blockRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * Get a transaction in a block by its Transaction Identifier. This endpoint should only be used when querying a node for a block does not return all transactions contained within it. All transactions returned by this endpoint must be appended to any transactions returned by the /block method by consumers of this data. Fetching a transaction by hash is considered an Explorer Method (which is classified under the Future Work section). 
This method can be used to let consumers paginate results when the block transactions count is too big to be returned in a single BlockResponse. Calling this endpoint requires reference to a BlockIdentifier because transaction parsing can change depending on which block contains the transaction. For example, in Bitcoin it is necessary to know which block contains a transaction to determine the destination of fee payments. Without specifying a block identifier, the node would have to infer which block to use (which could change during a re-org). Implementations that require fetching previous transactions to populate the response (ex: Previous UTXOs in Bitcoin) may find it useful to run a cache within the Rosetta server in the /data directory (on a path that does not conflict with the node). + * @summary Get a Block Transaction + * @param {BlockTransactionRequest} blockTransactionRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + blockTransaction: async (blockTransactionRequest: BlockTransactionRequest, options: any = {}): Promise => { + // verify required parameter 'blockTransactionRequest' is not null or undefined + if (blockTransactionRequest === null || blockTransactionRequest === undefined) { + throw new RequiredError('blockTransactionRequest','Required parameter blockTransactionRequest was null or undefined when calling blockTransaction.'); + } + const localVarPath = `/block/transaction`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof blockTransactionRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(blockTransactionRequest !== undefined ? blockTransactionRequest : {}) + : (blockTransactionRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * BlockApi - functional programming interface + * @export + */ +export const BlockApiFp = function(configuration?: Configuration) { + return { + /** + * Get a block by its Block Identifier. If transactions are returned in the same call to the node as fetching the block, the response should include these transactions in the Block object. 
If not, an array of Transaction Identifiers should be returned so /block/transaction fetches can be done to get all transaction information. When requesting a block by the hash component of the BlockIdentifier, this request MUST be idempotent: repeated invocations for the same hash-identified block must return the exact same block contents. No such restriction is imposed when requesting a block by height, given that a chain reorg event might cause the specific block at height `n` to be set to a different one. + * @summary Get a Block + * @param {BlockRequest} blockRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async block(blockRequest: BlockRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await BlockApiAxiosParamCreator(configuration).block(blockRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * Get a transaction in a block by its Transaction Identifier. This endpoint should only be used when querying a node for a block does not return all transactions contained within it. All transactions returned by this endpoint must be appended to any transactions returned by the /block method by consumers of this data. Fetching a transaction by hash is considered an Explorer Method (which is classified under the Future Work section). This method can be used to let consumers to paginate results when the block trasactions count is too big to be returned in a single BlockResponse. Calling this endpoint requires reference to a BlockIdentifier because transaction parsing can change depending on which block contains the transaction. 
For example, in Bitcoin it is necessary to know which block contains a transaction to determine the destination of fee payments. Without specifying a block identifier, the node would have to infer which block to use (which could change during a re-org). Implementations that require fetching previous transactions to populate the response (ex: Previous UTXOs in Bitcoin) may find it useful to run a cache within the Rosetta server in the /data directory (on a path that does not conflict with the node). + * @summary Get a Block Transaction + * @param {BlockTransactionRequest} blockTransactionRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async blockTransaction(blockTransactionRequest: BlockTransactionRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await BlockApiAxiosParamCreator(configuration).blockTransaction(blockTransactionRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + } +}; + +/** + * BlockApi - factory interface + * @export + */ +export const BlockApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + return { + /** + * Get a block by its Block Identifier. If transactions are returned in the same call to the node as fetching the block, the response should include these transactions in the Block object. If not, an array of Transaction Identifiers should be returned so /block/transaction fetches can be done to get all transaction information. When requesting a block by the hash component of the BlockIdentifier, this request MUST be idempotent: repeated invocations for the same hash-identified block must return the exact same block contents. 
No such restriction is imposed when requesting a block by height, given that a chain reorg event might cause the specific block at height `n` to be set to a different one. + * @summary Get a Block + * @param {BlockRequest} blockRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + block(blockRequest: BlockRequest, options?: any): AxiosPromise { + return BlockApiFp(configuration).block(blockRequest, options).then((request) => request(axios, basePath)); + }, + /** + * Get a transaction in a block by its Transaction Identifier. This endpoint should only be used when querying a node for a block does not return all transactions contained within it. All transactions returned by this endpoint must be appended to any transactions returned by the /block method by consumers of this data. Fetching a transaction by hash is considered an Explorer Method (which is classified under the Future Work section). This method can be used to let consumers to paginate results when the block trasactions count is too big to be returned in a single BlockResponse. Calling this endpoint requires reference to a BlockIdentifier because transaction parsing can change depending on which block contains the transaction. For example, in Bitcoin it is necessary to know which block contains a transaction to determine the destination of fee payments. Without specifying a block identifier, the node would have to infer which block to use (which could change during a re-org). Implementations that require fetching previous transactions to populate the response (ex: Previous UTXOs in Bitcoin) may find it useful to run a cache within the Rosetta server in the /data directory (on a path that does not conflict with the node). + * @summary Get a Block Transaction + * @param {BlockTransactionRequest} blockTransactionRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + blockTransaction(blockTransactionRequest: BlockTransactionRequest, options?: any): AxiosPromise { + return BlockApiFp(configuration).blockTransaction(blockTransactionRequest, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * BlockApi - object-oriented interface + * @export + * @class BlockApi + * @extends {BaseAPI} + */ +export class BlockApi extends BaseAPI { + /** + * Get a block by its Block Identifier. If transactions are returned in the same call to the node as fetching the block, the response should include these transactions in the Block object. If not, an array of Transaction Identifiers should be returned so /block/transaction fetches can be done to get all transaction information. When requesting a block by the hash component of the BlockIdentifier, this request MUST be idempotent: repeated invocations for the same hash-identified block must return the exact same block contents. No such restriction is imposed when requesting a block by height, given that a chain reorg event might cause the specific block at height `n` to be set to a different one. + * @summary Get a Block + * @param {BlockRequest} blockRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof BlockApi + */ + public block(blockRequest: BlockRequest, options?: any) { + return BlockApiFp(this.configuration).block(blockRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Get a transaction in a block by its Transaction Identifier. This endpoint should only be used when querying a node for a block does not return all transactions contained within it. All transactions returned by this endpoint must be appended to any transactions returned by the /block method by consumers of this data. Fetching a transaction by hash is considered an Explorer Method (which is classified under the Future Work section). 
This method can be used to let consumers to paginate results when the block trasactions count is too big to be returned in a single BlockResponse. Calling this endpoint requires reference to a BlockIdentifier because transaction parsing can change depending on which block contains the transaction. For example, in Bitcoin it is necessary to know which block contains a transaction to determine the destination of fee payments. Without specifying a block identifier, the node would have to infer which block to use (which could change during a re-org). Implementations that require fetching previous transactions to populate the response (ex: Previous UTXOs in Bitcoin) may find it useful to run a cache within the Rosetta server in the /data directory (on a path that does not conflict with the node). + * @summary Get a Block Transaction + * @param {BlockTransactionRequest} blockTransactionRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof BlockApi + */ + public blockTransaction(blockTransactionRequest: BlockTransactionRequest, options?: any) { + return BlockApiFp(this.configuration).blockTransaction(blockTransactionRequest, options).then((request) => request(this.axios, this.basePath)); + } +} + + +/** + * CallApi - axios parameter creator + * @export + */ +export const CallApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * Call invokes an arbitrary, network-specific procedure call with network-specific parameters. The guidance for what this endpoint should or could do is purposely left vague. In Ethereum, this could be used to invoke `eth_call` to implement an entire Rosetta API interface for some smart contract that is not parsed by the implementation creator (like a DEX). 
This endpoint could also be used to provide access to data that does not map to any Rosetta models instead of requiring an integrator to use some network-specific SDK and call some network-specific endpoint (like surfacing staking parameters). Call is NOT a replacement for implementing Rosetta API endpoints or mapping network-specific data to Rosetta models. Rather, it enables developers to build additional Rosetta API interfaces for things they care about without introducing complexity into a base-level Rosetta implementation. Simply put, imagine that the average integrator will use layered Rosetta API implementations that each surfaces unique data. + * @summary Make a Network-Specific Procedure Call + * @param {CallRequest} callRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + call: async (callRequest: CallRequest, options: any = {}): Promise => { + // verify required parameter 'callRequest' is not null or undefined + if (callRequest === null || callRequest === undefined) { + throw new RequiredError('callRequest','Required parameter callRequest was null or undefined when calling call.'); + } + const localVarPath = `/call`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof callRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(callRequest !== undefined ? callRequest : {}) + : (callRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * CallApi - functional programming interface + * @export + */ +export const CallApiFp = function(configuration?: Configuration) { + return { + /** + * Call invokes an arbitrary, network-specific procedure call with network-specific parameters. The guidance for what this endpoint should or could do is purposely left vague. In Ethereum, this could be used to invoke `eth_call` to implement an entire Rosetta API interface for some smart contract that is not parsed by the implementation creator (like a DEX). 
This endpoint could also be used to provide access to data that does not map to any Rosetta models instead of requiring an integrator to use some network-specific SDK and call some network-specific endpoint (like surfacing staking parameters). Call is NOT a replacement for implementing Rosetta API endpoints or mapping network-specific data to Rosetta models. Rather, it enables developers to build additional Rosetta API interfaces for things they care about without introducing complexity into a base-level Rosetta implementation. Simply put, imagine that the average integrator will use layered Rosetta API implementations that each surfaces unique data. + * @summary Make a Network-Specific Procedure Call + * @param {CallRequest} callRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async call(callRequest: CallRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await CallApiAxiosParamCreator(configuration).call(callRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + } +}; + +/** + * CallApi - factory interface + * @export + */ +export const CallApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + return { + /** + * Call invokes an arbitrary, network-specific procedure call with network-specific parameters. The guidance for what this endpoint should or could do is purposely left vague. In Ethereum, this could be used to invoke `eth_call` to implement an entire Rosetta API interface for some smart contract that is not parsed by the implementation creator (like a DEX). 
This endpoint could also be used to provide access to data that does not map to any Rosetta models instead of requiring an integrator to use some network-specific SDK and call some network-specific endpoint (like surfacing staking parameters). Call is NOT a replacement for implementing Rosetta API endpoints or mapping network-specific data to Rosetta models. Rather, it enables developers to build additional Rosetta API interfaces for things they care about without introducing complexity into a base-level Rosetta implementation. Simply put, imagine that the average integrator will use layered Rosetta API implementations that each surfaces unique data. + * @summary Make a Network-Specific Procedure Call + * @param {CallRequest} callRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + call(callRequest: CallRequest, options?: any): AxiosPromise { + return CallApiFp(configuration).call(callRequest, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * CallApi - object-oriented interface + * @export + * @class CallApi + * @extends {BaseAPI} + */ +export class CallApi extends BaseAPI { + /** + * Call invokes an arbitrary, network-specific procedure call with network-specific parameters. The guidance for what this endpoint should or could do is purposely left vague. In Ethereum, this could be used to invoke `eth_call` to implement an entire Rosetta API interface for some smart contract that is not parsed by the implementation creator (like a DEX). This endpoint could also be used to provide access to data that does not map to any Rosetta models instead of requiring an integrator to use some network-specific SDK and call some network-specific endpoint (like surfacing staking parameters). Call is NOT a replacement for implementing Rosetta API endpoints or mapping network-specific data to Rosetta models. 
Rather, it enables developers to build additional Rosetta API interfaces for things they care about without introducing complexity into a base-level Rosetta implementation. Simply put, imagine that the average integrator will use layered Rosetta API implementations that each surfaces unique data. + * @summary Make a Network-Specific Procedure Call + * @param {CallRequest} callRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof CallApi + */ + public call(callRequest: CallRequest, options?: any) { + return CallApiFp(this.configuration).call(callRequest, options).then((request) => request(this.axios, this.basePath)); + } +} + + +/** + * ConstructionApi - axios parameter creator + * @export + */ +export const ConstructionApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * Combine creates a network-specific transaction from an unsigned transaction and an array of provided signatures. The signed transaction returned from this method will be sent to the `/construction/submit` endpoint by the caller. + * @summary Create Network Transaction from Signatures + * @param {ConstructionCombineRequest} constructionCombineRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + constructionCombine: async (constructionCombineRequest: ConstructionCombineRequest, options: any = {}): Promise => { + // verify required parameter 'constructionCombineRequest' is not null or undefined + if (constructionCombineRequest === null || constructionCombineRequest === undefined) { + throw new RequiredError('constructionCombineRequest','Required parameter constructionCombineRequest was null or undefined when calling constructionCombine.'); + } + const localVarPath = `/construction/combine`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof constructionCombineRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(constructionCombineRequest !== undefined ? constructionCombineRequest : {}) + : (constructionCombineRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * Derive returns the AccountIdentifier associated with a public key. Blockchains that require an on-chain action to create an account should not implement this method. + * @summary Derive an AccountIdentifier from a PublicKey + * @param {ConstructionDeriveRequest} constructionDeriveRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + constructionDerive: async (constructionDeriveRequest: ConstructionDeriveRequest, options: any = {}): Promise => { + // verify required parameter 'constructionDeriveRequest' is not null or undefined + if (constructionDeriveRequest === null || constructionDeriveRequest === undefined) { + throw new RequiredError('constructionDeriveRequest','Required parameter constructionDeriveRequest was null or undefined when calling constructionDerive.'); + } + const localVarPath = `/construction/derive`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof constructionDeriveRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(constructionDeriveRequest !== undefined ? 
constructionDeriveRequest : {}) + : (constructionDeriveRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * TransactionHash returns the network-specific transaction hash for a signed transaction. + * @summary Get the Hash of a Signed Transaction + * @param {ConstructionHashRequest} constructionHashRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + constructionHash: async (constructionHashRequest: ConstructionHashRequest, options: any = {}): Promise => { + // verify required parameter 'constructionHashRequest' is not null or undefined + if (constructionHashRequest === null || constructionHashRequest === undefined) { + throw new RequiredError('constructionHashRequest','Required parameter constructionHashRequest was null or undefined when calling constructionHash.'); + } + const localVarPath = `/construction/hash`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof constructionHashRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(constructionHashRequest !== undefined ? constructionHashRequest : {}) + : (constructionHashRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * Get any information required to construct a transaction for a specific network. Metadata returned here could be a recent hash to use, an account sequence number, or even arbitrary chain state. The request used when calling this endpoint is created by calling `/construction/preprocess` in an offline environment. You should NEVER assume that the request sent to this endpoint will be created by the caller or populated with any custom parameters. This must occur in `/construction/preprocess`. It is important to clarify that this endpoint should not pre-construct any transactions for the client (this should happen in `/construction/payloads`). This endpoint is left purposely unstructured because of the wide scope of metadata that could be required. + * @summary Get Metadata for Transaction Construction + * @param {ConstructionMetadataRequest} constructionMetadataRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + constructionMetadata: async (constructionMetadataRequest: ConstructionMetadataRequest, options: any = {}): Promise => { + // verify required parameter 'constructionMetadataRequest' is not null or undefined + if (constructionMetadataRequest === null || constructionMetadataRequest === undefined) { + throw new RequiredError('constructionMetadataRequest','Required parameter constructionMetadataRequest was null or undefined when calling constructionMetadata.'); + } + const localVarPath = `/construction/metadata`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof constructionMetadataRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(constructionMetadataRequest !== undefined ? 
constructionMetadataRequest : {}) + : (constructionMetadataRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * Parse is called on both unsigned and signed transactions to understand the intent of the formulated transaction. This is run as a sanity check before signing (after `/construction/payloads`) and before broadcast (after `/construction/combine`). + * @summary Parse a Transaction + * @param {ConstructionParseRequest} constructionParseRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + constructionParse: async (constructionParseRequest: ConstructionParseRequest, options: any = {}): Promise => { + // verify required parameter 'constructionParseRequest' is not null or undefined + if (constructionParseRequest === null || constructionParseRequest === undefined) { + throw new RequiredError('constructionParseRequest','Required parameter constructionParseRequest was null or undefined when calling constructionParse.'); + } + const localVarPath = `/construction/parse`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof constructionParseRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(constructionParseRequest !== undefined ? constructionParseRequest : {}) + : (constructionParseRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * Payloads is called with an array of operations and the response from `/construction/metadata`. It returns an unsigned transaction blob and a collection of payloads that must be signed by particular AccountIdentifiers using a certain SignatureType. The array of operations provided in transaction construction often times can not specify all \"effects\" of a transaction (consider invoked transactions in Ethereum). 
However, they can deterministically specify the \"intent\" of the transaction, which is sufficient for construction. For this reason, parsing the corresponding transaction in the Data API (when it lands on chain) will contain a superset of whatever operations were provided during construction. + * @summary Generate an Unsigned Transaction and Signing Payloads + * @param {ConstructionPayloadsRequest} constructionPayloadsRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + constructionPayloads: async (constructionPayloadsRequest: ConstructionPayloadsRequest, options: any = {}): Promise => { + // verify required parameter 'constructionPayloadsRequest' is not null or undefined + if (constructionPayloadsRequest === null || constructionPayloadsRequest === undefined) { + throw new RequiredError('constructionPayloadsRequest','Required parameter constructionPayloadsRequest was null or undefined when calling constructionPayloads.'); + } + const localVarPath = `/construction/payloads`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof constructionPayloadsRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(constructionPayloadsRequest !== undefined ? constructionPayloadsRequest : {}) + : (constructionPayloadsRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * Preprocess is called prior to `/construction/payloads` to construct a request for any metadata that is needed for transaction construction given (i.e. account nonce). The `options` object returned from this endpoint will be sent to the `/construction/metadata` endpoint UNMODIFIED by the caller (in an offline execution environment). If your Construction API implementation has configuration options, they MUST be specified in the `/construction/preprocess` request (in the `metadata` field). + * @summary Create a Request to Fetch Metadata + * @param {ConstructionPreprocessRequest} constructionPreprocessRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + constructionPreprocess: async (constructionPreprocessRequest: ConstructionPreprocessRequest, options: any = {}): Promise => { + // verify required parameter 'constructionPreprocessRequest' is not null or undefined + if (constructionPreprocessRequest === null || constructionPreprocessRequest === undefined) { + throw new RequiredError('constructionPreprocessRequest','Required parameter constructionPreprocessRequest was null or undefined when calling constructionPreprocess.'); + } + const localVarPath = `/construction/preprocess`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof constructionPreprocessRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(constructionPreprocessRequest !== undefined ? 
constructionPreprocessRequest : {}) + : (constructionPreprocessRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * Submit a pre-signed transaction to the node. This call should not block on the transaction being included in a block. Rather, it should return immediately with an indication of whether or not the transaction was included in the mempool. The transaction submission response should only return a 200 status if the submitted transaction could be included in the mempool. Otherwise, it should return an error. + * @summary Submit a Signed Transaction + * @param {ConstructionSubmitRequest} constructionSubmitRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + constructionSubmit: async (constructionSubmitRequest: ConstructionSubmitRequest, options: any = {}): Promise => { + // verify required parameter 'constructionSubmitRequest' is not null or undefined + if (constructionSubmitRequest === null || constructionSubmitRequest === undefined) { + throw new RequiredError('constructionSubmitRequest','Required parameter constructionSubmitRequest was null or undefined when calling constructionSubmit.'); + } + const localVarPath = `/construction/submit`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof constructionSubmitRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(constructionSubmitRequest !== undefined ? constructionSubmitRequest : {}) + : (constructionSubmitRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * ConstructionApi - functional programming interface + * @export + */ +export const ConstructionApiFp = function(configuration?: Configuration) { + return { + /** + * Combine creates a network-specific transaction from an unsigned transaction and an array of provided signatures. The signed transaction returned from this method will be sent to the `/construction/submit` endpoint by the caller. 
+ * @summary Create Network Transaction from Signatures + * @param {ConstructionCombineRequest} constructionCombineRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async constructionCombine(constructionCombineRequest: ConstructionCombineRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await ConstructionApiAxiosParamCreator(configuration).constructionCombine(constructionCombineRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * Derive returns the AccountIdentifier associated with a public key. Blockchains that require an on-chain action to create an account should not implement this method. + * @summary Derive an AccountIdentifier from a PublicKey + * @param {ConstructionDeriveRequest} constructionDeriveRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async constructionDerive(constructionDeriveRequest: ConstructionDeriveRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await ConstructionApiAxiosParamCreator(configuration).constructionDerive(constructionDeriveRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * TransactionHash returns the network-specific transaction hash for a signed transaction. + * @summary Get the Hash of a Signed Transaction + * @param {ConstructionHashRequest} constructionHashRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async constructionHash(constructionHashRequest: ConstructionHashRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await ConstructionApiAxiosParamCreator(configuration).constructionHash(constructionHashRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * Get any information required to construct a transaction for a specific network. Metadata returned here could be a recent hash to use, an account sequence number, or even arbitrary chain state. The request used when calling this endpoint is created by calling `/construction/preprocess` in an offline environment. You should NEVER assume that the request sent to this endpoint will be created by the caller or populated with any custom parameters. This must occur in `/construction/preprocess`. It is important to clarify that this endpoint should not pre-construct any transactions for the client (this should happen in `/construction/payloads`). This endpoint is left purposely unstructured because of the wide scope of metadata that could be required. + * @summary Get Metadata for Transaction Construction + * @param {ConstructionMetadataRequest} constructionMetadataRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async constructionMetadata(constructionMetadataRequest: ConstructionMetadataRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await ConstructionApiAxiosParamCreator(configuration).constructionMetadata(constructionMetadataRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * Parse is called on both unsigned and signed transactions to understand the intent of the formulated transaction. This is run as a sanity check before signing (after `/construction/payloads`) and before broadcast (after `/construction/combine`). + * @summary Parse a Transaction + * @param {ConstructionParseRequest} constructionParseRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async constructionParse(constructionParseRequest: ConstructionParseRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await ConstructionApiAxiosParamCreator(configuration).constructionParse(constructionParseRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * Payloads is called with an array of operations and the response from `/construction/metadata`. It returns an unsigned transaction blob and a collection of payloads that must be signed by particular AccountIdentifiers using a certain SignatureType. The array of operations provided in transaction construction often times can not specify all \"effects\" of a transaction (consider invoked transactions in Ethereum). 
However, they can deterministically specify the \"intent\" of the transaction, which is sufficient for construction. For this reason, parsing the corresponding transaction in the Data API (when it lands on chain) will contain a superset of whatever operations were provided during construction. + * @summary Generate an Unsigned Transaction and Signing Payloads + * @param {ConstructionPayloadsRequest} constructionPayloadsRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async constructionPayloads(constructionPayloadsRequest: ConstructionPayloadsRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await ConstructionApiAxiosParamCreator(configuration).constructionPayloads(constructionPayloadsRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * Preprocess is called prior to `/construction/payloads` to construct a request for any metadata that is needed for transaction construction given (i.e. account nonce). The `options` object returned from this endpoint will be sent to the `/construction/metadata` endpoint UNMODIFIED by the caller (in an offline execution environment). If your Construction API implementation has configuration options, they MUST be specified in the `/construction/preprocess` request (in the `metadata` field). + * @summary Create a Request to Fetch Metadata + * @param {ConstructionPreprocessRequest} constructionPreprocessRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async constructionPreprocess(constructionPreprocessRequest: ConstructionPreprocessRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await ConstructionApiAxiosParamCreator(configuration).constructionPreprocess(constructionPreprocessRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * Submit a pre-signed transaction to the node. This call should not block on the transaction being included in a block. Rather, it should return immediately with an indication of whether or not the transaction was included in the mempool. The transaction submission response should only return a 200 status if the submitted transaction could be included in the mempool. Otherwise, it should return an error. + * @summary Submit a Signed Transaction + * @param {ConstructionSubmitRequest} constructionSubmitRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async constructionSubmit(constructionSubmitRequest: ConstructionSubmitRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await ConstructionApiAxiosParamCreator(configuration).constructionSubmit(constructionSubmitRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + } +}; + +/** + * ConstructionApi - factory interface + * @export + */ +export const ConstructionApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + return { + /** + * Combine creates a network-specific transaction from an unsigned transaction and an array of provided signatures. The signed transaction returned from this method will be sent to the `/construction/submit` endpoint by the caller. + * @summary Create Network Transaction from Signatures + * @param {ConstructionCombineRequest} constructionCombineRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + constructionCombine(constructionCombineRequest: ConstructionCombineRequest, options?: any): AxiosPromise { + return ConstructionApiFp(configuration).constructionCombine(constructionCombineRequest, options).then((request) => request(axios, basePath)); + }, + /** + * Derive returns the AccountIdentifier associated with a public key. Blockchains that require an on-chain action to create an account should not implement this method. + * @summary Derive an AccountIdentifier from a PublicKey + * @param {ConstructionDeriveRequest} constructionDeriveRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + constructionDerive(constructionDeriveRequest: ConstructionDeriveRequest, options?: any): AxiosPromise { + return ConstructionApiFp(configuration).constructionDerive(constructionDeriveRequest, options).then((request) => request(axios, basePath)); + }, + /** + * TransactionHash returns the network-specific transaction hash for a signed transaction. + * @summary Get the Hash of a Signed Transaction + * @param {ConstructionHashRequest} constructionHashRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + constructionHash(constructionHashRequest: ConstructionHashRequest, options?: any): AxiosPromise { + return ConstructionApiFp(configuration).constructionHash(constructionHashRequest, options).then((request) => request(axios, basePath)); + }, + /** + * Get any information required to construct a transaction for a specific network. Metadata returned here could be a recent hash to use, an account sequence number, or even arbitrary chain state. The request used when calling this endpoint is created by calling `/construction/preprocess` in an offline environment. You should NEVER assume that the request sent to this endpoint will be created by the caller or populated with any custom parameters. This must occur in `/construction/preprocess`. It is important to clarify that this endpoint should not pre-construct any transactions for the client (this should happen in `/construction/payloads`). This endpoint is left purposely unstructured because of the wide scope of metadata that could be required. + * @summary Get Metadata for Transaction Construction + * @param {ConstructionMetadataRequest} constructionMetadataRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + constructionMetadata(constructionMetadataRequest: ConstructionMetadataRequest, options?: any): AxiosPromise { + return ConstructionApiFp(configuration).constructionMetadata(constructionMetadataRequest, options).then((request) => request(axios, basePath)); + }, + /** + * Parse is called on both unsigned and signed transactions to understand the intent of the formulated transaction. This is run as a sanity check before signing (after `/construction/payloads`) and before broadcast (after `/construction/combine`). + * @summary Parse a Transaction + * @param {ConstructionParseRequest} constructionParseRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + constructionParse(constructionParseRequest: ConstructionParseRequest, options?: any): AxiosPromise { + return ConstructionApiFp(configuration).constructionParse(constructionParseRequest, options).then((request) => request(axios, basePath)); + }, + /** + * Payloads is called with an array of operations and the response from `/construction/metadata`. It returns an unsigned transaction blob and a collection of payloads that must be signed by particular AccountIdentifiers using a certain SignatureType. The array of operations provided in transaction construction often times can not specify all \"effects\" of a transaction (consider invoked transactions in Ethereum). However, they can deterministically specify the \"intent\" of the transaction, which is sufficient for construction. For this reason, parsing the corresponding transaction in the Data API (when it lands on chain) will contain a superset of whatever operations were provided during construction. + * @summary Generate an Unsigned Transaction and Signing Payloads + * @param {ConstructionPayloadsRequest} constructionPayloadsRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + constructionPayloads(constructionPayloadsRequest: ConstructionPayloadsRequest, options?: any): AxiosPromise { + return ConstructionApiFp(configuration).constructionPayloads(constructionPayloadsRequest, options).then((request) => request(axios, basePath)); + }, + /** + * Preprocess is called prior to `/construction/payloads` to construct a request for any metadata that is needed for transaction construction given (i.e. account nonce). The `options` object returned from this endpoint will be sent to the `/construction/metadata` endpoint UNMODIFIED by the caller (in an offline execution environment). If your Construction API implementation has configuration options, they MUST be specified in the `/construction/preprocess` request (in the `metadata` field). + * @summary Create a Request to Fetch Metadata + * @param {ConstructionPreprocessRequest} constructionPreprocessRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + constructionPreprocess(constructionPreprocessRequest: ConstructionPreprocessRequest, options?: any): AxiosPromise { + return ConstructionApiFp(configuration).constructionPreprocess(constructionPreprocessRequest, options).then((request) => request(axios, basePath)); + }, + /** + * Submit a pre-signed transaction to the node. This call should not block on the transaction being included in a block. Rather, it should return immediately with an indication of whether or not the transaction was included in the mempool. The transaction submission response should only return a 200 status if the submitted transaction could be included in the mempool. Otherwise, it should return an error. + * @summary Submit a Signed Transaction + * @param {ConstructionSubmitRequest} constructionSubmitRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + constructionSubmit(constructionSubmitRequest: ConstructionSubmitRequest, options?: any): AxiosPromise { + return ConstructionApiFp(configuration).constructionSubmit(constructionSubmitRequest, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * ConstructionApi - object-oriented interface + * @export + * @class ConstructionApi + * @extends {BaseAPI} + */ +export class ConstructionApi extends BaseAPI { + /** + * Combine creates a network-specific transaction from an unsigned transaction and an array of provided signatures. The signed transaction returned from this method will be sent to the `/construction/submit` endpoint by the caller. + * @summary Create Network Transaction from Signatures + * @param {ConstructionCombineRequest} constructionCombineRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ConstructionApi + */ + public constructionCombine(constructionCombineRequest: ConstructionCombineRequest, options?: any) { + return ConstructionApiFp(this.configuration).constructionCombine(constructionCombineRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Derive returns the AccountIdentifier associated with a public key. Blockchains that require an on-chain action to create an account should not implement this method. + * @summary Derive an AccountIdentifier from a PublicKey + * @param {ConstructionDeriveRequest} constructionDeriveRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ConstructionApi + */ + public constructionDerive(constructionDeriveRequest: ConstructionDeriveRequest, options?: any) { + return ConstructionApiFp(this.configuration).constructionDerive(constructionDeriveRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * TransactionHash returns the network-specific transaction hash for a signed transaction. 
+ * @summary Get the Hash of a Signed Transaction + * @param {ConstructionHashRequest} constructionHashRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ConstructionApi + */ + public constructionHash(constructionHashRequest: ConstructionHashRequest, options?: any) { + return ConstructionApiFp(this.configuration).constructionHash(constructionHashRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Get any information required to construct a transaction for a specific network. Metadata returned here could be a recent hash to use, an account sequence number, or even arbitrary chain state. The request used when calling this endpoint is created by calling `/construction/preprocess` in an offline environment. You should NEVER assume that the request sent to this endpoint will be created by the caller or populated with any custom parameters. This must occur in `/construction/preprocess`. It is important to clarify that this endpoint should not pre-construct any transactions for the client (this should happen in `/construction/payloads`). This endpoint is left purposely unstructured because of the wide scope of metadata that could be required. + * @summary Get Metadata for Transaction Construction + * @param {ConstructionMetadataRequest} constructionMetadataRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ConstructionApi + */ + public constructionMetadata(constructionMetadataRequest: ConstructionMetadataRequest, options?: any) { + return ConstructionApiFp(this.configuration).constructionMetadata(constructionMetadataRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Parse is called on both unsigned and signed transactions to understand the intent of the formulated transaction. 
This is run as a sanity check before signing (after `/construction/payloads`) and before broadcast (after `/construction/combine`). + * @summary Parse a Transaction + * @param {ConstructionParseRequest} constructionParseRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ConstructionApi + */ + public constructionParse(constructionParseRequest: ConstructionParseRequest, options?: any) { + return ConstructionApiFp(this.configuration).constructionParse(constructionParseRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Payloads is called with an array of operations and the response from `/construction/metadata`. It returns an unsigned transaction blob and a collection of payloads that must be signed by particular AccountIdentifiers using a certain SignatureType. The array of operations provided in transaction construction often times can not specify all \"effects\" of a transaction (consider invoked transactions in Ethereum). However, they can deterministically specify the \"intent\" of the transaction, which is sufficient for construction. For this reason, parsing the corresponding transaction in the Data API (when it lands on chain) will contain a superset of whatever operations were provided during construction. + * @summary Generate an Unsigned Transaction and Signing Payloads + * @param {ConstructionPayloadsRequest} constructionPayloadsRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof ConstructionApi + */ + public constructionPayloads(constructionPayloadsRequest: ConstructionPayloadsRequest, options?: any) { + return ConstructionApiFp(this.configuration).constructionPayloads(constructionPayloadsRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Preprocess is called prior to `/construction/payloads` to construct a request for any metadata that is needed for transaction construction given (i.e. account nonce). The `options` object returned from this endpoint will be sent to the `/construction/metadata` endpoint UNMODIFIED by the caller (in an offline execution environment). If your Construction API implementation has configuration options, they MUST be specified in the `/construction/preprocess` request (in the `metadata` field). + * @summary Create a Request to Fetch Metadata + * @param {ConstructionPreprocessRequest} constructionPreprocessRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ConstructionApi + */ + public constructionPreprocess(constructionPreprocessRequest: ConstructionPreprocessRequest, options?: any) { + return ConstructionApiFp(this.configuration).constructionPreprocess(constructionPreprocessRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Submit a pre-signed transaction to the node. This call should not block on the transaction being included in a block. Rather, it should return immediately with an indication of whether or not the transaction was included in the mempool. The transaction submission response should only return a 200 status if the submitted transaction could be included in the mempool. Otherwise, it should return an error. + * @summary Submit a Signed Transaction + * @param {ConstructionSubmitRequest} constructionSubmitRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof ConstructionApi + */ + public constructionSubmit(constructionSubmitRequest: ConstructionSubmitRequest, options?: any) { + return ConstructionApiFp(this.configuration).constructionSubmit(constructionSubmitRequest, options).then((request) => request(this.axios, this.basePath)); + } +} + + +/** + * EventsApi - axios parameter creator + * @export + */ +export const EventsApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * `/events/blocks` allows the caller to query a sequence of BlockEvents indicating which blocks were added and removed from storage to reach the current state. Following BlockEvents allows lightweight clients to update their state without needing to implement their own syncing logic (like finding the common parent in a reorg). `/events/blocks` is considered an \"indexer\" endpoint and Rosetta implementations are not required to complete it to adhere to the Rosetta spec. However, any Rosetta \"indexer\" MUST support this endpoint. + * @summary [INDEXER] Get a range of BlockEvents + * @param {EventsBlocksRequest} eventsBlocksRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + eventsBlocks: async (eventsBlocksRequest: EventsBlocksRequest, options: any = {}): Promise => { + // verify required parameter 'eventsBlocksRequest' is not null or undefined + if (eventsBlocksRequest === null || eventsBlocksRequest === undefined) { + throw new RequiredError('eventsBlocksRequest','Required parameter eventsBlocksRequest was null or undefined when calling eventsBlocks.'); + } + const localVarPath = `/events/blocks`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof eventsBlocksRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(eventsBlocksRequest !== undefined ? eventsBlocksRequest : {}) + : (eventsBlocksRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * EventsApi - functional programming interface + * @export + */ +export const EventsApiFp = function(configuration?: Configuration) { + return { + /** + * `/events/blocks` allows the caller to query a sequence of BlockEvents indicating which blocks were added and removed from storage to reach the current state. Following BlockEvents allows lightweight clients to update their state without needing to implement their own syncing logic (like finding the common parent in a reorg). 
`/events/blocks` is considered an \"indexer\" endpoint and Rosetta implementations are not required to complete it to adhere to the Rosetta spec. However, any Rosetta \"indexer\" MUST support this endpoint. + * @summary [INDEXER] Get a range of BlockEvents + * @param {EventsBlocksRequest} eventsBlocksRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async eventsBlocks(eventsBlocksRequest: EventsBlocksRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await EventsApiAxiosParamCreator(configuration).eventsBlocks(eventsBlocksRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + } +}; + +/** + * EventsApi - factory interface + * @export + */ +export const EventsApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + return { + /** + * `/events/blocks` allows the caller to query a sequence of BlockEvents indicating which blocks were added and removed from storage to reach the current state. Following BlockEvents allows lightweight clients to update their state without needing to implement their own syncing logic (like finding the common parent in a reorg). `/events/blocks` is considered an \"indexer\" endpoint and Rosetta implementations are not required to complete it to adhere to the Rosetta spec. However, any Rosetta \"indexer\" MUST support this endpoint. + * @summary [INDEXER] Get a range of BlockEvents + * @param {EventsBlocksRequest} eventsBlocksRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + eventsBlocks(eventsBlocksRequest: EventsBlocksRequest, options?: any): AxiosPromise { + return EventsApiFp(configuration).eventsBlocks(eventsBlocksRequest, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * EventsApi - object-oriented interface + * @export + * @class EventsApi + * @extends {BaseAPI} + */ +export class EventsApi extends BaseAPI { + /** + * `/events/blocks` allows the caller to query a sequence of BlockEvents indicating which blocks were added and removed from storage to reach the current state. Following BlockEvents allows lightweight clients to update their state without needing to implement their own syncing logic (like finding the common parent in a reorg). `/events/blocks` is considered an \"indexer\" endpoint and Rosetta implementations are not required to complete it to adhere to the Rosetta spec. However, any Rosetta \"indexer\" MUST support this endpoint. + * @summary [INDEXER] Get a range of BlockEvents + * @param {EventsBlocksRequest} eventsBlocksRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof EventsApi + */ + public eventsBlocks(eventsBlocksRequest: EventsBlocksRequest, options?: any) { + return EventsApiFp(this.configuration).eventsBlocks(eventsBlocksRequest, options).then((request) => request(this.axios, this.basePath)); + } +} + + +/** + * MempoolApi - axios parameter creator + * @export + */ +export const MempoolApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * Get all Transaction Identifiers in the mempool + * @summary Get All Mempool Transactions + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + mempool: async (networkRequest: NetworkRequest, options: any = {}): Promise => { + // verify required parameter 'networkRequest' is not null or undefined + if (networkRequest === null || networkRequest === undefined) { + throw new RequiredError('networkRequest','Required parameter networkRequest was null or undefined when calling mempool.'); + } + const localVarPath = `/mempool`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof networkRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(networkRequest !== undefined ? 
networkRequest : {}) + : (networkRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * Get a transaction in the mempool by its Transaction Identifier. This is a separate request than fetching a block transaction (/block/transaction) because some blockchain nodes need to know that a transaction query is for something in the mempool instead of a transaction in a block. Transactions may not be fully parsable until they are in a block (ex: may not be possible to determine the fee to pay before a transaction is executed). On this endpoint, it is ok that returned transactions are only estimates of what may actually be included in a block. + * @summary Get a Mempool Transaction + * @param {MempoolTransactionRequest} mempoolTransactionRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + mempoolTransaction: async (mempoolTransactionRequest: MempoolTransactionRequest, options: any = {}): Promise => { + // verify required parameter 'mempoolTransactionRequest' is not null or undefined + if (mempoolTransactionRequest === null || mempoolTransactionRequest === undefined) { + throw new RequiredError('mempoolTransactionRequest','Required parameter mempoolTransactionRequest was null or undefined when calling mempoolTransaction.'); + } + const localVarPath = `/mempool/transaction`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof mempoolTransactionRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(mempoolTransactionRequest !== undefined ? mempoolTransactionRequest : {}) + : (mempoolTransactionRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * MempoolApi - functional programming interface + * @export + */ +export const MempoolApiFp = function(configuration?: Configuration) { + return { + /** + * Get all Transaction Identifiers in the mempool + * @summary Get All Mempool Transactions + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async mempool(networkRequest: NetworkRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await MempoolApiAxiosParamCreator(configuration).mempool(networkRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * Get a transaction in the mempool by its Transaction Identifier. This is a separate request than fetching a block transaction (/block/transaction) because some blockchain nodes need to know that a transaction query is for something in the mempool instead of a transaction in a block. Transactions may not be fully parsable until they are in a block (ex: may not be possible to determine the fee to pay before a transaction is executed). On this endpoint, it is ok that returned transactions are only estimates of what may actually be included in a block. + * @summary Get a Mempool Transaction + * @param {MempoolTransactionRequest} mempoolTransactionRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async mempoolTransaction(mempoolTransactionRequest: MempoolTransactionRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await MempoolApiAxiosParamCreator(configuration).mempoolTransaction(mempoolTransactionRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + } +}; + +/** + * MempoolApi - factory interface + * @export + */ +export const MempoolApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + return { + /** + * Get all Transaction Identifiers in the mempool + * @summary Get All Mempool Transactions + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + mempool(networkRequest: NetworkRequest, options?: any): AxiosPromise { + return MempoolApiFp(configuration).mempool(networkRequest, options).then((request) => request(axios, basePath)); + }, + /** + * Get a transaction in the mempool by its Transaction Identifier. This is a separate request than fetching a block transaction (/block/transaction) because some blockchain nodes need to know that a transaction query is for something in the mempool instead of a transaction in a block. Transactions may not be fully parsable until they are in a block (ex: may not be possible to determine the fee to pay before a transaction is executed). On this endpoint, it is ok that returned transactions are only estimates of what may actually be included in a block. + * @summary Get a Mempool Transaction + * @param {MempoolTransactionRequest} mempoolTransactionRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + mempoolTransaction(mempoolTransactionRequest: MempoolTransactionRequest, options?: any): AxiosPromise { + return MempoolApiFp(configuration).mempoolTransaction(mempoolTransactionRequest, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * MempoolApi - object-oriented interface + * @export + * @class MempoolApi + * @extends {BaseAPI} + */ +export class MempoolApi extends BaseAPI { + /** + * Get all Transaction Identifiers in the mempool + * @summary Get All Mempool Transactions + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof MempoolApi + */ + public mempool(networkRequest: NetworkRequest, options?: any) { + return MempoolApiFp(this.configuration).mempool(networkRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * Get a transaction in the mempool by its Transaction Identifier. This is a separate request than fetching a block transaction (/block/transaction) because some blockchain nodes need to know that a transaction query is for something in the mempool instead of a transaction in a block. Transactions may not be fully parsable until they are in a block (ex: may not be possible to determine the fee to pay before a transaction is executed). On this endpoint, it is ok that returned transactions are only estimates of what may actually be included in a block. + * @summary Get a Mempool Transaction + * @param {MempoolTransactionRequest} mempoolTransactionRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof MempoolApi + */ + public mempoolTransaction(mempoolTransactionRequest: MempoolTransactionRequest, options?: any) { + return MempoolApiFp(this.configuration).mempoolTransaction(mempoolTransactionRequest, options).then((request) => request(this.axios, this.basePath)); + } +} + + +/** + * NetworkApi - axios parameter creator + * @export + */ +export const NetworkApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * This endpoint returns a list of NetworkIdentifiers that the Rosetta server supports. + * @summary Get List of Available Networks + * @param {MetadataRequest} metadataRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + networkList: async (metadataRequest: MetadataRequest, options: any = {}): Promise => { + // verify required parameter 'metadataRequest' is not null or undefined + if (metadataRequest === null || metadataRequest === undefined) { + throw new RequiredError('metadataRequest','Required parameter metadataRequest was null or undefined when calling networkList.'); + } + const localVarPath = `/network/list`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. 
+ const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof metadataRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(metadataRequest !== undefined ? metadataRequest : {}) + : (metadataRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * This endpoint returns the version information and allowed network-specific types for a NetworkIdentifier. Any NetworkIdentifier returned by /network/list should be accessible here. Because options are retrievable in the context of a NetworkIdentifier, it is possible to define unique options for each network. + * @summary Get Network Options + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + networkOptions: async (networkRequest: NetworkRequest, options: any = {}): Promise => { + // verify required parameter 'networkRequest' is not null or undefined + if (networkRequest === null || networkRequest === undefined) { + throw new RequiredError('networkRequest','Required parameter networkRequest was null or undefined when calling networkOptions.'); + } + const localVarPath = `/network/options`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof networkRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(networkRequest !== undefined ? 
networkRequest : {}) + : (networkRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + /** + * This endpoint returns the current status of the network requested. Any NetworkIdentifier returned by /network/list should be accessible here. + * @summary Get Network Status + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + networkStatus: async (networkRequest: NetworkRequest, options: any = {}): Promise => { + // verify required parameter 'networkRequest' is not null or undefined + if (networkRequest === null || networkRequest === undefined) { + throw new RequiredError('networkRequest','Required parameter networkRequest was null or undefined when calling networkStatus.'); + } + const localVarPath = `/network/status`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof networkRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(networkRequest !== undefined ? networkRequest : {}) + : (networkRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * NetworkApi - functional programming interface + * @export + */ +export const NetworkApiFp = function(configuration?: Configuration) { + return { + /** + * This endpoint returns a list of NetworkIdentifiers that the Rosetta server supports. + * @summary Get List of Available Networks + * @param {MetadataRequest} metadataRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async networkList(metadataRequest: MetadataRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await NetworkApiAxiosParamCreator(configuration).networkList(metadataRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * This endpoint returns the version information and allowed network-specific types for a NetworkIdentifier. Any NetworkIdentifier returned by /network/list should be accessible here. Because options are retrievable in the context of a NetworkIdentifier, it is possible to define unique options for each network. 
+ * @summary Get Network Options + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async networkOptions(networkRequest: NetworkRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await NetworkApiAxiosParamCreator(configuration).networkOptions(networkRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + /** + * This endpoint returns the current status of the network requested. Any NetworkIdentifier returned by /network/list should be accessible here. + * @summary Get Network Status + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + async networkStatus(networkRequest: NetworkRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await NetworkApiAxiosParamCreator(configuration).networkStatus(networkRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + } +}; + +/** + * NetworkApi - factory interface + * @export + */ +export const NetworkApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + return { + /** + * This endpoint returns a list of NetworkIdentifiers that the Rosetta server supports. + * @summary Get List of Available Networks + * @param {MetadataRequest} metadataRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + networkList(metadataRequest: MetadataRequest, options?: any): AxiosPromise { + return NetworkApiFp(configuration).networkList(metadataRequest, options).then((request) => request(axios, basePath)); + }, + /** + * This endpoint returns the version information and allowed network-specific types for a NetworkIdentifier. Any NetworkIdentifier returned by /network/list should be accessible here. Because options are retrievable in the context of a NetworkIdentifier, it is possible to define unique options for each network. + * @summary Get Network Options + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + networkOptions(networkRequest: NetworkRequest, options?: any): AxiosPromise { + return NetworkApiFp(configuration).networkOptions(networkRequest, options).then((request) => request(axios, basePath)); + }, + /** + * This endpoint returns the current status of the network requested. Any NetworkIdentifier returned by /network/list should be accessible here. + * @summary Get Network Status + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + networkStatus(networkRequest: NetworkRequest, options?: any): AxiosPromise { + return NetworkApiFp(configuration).networkStatus(networkRequest, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * NetworkApi - object-oriented interface + * @export + * @class NetworkApi + * @extends {BaseAPI} + */ +export class NetworkApi extends BaseAPI { + /** + * This endpoint returns a list of NetworkIdentifiers that the Rosetta server supports. + * @summary Get List of Available Networks + * @param {MetadataRequest} metadataRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + * @memberof NetworkApi + */ + public networkList(metadataRequest: MetadataRequest, options?: any) { + return NetworkApiFp(this.configuration).networkList(metadataRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * This endpoint returns the version information and allowed network-specific types for a NetworkIdentifier. Any NetworkIdentifier returned by /network/list should be accessible here. Because options are retrievable in the context of a NetworkIdentifier, it is possible to define unique options for each network. + * @summary Get Network Options + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof NetworkApi + */ + public networkOptions(networkRequest: NetworkRequest, options?: any) { + return NetworkApiFp(this.configuration).networkOptions(networkRequest, options).then((request) => request(this.axios, this.basePath)); + } + + /** + * This endpoint returns the current status of the network requested. Any NetworkIdentifier returned by /network/list should be accessible here. + * @summary Get Network Status + * @param {NetworkRequest} networkRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof NetworkApi + */ + public networkStatus(networkRequest: NetworkRequest, options?: any) { + return NetworkApiFp(this.configuration).networkStatus(networkRequest, options).then((request) => request(this.axios, this.basePath)); + } +} + + +/** + * SearchApi - axios parameter creator + * @export + */ +export const SearchApiAxiosParamCreator = function (configuration?: Configuration) { + return { + /** + * `/search/transactions` allows the caller to search for transactions that meet certain conditions. Some conditions include matching a transaction hash, containing an operation with a certain status, or containing an operation that affects a certain account. 
`/search/transactions` is considered an \"indexer\" endpoint and Rosetta implementations are not required to complete it to adhere to the Rosetta spec. However, any Rosetta \"indexer\" MUST support this endpoint. + * @summary [INDEXER] Search for Transactions + * @param {SearchTransactionsRequest} searchTransactionsRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + searchTransactions: async (searchTransactionsRequest: SearchTransactionsRequest, options: any = {}): Promise => { + // verify required parameter 'searchTransactionsRequest' is not null or undefined + if (searchTransactionsRequest === null || searchTransactionsRequest === undefined) { + throw new RequiredError('searchTransactionsRequest','Required parameter searchTransactionsRequest was null or undefined when calling searchTransactions.'); + } + const localVarPath = `/search/transactions`; + // use dummy base URL string because the URL constructor only accepts absolute URLs. + const localVarUrlObj = new URL(localVarPath, 'https://example.com'); + let baseOptions; + if (configuration) { + baseOptions = configuration.baseOptions; + } + + const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options}; + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + const queryParameters = new URLSearchParams(localVarUrlObj.search); + for (const key in localVarQueryParameter) { + queryParameters.set(key, localVarQueryParameter[key]); + } + for (const key in options.query) { + queryParameters.set(key, options.query[key]); + } + localVarUrlObj.search = (new URLSearchParams(queryParameters)).toString(); + let headersFromBaseOptions = baseOptions && baseOptions.headers ? 
baseOptions.headers : {}; + localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers}; + const nonString = typeof searchTransactionsRequest !== 'string'; + const needsSerialization = nonString && configuration && configuration.isJsonMime + ? configuration.isJsonMime(localVarRequestOptions.headers['Content-Type']) + : nonString; + localVarRequestOptions.data = needsSerialization + ? JSON.stringify(searchTransactionsRequest !== undefined ? searchTransactionsRequest : {}) + : (searchTransactionsRequest || ""); + + return { + url: localVarUrlObj.pathname + localVarUrlObj.search + localVarUrlObj.hash, + options: localVarRequestOptions, + }; + }, + } +}; + +/** + * SearchApi - functional programming interface + * @export + */ +export const SearchApiFp = function(configuration?: Configuration) { + return { + /** + * `/search/transactions` allows the caller to search for transactions that meet certain conditions. Some conditions include matching a transaction hash, containing an operation with a certain status, or containing an operation that affects a certain account. `/search/transactions` is considered an \"indexer\" endpoint and Rosetta implementations are not required to complete it to adhere to the Rosetta spec. However, any Rosetta \"indexer\" MUST support this endpoint. + * @summary [INDEXER] Search for Transactions + * @param {SearchTransactionsRequest} searchTransactionsRequest + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + async searchTransactions(searchTransactionsRequest: SearchTransactionsRequest, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise> { + const localVarAxiosArgs = await SearchApiAxiosParamCreator(configuration).searchTransactions(searchTransactionsRequest, options); + return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => { + const axiosRequestArgs = {...localVarAxiosArgs.options, url: basePath + localVarAxiosArgs.url}; + return axios.request(axiosRequestArgs); + }; + }, + } +}; + +/** + * SearchApi - factory interface + * @export + */ +export const SearchApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) { + return { + /** + * `/search/transactions` allows the caller to search for transactions that meet certain conditions. Some conditions include matching a transaction hash, containing an operation with a certain status, or containing an operation that affects a certain account. `/search/transactions` is considered an \"indexer\" endpoint and Rosetta implementations are not required to complete it to adhere to the Rosetta spec. However, any Rosetta \"indexer\" MUST support this endpoint. + * @summary [INDEXER] Search for Transactions + * @param {SearchTransactionsRequest} searchTransactionsRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + searchTransactions(searchTransactionsRequest: SearchTransactionsRequest, options?: any): AxiosPromise { + return SearchApiFp(configuration).searchTransactions(searchTransactionsRequest, options).then((request) => request(axios, basePath)); + }, + }; +}; + +/** + * SearchApi - object-oriented interface + * @export + * @class SearchApi + * @extends {BaseAPI} + */ +export class SearchApi extends BaseAPI { + /** + * `/search/transactions` allows the caller to search for transactions that meet certain conditions. 
Some conditions include matching a transaction hash, containing an operation with a certain status, or containing an operation that affects a certain account. `/search/transactions` is considered an \"indexer\" endpoint and Rosetta implementations are not required to complete it to adhere to the Rosetta spec. However, any Rosetta \"indexer\" MUST support this endpoint. + * @summary [INDEXER] Search for Transactions + * @param {SearchTransactionsRequest} searchTransactionsRequest + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof SearchApi + */ + public searchTransactions(searchTransactionsRequest: SearchTransactionsRequest, options?: any) { + return SearchApiFp(this.configuration).searchTransactions(searchTransactionsRequest, options).then((request) => request(this.axios, this.basePath)); + } +} + + diff --git a/ironfish-rosetta-api/src/types/base.ts b/ironfish-rosetta-api/src/types/base.ts new file mode 100644 index 0000000000..11a4b500a9 --- /dev/null +++ b/ironfish-rosetta-api/src/types/base.ts @@ -0,0 +1,71 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * Rosetta + * Build Once. Integrate Your Blockchain Everywhere. + * + * The version of the OpenAPI document: 1.4.9 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. 
+ */ + + +import { Configuration } from "./configuration"; +// Some imports not used depending on template conditions +// @ts-ignore +import globalAxios, { AxiosPromise, AxiosInstance } from 'axios'; + +export const BASE_PATH = "http://localhost".replace(/\/+$/, ""); + +/** + * + * @export + */ +export const COLLECTION_FORMATS = { + csv: ",", + ssv: " ", + tsv: "\t", + pipes: "|", +}; + +/** + * + * @export + * @interface RequestArgs + */ +export interface RequestArgs { + url: string; + options: any; +} + +/** + * + * @export + * @class BaseAPI + */ +export class BaseAPI { + protected configuration: Configuration | undefined; + + constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) { + if (configuration) { + this.configuration = configuration; + this.basePath = configuration.basePath || this.basePath; + } + } +}; + +/** + * + * @export + * @class RequiredError + * @extends {Error} + */ +export class RequiredError extends Error { + name: "RequiredError" = "RequiredError"; + constructor(public field: string, msg?: string) { + super(msg); + } +} diff --git a/ironfish-rosetta-api/src/types/configuration.ts b/ironfish-rosetta-api/src/types/configuration.ts new file mode 100644 index 0000000000..0d31924f60 --- /dev/null +++ b/ironfish-rosetta-api/src/types/configuration.ts @@ -0,0 +1,101 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * Rosetta + * Build Once. Integrate Your Blockchain Everywhere. + * + * The version of the OpenAPI document: 1.4.9 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. 
+ */ + + +export interface ConfigurationParameters { + apiKey?: string | Promise | ((name: string) => string) | ((name: string) => Promise); + username?: string; + password?: string; + accessToken?: string | Promise | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise); + basePath?: string; + baseOptions?: any; + formDataCtor?: new () => any; +} + +export class Configuration { + /** + * parameter for apiKey security + * @param name security name + * @memberof Configuration + */ + apiKey?: string | Promise | ((name: string) => string) | ((name: string) => Promise); + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + username?: string; + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + password?: string; + /** + * parameter for oauth2 security + * @param name security name + * @param scopes oauth2 scope + * @memberof Configuration + */ + accessToken?: string | Promise | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise); + /** + * override base path + * + * @type {string} + * @memberof Configuration + */ + basePath?: string; + /** + * base options for axios calls + * + * @type {any} + * @memberof Configuration + */ + baseOptions?: any; + /** + * The FormData constructor that will be used to create multipart form data + * requests. You can inject this here so that execution environments that + * do not support the FormData class can still run the generated client. 
+ * + * @type {new () => FormData} + */ + formDataCtor?: new () => any; + + constructor(param: ConfigurationParameters = {}) { + this.apiKey = param.apiKey; + this.username = param.username; + this.password = param.password; + this.accessToken = param.accessToken; + this.basePath = param.basePath; + this.baseOptions = param.baseOptions; + this.formDataCtor = param.formDataCtor; + } + + /** + * Check if the given MIME is a JSON MIME. + * JSON MIME examples: + * application/json + * application/json; charset=UTF8 + * APPLICATION/JSON + * application/vnd.company+json + * @param mime - MIME (Multipurpose Internet Mail Extensions) + * @return True if the given MIME is JSON, false otherwise. + */ + public isJsonMime(mime: string): boolean { + const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i'); + return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json'); + } +} diff --git a/ironfish-rosetta-api/src/types/index.ts b/ironfish-rosetta-api/src/types/index.ts new file mode 100644 index 0000000000..1a7872e760 --- /dev/null +++ b/ironfish-rosetta-api/src/types/index.ts @@ -0,0 +1,7 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './RouteError' +export * from './api' +export * from './searchBlocksApi' diff --git a/ironfish-rosetta-api/src/types/searchBlocksApi.ts b/ironfish-rosetta-api/src/types/searchBlocksApi.ts new file mode 100644 index 0000000000..b8f1f3abaf --- /dev/null +++ b/ironfish-rosetta-api/src/types/searchBlocksApi.ts @@ -0,0 +1,56 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { Block, NetworkIdentifier } from './api' + +/** + * SearchBlocksRequest is used to search for transactions matching a set of provided conditions in canonical blocks. + * @export + * @interface SearchBlocksRequest + */ +export interface SearchBlocksRequest { + /** + * + * @type {NetworkIdentifier} + * @memberof SearchBlocksRequest + */ + network_identifier: NetworkIdentifier + /** + * seek parameter to offset the pagination at a previous block.\n + * @type {number} + * @memberof SearchBlocksRequest + */ + seek?: number + /** + * limit is the maximum number of blocks to return in one call. The implementation may return <= limit blocks. + * @type {number} + * @memberof SearchBlocksRequest + */ + limit?: number + /** + * + * @type {string} + * @memberof SearchBlocksRequest + */ + query?: string +} +/** + * SearchBlocksResponse contains an ordered collection of Blocks that match the query in SearchBlocksRequest. These Blocks are sorted from most recent block to oldest block. + * @export + * @interface SearchBlocksResponse + */ +export interface SearchBlocksResponse { + /** + * blocks is an array of Block sorted by most recent BlockIdentifier + * @type {Array} + * @memberof SearchBlocksResponse + */ + blocks: Array + /** + * next_offset is the next offset to use when paginating through block results. If this field is not populated, there are no more blocks to query. + * @type {number} + * @memberof SearchBlocksResponse + */ + next_offset?: number +} diff --git a/ironfish-rosetta-api/src/types/swagger-routes-express.d.ts b/ironfish-rosetta-api/src/types/swagger-routes-express.d.ts new file mode 100644 index 0000000000..f2ca553ac0 --- /dev/null +++ b/ironfish-rosetta-api/src/types/swagger-routes-express.d.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +declare module 'swagger-routes-express' diff --git a/ironfish-rosetta-api/src/utils/logger.ts b/ironfish-rosetta-api/src/utils/logger.ts new file mode 100644 index 0000000000..2dabceba4e --- /dev/null +++ b/ironfish-rosetta-api/src/utils/logger.ts @@ -0,0 +1,20 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { transports, createLogger, format } from 'winston' + +/** + * Logger system using winston + * Allows to write errors to error.log and other log to combined.log + */ +export const Logger = createLogger({ + level: 'debug', + format: format.combine(format.timestamp(), format.json()), + defaultMeta: { service: 'user-service' }, + transports: [ + new transports.Console({ format: format.simple() }), + new transports.File({ filename: 'error.log', level: 'error' }), + new transports.File({ filename: 'combined.log' }), + ], +}) diff --git a/ironfish-rosetta-api/src/utils/networkIdentifierUtil.test.ts b/ironfish-rosetta-api/src/utils/networkIdentifierUtil.test.ts new file mode 100644 index 0000000000..82504739f8 --- /dev/null +++ b/ironfish-rosetta-api/src/utils/networkIdentifierUtil.test.ts @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { isValidNetworkIdentifier } from './networkIdentifierUtil' +import { networkIdentifier } from '../config' + +describe('isValidNetworkIdentifier util', () => { + it(`returns false if it's not valid`, () => { + expect( + isValidNetworkIdentifier({ blockchain: 'this is not iron fish', network: 'staging' }), + ).toBe(false) + }) + + it(`returns true if it's valid`, () => { + expect(isValidNetworkIdentifier(networkIdentifier)).toBe(true) + }) +}) diff --git a/ironfish-rosetta-api/src/utils/networkIdentifierUtil.ts b/ironfish-rosetta-api/src/utils/networkIdentifierUtil.ts new file mode 100644 index 0000000000..b8505cd699 --- /dev/null +++ b/ironfish-rosetta-api/src/utils/networkIdentifierUtil.ts @@ -0,0 +1,13 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { networkIdentifier as networkIdentifierConfig } from '../config' +import { NetworkIdentifier } from '../types' + +export const isValidNetworkIdentifier = (networkIdentifier: NetworkIdentifier): boolean => { + return ( + networkIdentifier.blockchain === networkIdentifierConfig.blockchain && + networkIdentifier.network === networkIdentifierConfig.network + ) +} diff --git a/ironfish-rosetta-api/tsconfig.eslint.json b/ironfish-rosetta-api/tsconfig.eslint.json new file mode 100644 index 0000000000..a8d4317b49 --- /dev/null +++ b/ironfish-rosetta-api/tsconfig.eslint.json @@ -0,0 +1,4 @@ +{ + "extends": "./tsconfig.json", + "exclude": [] +} diff --git a/ironfish-rosetta-api/tsconfig.json b/ironfish-rosetta-api/tsconfig.json new file mode 100644 index 0000000000..7efb9dacb6 --- /dev/null +++ b/ironfish-rosetta-api/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "../config/tsconfig.base.json", + "compilerOptions": { + "outDir": "build", + "resolveJsonModule": true, + "emitDecoratorMetadata": true, + "experimentalDecorators": true + }, + 
"include": ["ormconfig.ts", "src", "src/config/*.json"], + "exclude": ["src/**/*.test.*"], + "references": [ + { "path": "../ironfish" }, + ] +} diff --git a/ironfish-rosetta-api/tsconfig.test.json b/ironfish-rosetta-api/tsconfig.test.json new file mode 100644 index 0000000000..33d207ff42 --- /dev/null +++ b/ironfish-rosetta-api/tsconfig.test.json @@ -0,0 +1,8 @@ +{ + "extends": "../config/tsconfig.base.json", + "compilerOptions": { + "noEmit": true + }, + "include": [], + "references": [{ "path": "../ironfish" }] +} diff --git a/ironfish-rust/Cargo.lock b/ironfish-rust/Cargo.lock new file mode 100644 index 0000000000..02750bc681 --- /dev/null +++ b/ironfish-rust/Cargo.lock @@ -0,0 +1,1091 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "addchain" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1177222c93a7bb492002e9a3cd947c7fd869e085d6e81a9e415ff1be65b3489c" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "aes" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54eb1d8fe354e5fc611daf4f2ea97dd45a765f4f1e4512306ec183ae2e8f20c9" +dependencies = [ + "aes-soft", + "aesni", + "block-cipher-trait", +] + +[[package]] +name = "aes-soft" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfd7e7ae3f9a1fb5c03b389fc6bb9a51400d0c13053f0dca698c832bfd893a0d" +dependencies = [ + "block-cipher-trait", + "byteorder", + "opaque-debug 0.2.3", +] + +[[package]] +name = "aesni" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f70a6b5f971e473091ab7cfb5ffac6cde81666c4556751d8d5620ead8abf100" +dependencies = [ + "block-cipher-trait", + "opaque-debug 0.2.3", +] + +[[package]] +name = "anyhow" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"28b2cd92db5cbd74e8e5028f7e27dd7aa3090e89e4f2a197cc7c8dfb69c7063b" + +[[package]] +name = "arrayref" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "base64" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30e93c03064e7590d0466209155251b90c22e37fab1daf2771582598b5827557" +dependencies = [ + "byteorder", +] + +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" + +[[package]] +name = "bellman" +version = "0.6.0" +source = "git+https://github.com/iron-fish/librustzcash.git#cd0afdc0f3eea4f78f14132616c8ec9a4c9c44d5" +dependencies = [ + "bit-vec", + "blake2s_simd", + "byteorder", + "ff", + "futures", + "group", + "pairing", + "rand_core 0.5.1", + "subtle", +] + +[[package]] +name = "bit-vec" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f" + +[[package]] +name = "bitflags" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" + +[[package]] +name = "blake2b_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afa748e348ad3be8263be728124b24a24f268266f6f5d58af9d75f6a40b5c587" 
+dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + +[[package]] +name = "blake2s_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e461a7034e85b211a4acb57ee2e6730b32912b06c08cc242243c39fc21ae6a2" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + +[[package]] +name = "block-buffer" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" +dependencies = [ + "block-padding", + "byte-tools", + "byteorder", + "generic-array 0.12.3", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "generic-array 0.14.4", +] + +[[package]] +name = "block-cipher-trait" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c924d49bd09e7c06003acda26cd9742e796e34282ec6c1189404dee0c1f4774" +dependencies = [ + "generic-array 0.12.3", +] + +[[package]] +name = "block-padding" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" +dependencies = [ + "byte-tools", +] + +[[package]] +name = "bumpalo" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e8c087f005730276d1096a652e92a8bacee2e2472bcc9715a74d2bec38b5820" + +[[package]] +name = "byte-tools" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" + +[[package]] +name = "byteorder" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" + +[[package]] 
+name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cloudabi" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4344512281c643ae7638bbabc3af17a11307803ec8f0fcad9fae512a8bf36467" +dependencies = [ + "bitflags", +] + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "cpuid-bool" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8aebca1129a03dc6dc2b127edd729435bbc4a37e1d5f4d7513165089ceb02634" + +[[package]] +name = "crossbeam-utils" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" +dependencies = [ + "autocfg", + "cfg-if 0.1.10", + "lazy_static", +] + +[[package]] +name = "crypto-mac" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +dependencies = [ + "generic-array 0.14.4", + "subtle", +] + +[[package]] +name = "crypto_api" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f855e87e75a4799e18b8529178adcde6fd4f97c1449ff4821e747ff728bb102" + +[[package]] +name = "crypto_api_chachapoly" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95b2ad7cab08fd71addba81df5077c49df208effdfb3118a1519f9cdeac5aaf2" +dependencies = [ + "crypto_api", +] 
+ +[[package]] +name = "digest" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +dependencies = [ + "generic-array 0.12.3", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array 0.14.4", +] + +[[package]] +name = "directories" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8fed639d60b58d0f53498ab13d26f621fd77569cc6edb031f4cc36a2ad9da0f" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e93d7f5705de3e49895a2b5e0b8855a1c27f080192ae9c32a6432d50741a57a" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + +[[package]] +name = "equihash" +version = "0.1.0" +source = "git+https://github.com/iron-fish/librustzcash.git#cd0afdc0f3eea4f78f14132616c8ec9a4c9c44d5" +dependencies = [ + "blake2b_simd", + "byteorder", +] + +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" + +[[package]] +name = "ff" +version = "0.6.0" +source = "git+https://github.com/iron-fish/librustzcash.git#cd0afdc0f3eea4f78f14132616c8ec9a4c9c44d5" +dependencies = [ + "byteorder", + "ff_derive", + "rand_core 0.5.1", + "subtle", +] + +[[package]] +name = "ff_derive" +version = "0.6.0" +source = "git+https://github.com/iron-fish/librustzcash.git#cd0afdc0f3eea4f78f14132616c8ec9a4c9c44d5" +dependencies = [ + 
"addchain", + "num-bigint", + "num-integer", + "num-traits", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "fpe" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21988a326139165b75e3196bc6962ca638e5fb0c95102fbf152a3743174b01e4" +dependencies = [ + "aes", + "byteorder", + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "futures" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7e4c2612746b0df8fed4ce0c69156021b704c9aefa360311c04e6e9e002eed" + +[[package]] +name = "gcc" +version = "0.3.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" + +[[package]] +name = "generic-array" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec" +dependencies = [ + "typenum", +] + +[[package]] +name = "generic-array" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "group" +version = "0.6.0" +source = "git+https://github.com/iron-fish/librustzcash.git#cd0afdc0f3eea4f78f14132616c8ec9a4c9c44d5" +dependencies = [ + "byteorder", + "ff", + "rand 0.7.3", + 
"rand_xorshift", + "subtle", +] + +[[package]] +name = "hex" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa" + +[[package]] +name = "hex" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" + +[[package]] +name = "hmac" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" +dependencies = [ + "crypto-mac", + "digest 0.9.0", +] + +[[package]] +name = "instant" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb1fc4429a33e1f80d41dc9fea4d108a88bec1de8053878898ae448a0b52f613" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "ironfish_rust" +version = "0.1.0" +dependencies = [ + "bellman", + "blake2b_simd", + "blake2s_simd", + "byteorder", + "ff", + "lazy_static", + "pairing", + "rand 0.7.3", + "rust-crypto-wasm", + "shrinkwraprs", + "tiny-bip39", + "zcash_primitives", + "zcash_proofs", +] + +[[package]] +name = "itertools" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f56a2d0bc861f9165be4eb3442afd3c236d8a98afd426f65d92324ae1091a484" +dependencies = [ + "either", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614" + +[[package]] +name = "lock_api" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" +dependencies = [ + "cfg-if 0.1.10", +] + +[[package]] +name = "num-bigint" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-integer" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13bd41f508810a131401606d54ac32a467c97172d74ba7662562ebba5ad07fa0" +dependencies = [ + "parking_lot", +] + +[[package]] +name = "opaque-debug" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" + +[[package]] +name = "opaque-debug" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" + +[[package]] +name = "pairing" +version = "0.16.0" +source = "git+https://github.com/iron-fish/librustzcash.git#cd0afdc0f3eea4f78f14132616c8ec9a4c9c44d5" +dependencies = [ + "byteorder", + "ff", + "group", + "rand_core 0.5.1", + "subtle", +] + +[[package]] +name = 
"parking_lot" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c361aa727dd08437f2f1447be8b59a33b0edd15e0fcee698f935613d9efbca9b" +dependencies = [ + "cfg-if 0.1.10", + "cloudabi", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "pbkdf2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "216eaa586a190f0a738f2f918511eecfa90f13295abec0e457cdebcceda80cbd" +dependencies = [ + "crypto-mac", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "proc-macro2" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "quote" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" +dependencies = [ + "libc", + "rand 0.4.6", +] + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", 
+ "winapi", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom", + "libc", + "rand_chacha", + "rand_core 0.5.1", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_xorshift" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77d416b86801d23dde1aa643023b775c3a462efc0ed96443add11546cdf1dca8" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = 
"redox_syscall" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" + +[[package]] +name = "redox_users" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d" +dependencies = [ + "getrandom", + "redox_syscall", + "rust-argon2", +] + +[[package]] +name = "rust-argon2" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dab61250775933275e84053ac235621dfb739556d5c54a2f2e9313b7cf43a19" +dependencies = [ + "base64 0.12.3", + "blake2b_simd", + "constant_time_eq", + "crossbeam-utils", +] + +[[package]] +name = "rust-crypto-wasm" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dcf11edbc9a0effb4a99ddbe909dd26fb2e71459064879218c27b0add1cb6ec" +dependencies = [ + "base64 0.5.2", + "gcc", + "hex 0.2.0", + "libc", + "rand 0.3.23", + "time", +] + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "sha2" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" +dependencies = [ + "block-buffer 0.7.3", + "digest 0.8.1", + "fake-simd", + "opaque-debug 0.2.3", +] + +[[package]] +name = "sha2" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa827a14b29ab7f44778d14a88d3cb76e949c45083f7dbfa507d0cb699dc12de" +dependencies = [ + "block-buffer 0.9.0", + 
"cfg-if 1.0.0", + "cpuid-bool", + "digest 0.9.0", + "opaque-debug 0.3.0", +] + +[[package]] +name = "shrinkwraprs" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83695fde96cbe9e08f0e4eb96b1b56fdbd44f2098ee27462dda964c7745fddc7" +dependencies = [ + "bitflags", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "smallvec" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7acad6f34eb9e8a259d3283d1e8c1d34d7415943d4895f65cc73813c7396fc85" + +[[package]] +name = "subtle" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "343f3f510c2915908f155e94f17220b19ccfacf2a64a2a5d8004f2c3e311e7fd" + +[[package]] +name = "syn" +version = "1.0.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc371affeffc477f42a221a1e4297aedcea33d47d19b61455588bd9d8f6b19ac" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "synstructure" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b834f2d66f734cb897113e34aaff2f1ab4719ca946f9a7358dba8f8064148701" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "unicode-xid", +] + +[[package]] +name = "thiserror" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "time" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" 
+dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + +[[package]] +name = "tiny-bip39" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9e44c4759bae7f1032e286a7ef990bd9ed23fe831b7eeba0beb97484c2e59b8" +dependencies = [ + "anyhow", + "hmac", + "once_cell", + "pbkdf2", + "rand 0.7.3", + "rustc-hash", + "sha2 0.9.3", + "thiserror", + "unicode-normalization", + "zeroize", +] + +[[package]] +name = "tinyvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b78a366903f506d2ad52ca8dc552102ffdd3e937ba8a227f024dc1d1eae28575" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "typenum" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" + +[[package]] +name = "unicode-normalization" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a13e63ab62dbe32aeee58d1c5408d35c36c392bba5d9d3142287219721afe606" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-xid" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" + +[[package]] +name = "version_check" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" 
+version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasm-bindgen" +version = "0.2.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ac64ead5ea5f05873d7c12b545865ca2b8d28adfc50a49b84770a3a97265d42" +dependencies = [ + "cfg-if 0.1.10", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f22b422e2a757c35a73774860af8e112bff612ce6cb604224e8e47641a9e4f68" +dependencies = [ + "bumpalo", + "lazy_static", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b13312a745c08c469f0b292dd2fcd6411dba5f7160f593da6ef69b64e407038" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f249f06ef7ee334cc3b8ff031bfc11ec99d00f34d86da7498396dc1e3b1498fe" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d649a3145108d7d3fbcde896a468d1bd636791823c9921135218ad89be08307" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "zcash_primitives" +version = "0.2.0" +source = "git+https://github.com/iron-fish/librustzcash.git#cd0afdc0f3eea4f78f14132616c8ec9a4c9c44d5" +dependencies = [ + "aes", + "blake2b_simd", + "blake2s_simd", + "byteorder", + "crypto_api_chachapoly", + "equihash", + "ff", + "fpe", + "hex 0.3.2", + "lazy_static", + "log", + "pairing", + "rand 0.7.3", + "rand_core 0.5.1", + "sha2 0.8.2", + "subtle", +] + +[[package]] +name = "zcash_proofs" +version = "0.2.0" +source = "git+https://github.com/iron-fish/librustzcash.git#cd0afdc0f3eea4f78f14132616c8ec9a4c9c44d5" +dependencies = [ + "bellman", + "blake2b_simd", + "byteorder", + "directories", + "ff", + "pairing", + "rand_core 0.5.1", + "zcash_primitives", +] + +[[package]] +name = "zeroize" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81a974bcdd357f0dca4d41677db03436324d45a4c9ed2d0b873a5a360ce41c36" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3f369ddb18862aba61aa49bf31e74d29f0f162dec753063200e1dc084345d16" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] diff --git a/ironfish-rust/Cargo.toml b/ironfish-rust/Cargo.toml new file mode 100644 index 0000000000..9384c5bafa --- /dev/null +++ b/ironfish-rust/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "ironfish_rust" +version = "0.1.0" +authors = ["Iron Fish (https://ironfish.network)"] +edition = "2018" +license = "MPL-2.0" + +[lib] +name = "ironfish_rust" +path = "src/lib.rs" + + +[dependencies] 
+zcash_primitives = {git = "https://github.com/iron-fish/librustzcash.git"} +zcash_proofs = {git = "https://github.com/iron-fish/librustzcash.git"} +bellman = {git = "https://github.com/iron-fish/librustzcash.git", version = "0.6", default-features = false, features = ["groth16"]} +pairing = { git = "https://github.com/iron-fish/librustzcash.git", version = "0.16", features = ["expose-arith"]} +ff = { git = "https://github.com/iron-fish/librustzcash.git", version = "0.6"} +byteorder = "1.3.1" +lazy_static = "1.4.0" +# in favor of rust-crypto as this one is wasm friendly +rust-crypto-wasm="0.3.1" +rand = {version = "0.7", features = ["wasm-bindgen"]} +shrinkwraprs = "0.2.1" +tiny-bip39 = "0.8.0" +blake2b_simd = "0.5" +blake2s_simd = "0.5" diff --git a/ironfish-rust/README.md b/ironfish-rust/README.md new file mode 100644 index 0000000000..d203c681ce --- /dev/null +++ b/ironfish-rust/README.md @@ -0,0 +1,13 @@ +[![codecov](https://codecov.io/gh/iron-fish/ironfish/branch/master/graph/badge.svg?token=PCSVEVEW5V&flag=ironfish-rust)](https://codecov.io/gh/iron-fish/ironfish) + +This is the core API for interacting with transactions and the chain. It's essentially a facade to a lot of different projects. + +This is the only Iron Fish project that knows about the +[Sapling](https://github.com/zcash/librustzcash/tree/master/sapling-crypto) +api and its zero knowledge proving mechanism. + +There are theoretically different kinds of elliptical curves that can be used with Sapling, but we are currently +depending on the BLS12 curve. Everything in ironfish-rust is parameterized on the curve type, but there +are easy facades exported from sapling::bls12 for the different struct types. + +This layer is tangentially aware of the chain. It is not aware of the peer to peer network or client APIs. 
\ No newline at end of file diff --git a/ironfish-rust/clippy.toml b/ironfish-rust/clippy.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ironfish-rust/src/errors.rs b/ironfish-rust/src/errors.rs new file mode 100644 index 0000000000..99249dca8a --- /dev/null +++ b/ironfish-rust/src/errors.rs @@ -0,0 +1,138 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use std::error::Error; +use std::fmt; +use std::io; + +use bellman::SynthesisError; + +/// Error raised if constructing a sapling key fails for any reason. +#[derive(Debug)] +pub enum SaplingKeyError { + IOError, + FieldDecodingError, + InvalidViewingKey, + InvalidPaymentAddress, + InvalidPublicAddress, + DiversificationError, + InvalidLanguageEncoding, + InvalidWord, +} + +impl fmt::Display for SaplingKeyError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self) + } +} + +impl Error for SaplingKeyError {} + +impl From for SaplingKeyError { + fn from(_e: io::Error) -> SaplingKeyError { + SaplingKeyError::IOError + } +} + +/// Error raised if proving fails for some reason +#[derive(Debug)] +pub enum SaplingProofError { + SpendCircuitProofError(String), + ReceiptCircuitProofError, + SaplingKeyError, + IOError, + SigningError, + VerificationFailed, + InconsistentWitness, +} + +impl fmt::Display for SaplingProofError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self) + } +} + +impl Error for SaplingProofError {} + +impl From for SaplingProofError { + fn from(_e: SaplingKeyError) -> SaplingProofError { + SaplingProofError::SaplingKeyError + } +} + +impl From for SaplingProofError { + fn from(e: SynthesisError) -> SaplingProofError { + SaplingProofError::SpendCircuitProofError(e.to_string()) + } +} + +impl From for SaplingProofError { + fn from(_e: io::Error) -> 
SaplingProofError { + SaplingProofError::IOError + } +} + +/// Errors raised when constructing a transaction +#[derive(Debug)] +pub enum TransactionError { + InvalidBalanceError, + IllegalValueError, + SigningError, + ProvingError, + IoError(io::Error), + VerificationFailed, +} + +impl fmt::Display for TransactionError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self) + } +} + +impl Error for TransactionError {} + +impl From for TransactionError { + fn from(e: SaplingProofError) -> TransactionError { + match e { + SaplingProofError::SigningError => TransactionError::SigningError, + SaplingProofError::VerificationFailed => TransactionError::VerificationFailed, + _ => TransactionError::ProvingError, + } + } +} + +impl From for TransactionError { + fn from(e: io::Error) -> TransactionError { + TransactionError::IoError(e) + } +} + +/// Errors raised when constructing a note +#[derive(Debug)] +pub enum NoteError { + IoError, + RandomnessError, + KeyError, + InvalidCommitment, +} + +impl fmt::Display for NoteError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self) + } +} + +impl Error for NoteError {} + +impl From for NoteError { + fn from(_e: io::Error) -> NoteError { + NoteError::IoError + } +} + +impl From for NoteError { + fn from(_e: SaplingKeyError) -> NoteError { + NoteError::KeyError + } +} diff --git a/ironfish-rust/src/keys/mod.rs b/ironfish-rust/src/keys/mod.rs new file mode 100644 index 0000000000..28ebe7f4d2 --- /dev/null +++ b/ironfish-rust/src/keys/mod.rs @@ -0,0 +1,363 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +use super::errors; +use super::serializing::{ + bytes_to_hex, hex_to_bytes, point_to_bytes, read_scalar, scalar_to_bytes, +}; +use super::Sapling; +use bip39::{Language, Mnemonic}; +use blake2b_simd::Params as Blake2b; +use blake2s_simd::Params as Blake2s; +use rand::prelude::*; +// use rand_core::{OsRng, RngCore}; +use zcash_primitives::constants::CRH_IVK_PERSONALIZATION; + +use std::{io, sync::Arc}; +use zcash_primitives::jubjub::{ + edwards, FixedGenerators, JubjubEngine, JubjubParams, PrimeOrder, ToUniform, +}; +use zcash_primitives::primitives::{ProofGenerationKey, ViewingKey}; + +mod public_address; +pub use public_address::*; +mod view_keys; +pub use view_keys::*; + +#[cfg(test)] +mod test; + +const EXPANDED_SPEND_BLAKE2_KEY: &[u8; 16] = b"Beanstalk Money "; + +/// A single private key generates multiple other key parts that can +/// be used to allow various forms of access to a commitment note: +/// +/// While the key parts are all represented as 256 bit keys to the outside +/// world, inside the API they map to Edwards points or scalar values +/// on the JubJub curve. +#[derive(Clone)] +pub struct SaplingKey { + pub(crate) sapling: Arc>, + + /// The private (secret) key from which all the other key parts are derived. + /// The expanded form of this key is required before a note can be spent. + spending_key: [u8; 32], + + /// Part of the expanded form of the spending key, generally referred to as + /// `ask` in the literature. Derived from spending key using a seeded + /// pseudorandom hash function. Used to construct authorizing_key. + pub(crate) spend_authorizing_key: J::Fs, + + /// Part of the expanded form of the spending key, generally referred to as + /// `nsk` in the literature. Derived from spending key using a seeded + /// pseudorandom hash function. 
Used to construct nullifier_deriving_key + pub(crate) proof_authorizing_key: J::Fs, + + /// Part of the expanded form of the spending key, as well as being used + /// directly in the full viewing key. Generally referred to as + /// `ovk` in the literature. Derived from spending key using a seeded + /// pseudorandom hash function. This allows the creator of a note to access + /// keys needed to decrypt the note's contents. + pub(crate) outgoing_viewing_key: OutgoingViewKey, + + /// Part of the full viewing key. Generally referred to as + /// `ak` in the literature. Derived from spend_authorizing_key using scalar + /// multiplication in Sapling. Used to construct incoming viewing key. + pub(crate) authorizing_key: edwards::Point, + + /// Part of the full viewing key. Generally referred to as + /// `nk` in the literature. Derived from proof_authorizing_key using scalar + /// multiplication. Used to construct incoming viewing key. + pub(crate) nullifier_deriving_key: edwards::Point, + + /// Part of the payment_address. Generally referred to as + /// `ivk` in the literature. Derived from authorizing key and + /// nullifier deriving key. Used to construct payment address and + /// transmission key. This key allows the receiver of a note to decrypt its + /// contents. 
+ pub(crate) incoming_viewing_key: IncomingViewKey, +} + +impl<'a, J: JubjubEngine + pairing::MultiMillerLoop> SaplingKey { + /// Construct a new key from an array of bytes + pub fn new( + sapling: Arc>, + spending_key: [u8; 32], + ) -> Result { + let spend_authorizing_key = J::Fs::to_uniform(&Self::convert_key(spending_key, 0)); + let proof_authorizing_key = J::Fs::to_uniform(&Self::convert_key(spending_key, 1)); + let mut outgoing_viewing_key = [0; 32]; + outgoing_viewing_key[0..32].clone_from_slice(&Self::convert_key(spending_key, 2)[0..32]); + let outgoing_viewing_key = OutgoingViewKey { + sapling: sapling.clone(), + view_key: outgoing_viewing_key, + }; + let authorizing_key = sapling + .jubjub + .generator(FixedGenerators::SpendingKeyGenerator) + .mul(spend_authorizing_key, &sapling.jubjub); + let nullifier_deriving_key = sapling + .jubjub + .generator(FixedGenerators::ProofGenerationKey) + .mul(proof_authorizing_key, &sapling.jubjub); + let incoming_viewing_key = IncomingViewKey { + sapling: sapling.clone(), + view_key: Self::hash_viewing_key(&authorizing_key, &nullifier_deriving_key)?, + }; + + Ok(SaplingKey { + sapling, + spending_key, + spend_authorizing_key, + proof_authorizing_key, + outgoing_viewing_key, + authorizing_key, + nullifier_deriving_key, + incoming_viewing_key, + }) + } + + /// Load a new key from a Read implementation (e.g: socket, file) + pub fn read( + sapling: Arc>, + reader: &mut R, + ) -> Result { + let mut spending_key = [0; 32]; + reader.read_exact(&mut spending_key)?; + Self::new(sapling, spending_key) + } + + /// Load a key from a string of hexadecimal digits + pub fn from_hex( + sapling: Arc>, + value: &str, + ) -> Result { + match hex_to_bytes(value) { + Err(()) => Err(errors::SaplingKeyError::InvalidPaymentAddress), + Ok(bytes) => { + if bytes.len() != 32 { + Err(errors::SaplingKeyError::InvalidPaymentAddress) + } else { + let mut byte_arr = [0; 32]; + byte_arr.clone_from_slice(&bytes[0..32]); + Self::new(sapling, byte_arr) + } + 
} + } + } + + /// Load a key from a string of words to be decoded into bytes. + pub fn from_words( + sapling: Arc>, + language_code: &str, + value: String, + ) -> Result { + let language = Language::from_language_code(language_code) + .ok_or(errors::SaplingKeyError::InvalidLanguageEncoding)?; + let mnemonic = Mnemonic::from_phrase(&value, language) + .map_err(|_| errors::SaplingKeyError::InvalidPaymentAddress)?; + let bytes = mnemonic.entropy(); + let mut byte_arr = [0; 32]; + byte_arr.clone_from_slice(&bytes[0..32]); + Self::new(sapling, byte_arr) + } + + /// Generate a new random secret key. + /// + /// This would normally be used for a new account coming online for the + /// first time. + /// Note that unlike `new`, this function always successfully returns a value. + pub fn generate_key(sapling: Arc>) -> Self { + let spending_key: [u8; 32] = random(); + // OsRng.fill_bytes(&mut spending_key); + loop { + if let Ok(key) = Self::new(sapling.clone(), spending_key) { + return key; + } + } + } + + /// Generate a public address from the incoming viewing key, given a specific + /// 11 byte diversifier. + /// + /// This may fail, as not all diversifiers are created equal. + /// + /// Note: This may need to be public at some point. I'm hoping the client + /// API would never have to deal with diversifiers, but I'm not sure, yet. + pub fn public_address( + &self, + diversifier: &[u8; 11], + ) -> Result, errors::SaplingKeyError> { + PublicAddress::from_key(self, diversifier) + } + + /// Generate a public address from this key's incoming viewing key, + /// picking a diversifier that is guaranteed to work with it. 
+ /// + /// This method always succeeds, retrying with a different diversifier if + /// one doesn't work + pub fn generate_public_address(&self) -> PublicAddress { + self.incoming_viewing_key.generate_public_address() + } + + // Write a bytes representation of this key to the provided stream + pub fn write(&self, mut writer: W) -> io::Result<()> { + let num_bytes_written = writer.write(&self.spending_key)?; + if num_bytes_written != 32 { + Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Couldn't write entire key", + )) + } else { + Ok(()) + } + } + + /// Retrieve the private spending key + pub fn spending_key(&self) -> [u8; 32] { + self.spending_key + } + + /// Private spending key as hexadecimal. This is slightly + /// more human readable. + pub fn hex_spending_key(&self) -> String { + bytes_to_hex(&self.spending_key) + } + + /// Private spending key as words. This is even more human readable. + /// + /// We abuse the bip-39 to directly encode the key as words, instead of as + /// a seed. This isn't strictly necessary for private key, but view keys + /// will need a direct mapping. The private key could still be generated + /// using bip-32 and bip-39 if desired. + pub fn words_spending_key( + &self, + language_code: &str, + ) -> Result { + let language = Language::from_language_code(language_code) + .ok_or(errors::SaplingKeyError::InvalidLanguageEncoding)?; + let mnemonic = Mnemonic::from_entropy(&self.spending_key, language).unwrap(); + Ok(mnemonic.phrase().to_string()) + } + + /// Retrieve the publicly visible outgoing viewing key + pub fn outgoing_view_key(&self) -> &OutgoingViewKey { + &self.outgoing_viewing_key + } + + /// Retrieve the publicly visible incoming viewing key + pub fn incoming_view_key(&self) -> &IncomingViewKey { + &self.incoming_viewing_key + } + + /// Retrieve both the view keys. These would normally used for third-party audits + /// or for light clients. 
+ pub fn view_keys(&self) -> ViewKeys { + ViewKeys { + incoming: self.incoming_view_key().clone(), + outgoing: self.outgoing_view_key().clone(), + } + } + + #[deprecated(note = "I'm not aware that this ever needs to be publicly visible")] + /// Retrieve the spend authorizing key + pub fn spend_authorizing_key(&self) -> [u8; 32] { + scalar_to_bytes(&self.spend_authorizing_key) + } + + #[deprecated(note = "I'm not aware that this ever needs to be publicly visible")] + /// Retrieve the byte representation of the proof authorizing key + pub fn proof_authorizing_key(&self) -> [u8; 32] { + scalar_to_bytes(&self.proof_authorizing_key) + } + + #[deprecated(note = "I'm not aware that this ever needs to be publicly visible")] + /// Retrieve the byte representation of the authorizing key + pub fn authorizing_key(&self) -> [u8; 32] { + point_to_bytes(&self.authorizing_key) + .expect("authorizing key should be convertible to bytes") + } + + #[deprecated(note = "I'm not aware that this ever needs to be publicly visible")] + /// Retrieve the byte representation of the nullifier_deriving_key + pub fn nullifier_deriving_key(&self) -> [u8; 32] { + point_to_bytes(&self.nullifier_deriving_key) + .expect("nullifier deriving key should be convertible to bytes") + } + + /// Adapter to convert this key to a viewing key for use in sapling + /// functions. + pub(crate) fn sapling_viewing_key(&self) -> ViewingKey { + ViewingKey { + ak: self.authorizing_key.clone(), + nk: self.nullifier_deriving_key.clone(), + } + } + + /// Adapter to convert this key to a proof generation key for use in + /// sapling functions + pub(crate) fn sapling_proof_generation_key(&self) -> ProofGenerationKey { + ProofGenerationKey { + ak: self.authorizing_key.clone(), + nsk: self.proof_authorizing_key, + } + } + + /// Convert the spending key to another value using a pseudorandom hash + /// function. 
Used during key construction to derive the following keys: + /// * `spend_authorizing_key` (represents a sapling scalar Fs type) + /// * `proof_authorizing_key` (represents a sapling scalar Fs type) + /// * `outgoing_viewing_key (just some bytes) + /// + /// # Arguments + /// * `spending_key` The 32 byte spending key + /// * `modifier` a byte to add to tweak the hash for each of the three + /// values + fn convert_key(spending_key: [u8; 32], modifier: u8) -> [u8; 64] { + let mut hasher = Blake2b::new() + .hash_length(64) + .personal(EXPANDED_SPEND_BLAKE2_KEY) + .to_state(); + + hasher.update(&spending_key); + hasher.update(&[modifier]); + let mut hash_result = [0; 64]; + hash_result[0..64].clone_from_slice(&hasher.finalize().as_ref()[0..64]); + hash_result + } + + /// Helper method to construct the viewing key from the authorizing key + /// and nullifier deriving key using a blake2 hash of their respective bytes. + /// + /// This method is only called once, but it's kind of messy, so I pulled it + /// out of the constructor for easier maintenance. + fn hash_viewing_key( + authorizing_key: &edwards::Point, + nullifier_deriving_key: &edwards::Point, + ) -> Result { + let mut view_key_contents = [0; 64]; + authorizing_key + .write(&mut view_key_contents[0..32]) + .unwrap(); + nullifier_deriving_key + .write(&mut view_key_contents[32..64]) + .unwrap(); + // let mut hasher = Blake2s::with_params(32, &[], &[], CRH_IVK_PERSONALIZATION); + + let mut hash_result = [0; 32]; + hash_result.copy_from_slice( + Blake2s::new() + .hash_length(32) + .personal(CRH_IVK_PERSONALIZATION) + .hash(&view_key_contents) + .as_bytes(), + ); + // Drop the last five bits, so it can be interpreted as a scalar. + hash_result[31] &= 0b0000_0111; + if hash_result == [0; 32] { + return Err(errors::SaplingKeyError::InvalidViewingKey); + } + Ok(read_scalar(&hash_result[..])?) 
+ } +} diff --git a/ironfish-rust/src/keys/public_address.rs b/ironfish-rust/src/keys/public_address.rs new file mode 100644 index 0000000000..6a4969e7e3 --- /dev/null +++ b/ironfish-rust/src/keys/public_address.rs @@ -0,0 +1,201 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use crate::serializing::{bytes_to_hex, hex_to_bytes, point_to_bytes}; +use rand::{thread_rng, Rng}; +use zcash_primitives::primitives::{Diversifier, PaymentAddress}; + +use std::{io, sync::Arc}; +use zcash_primitives::jubjub::{edwards, JubjubEngine, PrimeOrder, ToUniform, Unknown}; + +use super::{errors, IncomingViewKey, Sapling, SaplingKey}; + +/// The address to which funds can be sent, stored as a diversifier and public +/// transmission key. Combining a diversifier with an incoming_viewing_key allows +/// the creation of multiple public addresses without revealing the viewing key. +/// This allows the user to have multiple "accounts", or to even have different +/// payment addresses per transaction. +#[derive(Clone)] +pub struct PublicAddress { + /// Diversifier is a struct of 11 bytes. The array is hashed and interpreted + /// as an edwards point, but we have to store the diversifier independently + /// because the pre-hashed bytes cannot be extracted from the point. + pub(crate) diversifier: Diversifier, + + /// The same diversifier, but represented as a point on the jubjub curve. + /// Often referred to as + /// `g_d` in the literature. + pub(crate) diversifier_point: edwards::Point, + + /// The transmission key is the result of combining the diversifier with the + /// incoming viewing key (a non-reversible operation). Together, the two + /// form a public address to which payments can be sent. 
+ pub(crate) transmission_key: edwards::Point, +} + +impl PublicAddress { + /// Initialize a public address from its 43 byte representation. + pub fn new( + sapling: Arc>, + address_bytes: &[u8; 43], + ) -> Result, errors::SaplingKeyError> { + let (diversifier, diversifier_point) = + PublicAddress::load_diversifier(&sapling.jubjub, &address_bytes[..11])?; + let transmission_key = + PublicAddress::load_transmission_key(&sapling.jubjub, &address_bytes[11..])?; + + Ok(PublicAddress { + diversifier, + diversifier_point, + transmission_key, + }) + } + + /// Load a public address from a Read implementation (e.g: socket, file) + pub fn read( + sapling: Arc>, + reader: &mut R, + ) -> Result { + let mut address_bytes = [0; 43]; + reader.read_exact(&mut address_bytes)?; + Self::new(sapling, &address_bytes) + } + + /// Initialize a public address from a sapling key and the bytes + /// representing a diversifier. Typically constructed from + /// SaplingKey::public_address() + pub fn from_key( + sapling_key: &SaplingKey, + diversifier: &[u8; 11], + ) -> Result, errors::SaplingKeyError> { + Self::from_view_key(&sapling_key.incoming_view_key(), diversifier) + } + + pub fn from_view_key( + view_key: &IncomingViewKey, + diversifier: &[u8; 11], + ) -> Result, errors::SaplingKeyError> { + let diversifier = Diversifier(*diversifier); + if let Some(key_part) = diversifier.g_d(&view_key.sapling.jubjub) { + Ok(PublicAddress { + diversifier, + diversifier_point: key_part.clone(), + transmission_key: key_part.mul(view_key.view_key, &view_key.sapling.jubjub), + }) + } else { + Err(errors::SaplingKeyError::DiversificationError) + } + } + + /// Convert a String of hex values to a PublicAddress. The String must + /// be 86 hexadecimal characters representing the 43 bytes of an address + /// or it fails. 
+ pub fn from_hex( + sapling: Arc>, + value: &str, + ) -> Result { + match hex_to_bytes(value) { + Err(()) => Err(errors::SaplingKeyError::InvalidPublicAddress), + Ok(bytes) => { + if bytes.len() != 43 { + Err(errors::SaplingKeyError::InvalidPublicAddress) + } else { + let mut byte_arr = [0; 43]; + byte_arr.clone_from_slice(&bytes[0..43]); + Self::new(sapling, &byte_arr) + } + } + } + } + + /// Retrieve the public address in byte form. It is comprised of the + /// 11 byte diversifier followed by the 32 byte transmission key. + pub fn public_address(&self) -> [u8; 43] { + let mut result = [0; 43]; + result[..11].copy_from_slice(&self.diversifier.0); + result[11..].copy_from_slice( + &point_to_bytes(&self.transmission_key) + .expect("transmission key should be convertible to bytes"), + ); + result + } + + /// Retrieve the public address in hex form. + pub fn hex_public_address(&self) -> String { + bytes_to_hex(&self.public_address()) + } + + /// Store the bytes of this public address in the given writer. 
+ pub fn write(&self, mut writer: W) -> io::Result<()> { + writer.write_all(&self.public_address())?; + Ok(()) + } + + pub(crate) fn load_diversifier( + jubjub: &J::Params, + diversifier_slice: &[u8], + ) -> Result<(Diversifier, edwards::Point), errors::SaplingKeyError> { + let mut diversifier_bytes = [0; 11]; + diversifier_bytes.clone_from_slice(diversifier_slice); + let diversifier = Diversifier(diversifier_bytes); + let diversifier_point = diversifier + .g_d(jubjub) + .ok_or(errors::SaplingKeyError::DiversificationError)?; + Ok((diversifier, diversifier_point)) + } + + pub(crate) fn load_transmission_key( + jubjub: &J::Params, + transmission_key_bytes: &[u8], + ) -> Result, errors::SaplingKeyError> { + assert!(transmission_key_bytes.len() == 32); + let transmission_key_non_prime = + edwards::Point::::read(transmission_key_bytes, jubjub)?; + transmission_key_non_prime + .as_prime_order(jubjub) + .ok_or(errors::SaplingKeyError::InvalidPaymentAddress) + } + + /// Calculate secret key and ephemeral public key for Diffie Hellman + /// Key exchange as used in note encryption. + /// + /// The returned values can be used according to the protocol described in + /// the module-level shared_secret function + /// + /// Returns a tuple of: + /// * the ephemeral secret key as a scalar FS + /// * the ephemeral public key as an edwards point + pub fn generate_diffie_hellman_keys( + &self, + jubjub: &J::Params, + ) -> (J::Fs, edwards::Point) { + let mut buffer = [0u8; 64]; + thread_rng().fill(&mut buffer[..]); + + let secret_key: J::Fs = J::Fs::to_uniform(&buffer[..]); + let public_key = self.diversifier_point.mul(secret_key, jubjub); + (secret_key, public_key) + } + + /// Convert this key to a payment address for use in the zcash_primitives + /// crate. This is essentially just an adapter from one struct name to + /// another because `pk_d` is not a name I want to expose in a public + /// interface. 
+ pub(crate) fn sapling_payment_address(&self) -> PaymentAddress { + PaymentAddress::from_parts(self.diversifier, self.transmission_key.clone()) + .expect("Converting PaymentAddress types shouldn't fail") + } +} + +impl std::fmt::Debug for PublicAddress { + fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "PublicAddress {}", self.hex_public_address()) + } +} + +impl std::cmp::PartialEq for PublicAddress { + fn eq(&self, other: &Self) -> bool { + self.hex_public_address() == other.hex_public_address() + } +} diff --git a/ironfish-rust/src/keys/test.rs b/ironfish-rust/src/keys/test.rs new file mode 100644 index 0000000000..368d7d1ee0 --- /dev/null +++ b/ironfish-rust/src/keys/test.rs @@ -0,0 +1,104 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use super::{shared_secret, PublicAddress, SaplingKey}; +use crate::sapling_bls12; +use pairing::bls12_381::Bls12; + +#[test] +fn test_key_generation_and_construction() { + let sapling = &*sapling_bls12::SAPLING; + let key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let key2: SaplingKey = SaplingKey::new(sapling.clone(), key.spending_key).unwrap(); + assert!(key.spending_key != [0; 32]); + assert!(key2.spending_key == key.spending_key); + assert!(key2.incoming_viewing_key.view_key == key.incoming_viewing_key.view_key); + + // should not fail or infinite loop + key2.generate_public_address(); +} + +#[test] +fn test_diffie_hellman_shared_key() { + let sapling = &*sapling_bls12::SAPLING; + let key1: SaplingKey = SaplingKey::generate_key(sapling.clone()); + + // second address has to use the same diversifier for the keys to be valid + let address1 = key1.generate_public_address(); + let (secret_key, public_key) = address1.generate_diffie_hellman_keys(&sapling.jubjub); + let shared_secret1 = shared_secret( 
+ &sapling.jubjub, + &secret_key, + &address1.transmission_key, + &public_key, + ); + let shared_secret2 = shared_secret( + &sapling.jubjub, + &key1.incoming_viewing_key.view_key, + &public_key, + &public_key, + ); + if shared_secret1 != shared_secret2 { + assert!(false, "secrets don't match"); + } +} + +#[test] +fn test_serialization() { + let sapling = &*sapling_bls12::SAPLING; + let key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let mut serialized_key = [0; 32]; + key.write(&mut serialized_key[..]) + .expect("Should be able to serialize key"); + assert_ne!(serialized_key, [0; 32]); + + let read_back_key: SaplingKey = + SaplingKey::read(sapling.clone(), &mut serialized_key.as_ref()) + .expect("Should be able to load key from valid bytes"); + assert_eq!( + read_back_key.incoming_view_key().view_key, + key.incoming_view_key().view_key + ); + + let public_address = key.generate_public_address(); + let mut serialized_address = [0; 43]; + public_address + .write(&mut serialized_address[..]) + .expect("should be able to serialize address"); + + let read_back_address: PublicAddress = + PublicAddress::new(sapling.clone(), &serialized_address) + .expect("Should be able to construct address from valid bytes"); + assert_eq!( + read_back_address.diversifier.0, + public_address.diversifier.0 + ); + assert_eq!( + read_back_address.diversifier_point.to_xy(), + public_address.diversifier_point.to_xy() + ); + assert_eq!( + read_back_address.transmission_key.to_xy(), + public_address.transmission_key.to_xy() + ) +} + +#[test] +fn test_hex_conversion() { + let sapling = &*sapling_bls12::SAPLING; + let key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + + let hex = key.hex_spending_key(); + assert_eq!(hex.len(), 64); + let second_key: SaplingKey = SaplingKey::from_hex(sapling.clone(), &hex).unwrap(); + assert_eq!(second_key.spending_key, key.spending_key); + + let address = key.generate_public_address(); + let hex = address.hex_public_address(); + 
assert_eq!(hex.len(), 86); + let second_address = PublicAddress::from_hex(sapling.clone(), &hex).unwrap(); + assert_eq!(second_address, address); + + assert!(PublicAddress::from_hex(sapling.clone(), "invalid").is_err()); +} diff --git a/ironfish-rust/src/keys/view_keys.rs b/ironfish-rust/src/keys/view_keys.rs new file mode 100644 index 0000000000..72f4500733 --- /dev/null +++ b/ironfish-rust/src/keys/view_keys.rs @@ -0,0 +1,260 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +//! View keys allow your transactions to be read +//! by a third party without giving the option to spend your +//! coins. This was designed for auditing systems, but may have other purposes +//! such as in the use of light clients. +//! +//! There are two kinds of view keys. One allows you to share transactions +//! that you have received, while the other allows you to share transactions +//! that you have spent. +//! + +use super::{errors, PublicAddress, Sapling}; +use crate::serializing::{ + bytes_to_hex, hex_to_bytes, point_to_bytes, read_scalar, scalar_to_bytes, +}; +use bip39::{Language, Mnemonic}; +use blake2b_simd::Params as Blake2b; +use rand::{thread_rng, Rng}; + +use std::{io, sync::Arc}; +use zcash_primitives::jubjub::{edwards, JubjubEngine, PrimeOrder}; + +const DIFFIE_HELLMAN_PERSONALIZATION: &[u8; 16] = b"Beanstalk shared"; + +/// Key that allows someone to view a transaction that you have received. +/// +/// Referred to as `ivk` in the literature. 
+#[derive(Clone)] +pub struct IncomingViewKey { + pub(crate) sapling: Arc>, + pub(crate) view_key: J::Fs, +} + +impl IncomingViewKey { + /// load view key from a Read implementation + pub fn read( + sapling: Arc>, + reader: &mut R, + ) -> Result { + let view_key = read_scalar(reader)?; + Ok(IncomingViewKey { sapling, view_key }) + } + + /// Load a key from a string of hexadecimal digits + pub fn from_hex( + sapling: Arc>, + value: &str, + ) -> Result { + match hex_to_bytes(&value) { + Err(()) => Err(errors::SaplingKeyError::InvalidViewingKey), + Ok(bytes) => { + if bytes.len() != 32 { + Err(errors::SaplingKeyError::InvalidViewingKey) + } else { + Self::read(sapling, &mut bytes[..].as_ref()) + } + } + } + } + + /// Load a key from a string of words to be decoded into bytes. + /// + /// See https://github.com/BeanstalkNetwork/word-encoding + pub fn from_words( + sapling: Arc>, + language_code: &str, + value: String, + ) -> Result { + let language = Language::from_language_code(language_code) + .ok_or(errors::SaplingKeyError::InvalidLanguageEncoding)?; + let mnemonic = Mnemonic::from_phrase(&value, language) + .map_err(|_| errors::SaplingKeyError::InvalidPaymentAddress)?; + let bytes = mnemonic.entropy(); + let mut byte_arr = [0; 32]; + byte_arr.clone_from_slice(&bytes[0..32]); + Self::read(sapling, &mut byte_arr[..].as_ref()) + } + + /// Viewing key as hexadecimal, for readability. + pub fn hex_key(&self) -> String { + bytes_to_hex(&scalar_to_bytes(&self.view_key)) + } + + /// Even more readable + pub fn words_key(&self, language_code: &str) -> Result { + let language = Language::from_language_code(language_code) + .ok_or(errors::SaplingKeyError::InvalidLanguageEncoding)?; + let mnemonic = Mnemonic::from_entropy(&scalar_to_bytes(&self.view_key), language).unwrap(); + Ok(mnemonic.phrase().to_string()) + } + + /// Generate a public address from the incoming viewing key, given a specific + /// 11 byte diversifier. 
+ /// + /// This may fail, as not all diversifiers are created equal. + /// + /// Note: This may need to be public at some point. I'm hoping the client + /// API would never have to deal with diversifiers, but I'm not sure, yet. + pub fn public_address( + &self, + diversifier: &[u8; 11], + ) -> Result, errors::SaplingKeyError> { + PublicAddress::from_view_key(self, diversifier) + } + + /// Generate a public address from this key, + /// picking a diversifier that is guaranteed to work with it. + /// + /// This method always succeeds, retrying with a different diversifier if + /// one doesn't work. + pub fn generate_public_address(&self) -> PublicAddress { + let public_address; + loop { + let mut diversifier_candidate = [0u8; 11]; + thread_rng().fill(&mut diversifier_candidate); + + if let Ok(key) = self.public_address(&diversifier_candidate) { + public_address = key; + break; + } + } + public_address + } + + /// Calculate the shared secret key given the ephemeral public key that was + /// created for a transaction. + pub(crate) fn shared_secret( + &self, + ephemeral_public_key: &edwards::Point, + ) -> [u8; 32] { + shared_secret( + &self.sapling.jubjub, + &self.view_key, + ephemeral_public_key, + ephemeral_public_key, + ) + } +} + +/// Key that allows someone to view a transaction that you have spent. +/// +/// Referred to as `ovk` in the literature. 
+#[derive(Clone)] +pub struct OutgoingViewKey { + pub(crate) sapling: Arc>, + pub(crate) view_key: [u8; 32], +} + +impl OutgoingViewKey { + /// Load a key from a string of hexadecimal digits + pub fn from_hex( + sapling: Arc>, + value: &str, + ) -> Result { + match hex_to_bytes(&value) { + Err(()) => Err(errors::SaplingKeyError::InvalidViewingKey), + Ok(bytes) => { + if bytes.len() != 32 { + Err(errors::SaplingKeyError::InvalidViewingKey) + } else { + let mut view_key = [0; 32]; + view_key.clone_from_slice(&bytes[0..32]); + Ok(Self { sapling, view_key }) + } + } + } + } + + /// Load a key from a string of words to be decoded into bytes. + /// + /// See https://github.com/BeanstalkNetwork/word-encoding + pub fn from_words( + sapling: Arc>, + language_code: &str, + value: String, + ) -> Result { + let language = Language::from_language_code(language_code) + .ok_or(errors::SaplingKeyError::InvalidLanguageEncoding)?; + let mnemonic = Mnemonic::from_phrase(&value, language) + .map_err(|_| errors::SaplingKeyError::InvalidPaymentAddress)?; + let bytes = mnemonic.entropy(); + let mut view_key = [0; 32]; + view_key.clone_from_slice(&bytes[0..32]); + Ok(Self { sapling, view_key }) + } + + /// Viewing key as hexadecimal, for readability. + pub fn hex_key(&self) -> String { + bytes_to_hex(&self.view_key) + } + + /// Even more readable + pub fn words_key(&self, language_code: &str) -> Result { + let language = Language::from_language_code(language_code) + .ok_or(errors::SaplingKeyError::InvalidLanguageEncoding)?; + let mnemonic = Mnemonic::from_entropy(&self.view_key, language).unwrap(); + Ok(mnemonic.phrase().to_string()) + } +} + +/// Pair of outgoing and incoming view keys for a complete audit +/// of spends and receipts +#[derive(Clone)] +pub struct ViewKeys { + pub incoming: IncomingViewKey, + pub outgoing: OutgoingViewKey, +} + +/// Derive a shared secret key from a secret key and the other person's public +/// key. 
+/// +/// +/// The shared secret point is calculated by multiplying the public and private +/// keys. This gets converted to bytes and hashed together with the reference +/// public key to generate the final shared secret as used in encryption. + +/// A Diffie Hellman key exchange might look like this: +/// * alice generates her DH secret key as SaplingKeys::internal_viewing_key +/// * alice chooses a diversifier and publishes it and the transmission key +/// generated from it as a PublicAddress +/// * The transmission key becomes her DH public_key +/// * Bob chooses some randomness as his secret key using the +/// generate_diffie_hellman_keys method on alice's PublicAddress +/// * That method calculates bob's public key as (alice diversifier * bob secret key) +/// * This public key becomes the reference public key for both sides +/// * bob sends public key to Alice +/// * bob calculates shared secret key as (alice public key * bob secret key) +/// * which is (alice transmission key * bob secret key) +/// * maths to (alice internal viewing key * diversifier * bob secret key) +/// * alice calculates shared secret key as (bob public key * alice internal viewing key) +/// * this maths to (alice diversifier * bob secret key * alice internal viewing key) +/// * both alice and bob hash the shared secret key with the reference public +/// key (bob's public key) to get the final shared secret +/// +/// The resulting key can be used in any symmetric cipher +pub(crate) fn shared_secret( + jubjub: &J::Params, + secret_key: &J::Fs, + other_public_key: &edwards::Point, + reference_public_key: &edwards::Point, +) -> [u8; 32] { + let shared_secret = point_to_bytes(&other_public_key.mul(*secret_key, jubjub)) + .expect("should be able to convert point to bytes"); + let reference_bytes = + point_to_bytes(&reference_public_key).expect("should be able to convert point to bytes"); + + let mut hasher = Blake2b::new() + .hash_length(32) + .personal(DIFFIE_HELLMAN_PERSONALIZATION) + 
.to_state(); + + hasher.update(&shared_secret); + hasher.update(&reference_bytes); + let mut hash_result = [0; 32]; + hash_result[..].clone_from_slice(&hasher.finalize().as_ref()[..]); + hash_result +} diff --git a/ironfish-rust/src/lib.rs b/ironfish-rust/src/lib.rs new file mode 100644 index 0000000000..4bff130d3a --- /dev/null +++ b/ironfish-rust/src/lib.rs @@ -0,0 +1,100 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +#[macro_use] +extern crate lazy_static; + +#[macro_use] +extern crate shrinkwraprs; + +use bellman::groth16; +use zcash_primitives::jubjub::{edwards, JubjubEngine}; + +mod serializing; + +pub mod errors; +pub mod keys; +pub mod merkle_note; +pub mod merkle_note_hash; +pub mod note; +pub mod nullifiers; +pub mod receiving; +pub mod spending; +pub mod transaction; +pub mod witness; +pub use { + keys::{IncomingViewKey, OutgoingViewKey, PublicAddress, SaplingKey, ViewKeys}, + merkle_note::MerkleNote, + merkle_note_hash::MerkleNoteHash, + note::Note, + receiving::{ReceiptParams, ReceiptProof}, + spending::{SpendParams, SpendProof}, + transaction::{ProposedTransaction, SimpleTransaction, Transaction}, +}; +pub mod sapling_bls12; + +#[cfg(test)] +pub(crate) mod test_util; // I'm not sure if this is the right way to publish the utility library. + +// The main entry-point to the sapling API. Construct this with loaded parameters, and then call +// methods on it to do the actual work. +// +// spend and output are two arithmetic circuits for use in zksnark calculations provided by Bellman. +// Though the *_params have a verifying key on them, they are not the prepared verifying keys, +// so we store the prepared keys separately at the time of loading the params. +// +// The values are all loaded from a file in serialized form. 
+pub struct Sapling { + spend_params: groth16::Parameters, + receipt_params: groth16::Parameters, + spend_verifying_key: groth16::PreparedVerifyingKey, + receipt_verifying_key: groth16::PreparedVerifyingKey, + pub jubjub: J::Params, // Initial point on the jubjub curve +} + +impl Sapling { + /// Initialize a Sapling instance and prepare for proving. Load the parameters from a config file + /// at a known location (`./sapling_params`, for now). + /// + /// The argument `jubjub` is the parameters for a given JubjubEngine. They have to be passed + /// in instead of being constructed locally because this code is generic across curves, but + /// zcash_primitives's `J::Params` trait doesn't have a method to construct a default. + pub fn load(jubjub: J::Params) -> Self { + // TODO: We'll need to build our own parameters using a trusted set up at some point. + // These params were borrowed from zcash + let spend_bytes = include_bytes!("sapling_params/sapling-spend.params"); + let receipt_bytes = include_bytes!("sapling_params/sapling-output.params"); + + let spend_params = Sapling::load_params(&spend_bytes[..]); + let receipt_params = Sapling::load_params(&receipt_bytes[..]); + + let spend_vk = groth16::prepare_verifying_key(&spend_params.vk); + let receipt_vk = groth16::prepare_verifying_key(&receipt_params.vk); + + Sapling { + spend_verifying_key: spend_vk, + receipt_verifying_key: receipt_vk, + spend_params, + receipt_params, + jubjub, + } + } + + /// Load sapling parameters from a provided filename. The parameters are huge and take a + /// couple seconds to load. They primarily contain the "toxic waste" for a specific sapling + /// curve. 
+ /// + /// NOTE: If this is stupidly slow for you, try compiling in --release mode + fn load_params(bytes: &[u8]) -> groth16::Parameters { + groth16::Parameters::read(bytes, false).unwrap() + } +} + +// TODO: This belongs in a utility library if we ever need one +fn is_small_order( + jubjub: &J::Params, + point: &edwards::Point, +) -> bool { + point.double(jubjub).double(jubjub).double(jubjub) == edwards::Point::zero() +} diff --git a/ironfish-rust/src/merkle_note.rs b/ironfish-rust/src/merkle_note.rs new file mode 100644 index 0000000000..62f967e31f --- /dev/null +++ b/ironfish-rust/src/merkle_note.rs @@ -0,0 +1,362 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/// Implement a merkle note to store all the values that need to go into a merkle tree. +/// A tree containing these values can serve as a snapshot of the entire chain. +use super::{ + errors, + keys::{shared_secret, IncomingViewKey, OutgoingViewKey, PublicAddress, SaplingKey}, + note::{Note, ENCRYPTED_NOTE_SIZE}, + serializing::{aead, read_scalar}, + witness::{WitnessNode, WitnessTrait}, + MerkleNoteHash, Sapling, +}; + +use blake2b_simd::Params as Blake2b; +use ff::PrimeField; +use zcash_primitives::primitives::ValueCommitment; + +use std::{convert::TryInto, io, sync::Arc}; +use zcash_primitives::jubjub::{edwards, JubjubEngine, PrimeOrder, Unknown}; + +pub const ENCRYPTED_SHARED_KEY_SIZE: usize = 64; +/// The note encryption keys are used to allow the spender to +/// read notes that they have themselves have spent. +/// In the case of miner notes, the note is created out of thin air +/// and there is no actual spender. We set the note encryption keys +/// to a known value, so they can be identified in the trees. 
+/// +/// This does not leak information, since miner notes are identifiably +/// stored separately on the header of blocks already. +pub const NOTE_ENCRYPTION_MINER_KEYS: &[u8; ENCRYPTED_SHARED_KEY_SIZE + aead::MAC_SIZE] = + b"Beanstalk note encryption miner key000000000000000000000000000000000000000000000"; +const SHARED_KEY_PERSONALIZATION: &[u8; 16] = b"Beanstalk Keyenc"; + +#[derive(Clone)] +pub struct MerkleNote { + /// Randomized value commitment. Sometimes referred to as + /// `cv` in the literature. It's calculated by multiplying a value by a + /// random number. Commits this note to the value it contains + /// without revealing what that value is. + pub(crate) value_commitment: edwards::Point, + + /// The hash of the note, committing to it's internal state + pub(crate) note_commitment: J::Fr, + + /// Public part of ephemeral diffie-hellman key-pair. See the discussion on + /// keys::shared_secret to understand how this is used + pub(crate) ephemeral_public_key: edwards::Point, + + /// note as encrypted by the diffie hellman public key + pub(crate) encrypted_note: [u8; ENCRYPTED_NOTE_SIZE + aead::MAC_SIZE], + + /// Keys used to encrypt the note. These are stored in encrypted format + /// using the spender's outgoing viewing key, and allow the spender to + /// decrypt it. The receiver (owner) doesn't need these, as they can decrypt + /// the note directly using their incoming viewing key. 
+ pub(crate) note_encryption_keys: [u8; ENCRYPTED_SHARED_KEY_SIZE + aead::MAC_SIZE], +} + +impl PartialEq for MerkleNote { + fn eq(&self, other: &MerkleNote) -> bool { + self.note_commitment == other.note_commitment + && self.value_commitment == other.value_commitment + } +} + +impl MerkleNote { + pub fn new( + spender_key: &SaplingKey, + note: &Note, + value_commitment: &ValueCommitment, + diffie_hellman_keys: &(J::Fs, edwards::Point), + ) -> MerkleNote { + let (secret_key, public_key) = diffie_hellman_keys; + + let encrypted_note = note.encrypt(&shared_secret( + &spender_key.sapling.jubjub, + secret_key, + ¬e.owner.transmission_key, + public_key, + )); + + let mut key_bytes = [0; 64]; + note.owner + .transmission_key + .write(&mut key_bytes[..32]) + .expect("transmission key should be convertible to bytes"); + + key_bytes[32..].clone_from_slice(secret_key.to_repr().as_ref()); + + let encryption_key = calculate_key_for_encryption_keys( + &spender_key.outgoing_view_key(), + &value_commitment.cm(&spender_key.sapling.jubjub).into(), + ¬e.commitment_point(), + &public_key, + ); + let mut note_encryption_keys = [0; ENCRYPTED_SHARED_KEY_SIZE + aead::MAC_SIZE]; + aead::encrypt(&encryption_key, &key_bytes, &mut note_encryption_keys); + + MerkleNote { + value_commitment: value_commitment.cm(&spender_key.sapling.jubjub).into(), + note_commitment: note.commitment_point(), + ephemeral_public_key: (*public_key).clone(), + encrypted_note, + note_encryption_keys, + } + } + + /// Load a MerkleNote from the given stream + #[allow(clippy::or_fun_call)] + pub fn read(mut reader: R, sapling: Arc>) -> io::Result { + let value_commitment = edwards::Point::::read(&mut reader, &sapling.jubjub)?; + let note_commitment = read_scalar(&mut reader).map_err(|_| { + io::Error::new( + io::ErrorKind::InvalidInput, + "Unable to convert note commitment", + ) + })?; + + let public_key_non_prime = + edwards::Point::::read(&mut reader, &sapling.jubjub)?; + let ephemeral_public_key = + 
public_key_non_prime + .as_prime_order(&sapling.jubjub) + .ok_or(io::Error::new( + io::ErrorKind::InvalidInput, + "Unable to convert note commitment", + ))?; + let mut encrypted_note = [0; ENCRYPTED_NOTE_SIZE + aead::MAC_SIZE]; + reader.read_exact(&mut encrypted_note[..])?; + let mut note_encryption_keys = [0; ENCRYPTED_SHARED_KEY_SIZE + aead::MAC_SIZE]; + reader.read_exact(&mut note_encryption_keys[..])?; + Ok(MerkleNote { + value_commitment, + note_commitment, + ephemeral_public_key, + encrypted_note, + note_encryption_keys, + }) + } + + pub fn write(&self, mut writer: &mut W) -> io::Result<()> { + self.value_commitment.write(&mut writer)?; + writer.write_all(self.note_commitment.to_repr().as_ref())?; + self.ephemeral_public_key.write(&mut writer)?; + writer.write_all(&self.encrypted_note[..])?; + writer.write_all(&self.note_encryption_keys[..])?; + Ok(()) + } + + pub fn merkle_hash(&self) -> MerkleNoteHash { + MerkleNoteHash::new(self.note_commitment) + } + + pub fn decrypt_note_for_owner( + &self, + owner_view_key: &IncomingViewKey, + ) -> Result, errors::NoteError> { + let shared_secret = owner_view_key.shared_secret(&self.ephemeral_public_key); + let note = + Note::from_owner_encrypted(owner_view_key, &shared_secret, &self.encrypted_note)?; + note.verify_commitment(self.note_commitment)?; + Ok(note) + } + + pub fn decrypt_note_for_spender( + &self, + spender_key: &OutgoingViewKey, + ) -> Result, errors::NoteError> { + let encryption_key = calculate_key_for_encryption_keys( + spender_key, + &self.value_commitment, + &self.note_commitment, + &self.ephemeral_public_key, + ); + + let mut note_encryption_keys = [0; ENCRYPTED_SHARED_KEY_SIZE]; + aead::decrypt( + &encryption_key, + &self.note_encryption_keys, + &mut note_encryption_keys, + )?; + + let transmission_key = PublicAddress::load_transmission_key( + &spender_key.sapling.jubjub, + ¬e_encryption_keys[..32], + )?; + let secret_key = read_scalar(¬e_encryption_keys[32..])?; + let shared_key = shared_secret( + 
&spender_key.sapling.jubjub, + &secret_key, + &transmission_key, + &self.ephemeral_public_key, + ); + let note = Note::from_spender_encrypted( + spender_key.sapling.clone(), + transmission_key, + &shared_key, + &self.encrypted_note, + )?; + note.verify_commitment(self.note_commitment)?; + Ok(note) + } +} + +pub(crate) fn sapling_auth_path( + witness: &dyn WitnessTrait, +) -> Vec> { + let mut auth_path = vec![]; + for element in &witness.get_auth_path() { + let sapling_element = match element { + WitnessNode::Left(ref sibling_hash) => Some((*sibling_hash, false)), + WitnessNode::Right(ref sibling_hash) => Some((*sibling_hash, true)), + }; + auth_path.push(sapling_element); + } + auth_path +} + +/// Calculate the position of a leaf node from it's witness, assuming the +/// auth path is from a fixed-sized complete merkle tree. +/// +/// This can't just be a default method on the Witness trait, since it relies +/// on an assumption that the tree is complete and binary. And I didn't feel +/// like making Witness a trait since it's otherwise very simple. +/// So this hacky function gets to live here. +pub(crate) fn position( + witness: &dyn WitnessTrait, +) -> u64 { + let mut pos = 0; + for (i, element) in witness.get_auth_path().iter().enumerate() { + if let WitnessNode::Right(_) = element { + pos |= 1 << i; + } + } + pos +} + +/// Calculate the key used to encrypt the shared keys for a ReceiptProof or +/// ReceiptParams. +/// +/// The shared keys are encrypted using the outgoing viewing key for the +/// spender (the person creating the note owned by the receiver). This gets +/// combined with hashes of the receipt values to make a key unique to, and +/// signed by, the receipt. +/// +/// Naming is getting a bit far-fetched here because it's the keys used to +/// encrypt other keys. Keys, all the way down! 
+fn calculate_key_for_encryption_keys( + outgoing_view_key: &OutgoingViewKey, + value_commitment: &edwards::Point, + note_commitment: &J::Fr, + public_key: &edwards::Point, +) -> [u8; 32] { + let mut key_input = [0u8; 128]; + key_input[0..32].copy_from_slice(&outgoing_view_key.view_key); + value_commitment.write(&mut key_input[32..64]).unwrap(); + key_input[64..96].copy_from_slice(note_commitment.to_repr().as_ref()); + public_key.write(&mut key_input[96..128]).unwrap(); + + Blake2b::new() + .hash_length(32) + .personal(SHARED_KEY_PERSONALIZATION) + .hash(&key_input) + .as_bytes() + .try_into() + .expect("has has incorrect length") +} + +#[cfg(test)] +mod test { + use super::MerkleNote; + use crate::{ + keys::SaplingKey, + note::{Memo, Note}, + sapling_bls12, + }; + + use pairing::bls12_381::Bls12; + use rand::prelude::*; + use rand::{thread_rng, Rng}; + use zcash_primitives::{ + jubjub::{fs::Fs, ToUniform}, + primitives::ValueCommitment, + }; + + #[test] + fn test_view_key_encryption() { + let sapling = &*sapling_bls12::SAPLING; + let spender_key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let receiver_key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let note = Note::new( + sapling.clone(), + receiver_key.generate_public_address(), + 42, + Memo([0; 32]), + ); + let diffie_hellman_keys = note.owner.generate_diffie_hellman_keys(&sapling.jubjub); + + let mut buffer = [0u8; 64]; + thread_rng().fill(&mut buffer[..]); + + let value_commitment_randomness: Fs = Fs::to_uniform(&buffer[..]); + + let value_commitment = ValueCommitment:: { + value: note.value, + randomness: value_commitment_randomness, + }; + + let merkle_note = + MerkleNote::new(&spender_key, ¬e, &value_commitment, &diffie_hellman_keys); + merkle_note + .decrypt_note_for_owner(receiver_key.incoming_view_key()) + .expect("should be able to decrypt note"); + merkle_note + .decrypt_note_for_spender(spender_key.outgoing_view_key()) + .expect("should be able to decrypt note"); + } + + 
#[test] + fn test_receipt_invalid_commitment() { + let sapling = &*sapling_bls12::SAPLING; + let spender_key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let note = Note::new( + sapling.clone(), + spender_key.generate_public_address(), + 42, + Memo([0; 32]), + ); + let diffie_hellman_keys = note.owner.generate_diffie_hellman_keys(&sapling.jubjub); + + let mut buffer = [0u8; 64]; + thread_rng().fill(&mut buffer[..]); + + let value_commitment_randomness: Fs = Fs::to_uniform(&buffer[..]); + + let value_commitment = ValueCommitment:: { + value: note.value, + randomness: value_commitment_randomness, + }; + + let mut merkle_note = + MerkleNote::new(&spender_key, ¬e, &value_commitment, &diffie_hellman_keys); + merkle_note + .decrypt_note_for_owner(spender_key.incoming_view_key()) + .expect("should be able to decrypt note"); + merkle_note + .decrypt_note_for_spender(spender_key.outgoing_view_key()) + .expect("should be able to decrypt note"); + + // should fail if note_commitment doesn't match + let note_randomness: u64 = random(); + merkle_note.note_commitment = pairing::bls12_381::Fr::from(note_randomness); + assert!(merkle_note + .decrypt_note_for_owner(spender_key.incoming_view_key()) + .is_err()); + assert!(merkle_note + .decrypt_note_for_spender(spender_key.outgoing_view_key()) + .is_err()); + } +} diff --git a/ironfish-rust/src/merkle_note_hash.rs b/ironfish-rust/src/merkle_note_hash.rs new file mode 100644 index 0000000000..d1557ee3ed --- /dev/null +++ b/ironfish-rust/src/merkle_note_hash.rs @@ -0,0 +1,60 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/// Implement a merkle note to store all the values that need to go into a merkle tree. +/// A tree containing these values can serve as a snapshot of the entire chain. 
+use super::{serializing::read_scalar, Sapling}; + +use ff::{BitIterator, PrimeField}; + +use std::io; +use zcash_primitives::jubjub::JubjubEngine; +use zcash_primitives::pedersen_hash::{pedersen_hash, Personalization}; + +#[derive(Clone, Debug, Eq)] +pub struct MerkleNoteHash(pub J::Fr); + +impl PartialEq for MerkleNoteHash { + fn eq(&self, other: &MerkleNoteHash) -> bool { + self.0.eq(&other.0) + } +} + +impl MerkleNoteHash { + // Tuple struct constructors can't be used with type aliases, + // so explicitly define one here + pub fn new(fr: J::Fr) -> MerkleNoteHash { + MerkleNoteHash(fr) + } + + pub fn read(reader: &mut R) -> io::Result> { + let res = read_scalar(reader).map_err(|_| { + io::Error::new(io::ErrorKind::InvalidInput, "Unable to convert note hash") + }); + Ok(MerkleNoteHash(res.unwrap())) + } + + pub fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(self.0.to_repr().as_ref()) + } + + /// Hash two child hashes together to calculate the hash of the + /// new parent + pub fn combine_hash(sapling: &Sapling, depth: usize, left: &J::Fr, right: &J::Fr) -> J::Fr { + let mut lhs: Vec = BitIterator::::new(left.to_repr()).collect(); + let mut rhs: Vec = BitIterator::::new(right.to_repr()).collect(); + lhs.reverse(); + rhs.reverse(); + let num_bits = ::NUM_BITS as usize; + pedersen_hash::( + Personalization::MerkleTree(depth), + lhs.into_iter() + .take(num_bits) + .chain(rhs.into_iter().take(num_bits)), + &sapling.jubjub, + ) + .to_xy() + .0 + } +} diff --git a/ironfish-rust/src/note.rs b/ironfish-rust/src/note.rs new file mode 100644 index 0000000000..3246da9ca9 --- /dev/null +++ b/ironfish-rust/src/note.rs @@ -0,0 +1,397 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +use super::{ + errors, + keys::{IncomingViewKey, PublicAddress, SaplingKey}, + nullifiers::Nullifier, + serializing::{aead, read_scalar, scalar_to_bytes}, + Sapling, +}; +use byteorder::{ByteOrder, LittleEndian, ReadBytesExt, WriteBytesExt}; +use ff::PrimeField; +use rand::{thread_rng, Rng}; +use zcash_primitives::primitives::Note as SaplingNote; + +use std::{fmt, io, io::Read, sync::Arc}; +use zcash_primitives::jubjub::{edwards, JubjubEngine, PrimeOrder, ToUniform}; + +pub const ENCRYPTED_NOTE_SIZE: usize = 83; + +/// Memo field on a Note. Used to encode transaction IDs or other information +/// about the transaction. +#[derive(Shrinkwrap, Debug, Clone, Copy, PartialEq)] +pub struct Memo(pub [u8; 32]); + +impl From<&str> for Memo { + fn from(string: &str) -> Self { + let memo_as_bytes = string.as_bytes(); + let num_to_clone = std::cmp::min(memo_as_bytes.len(), 32); + let mut memo_bytes = [0; 32]; + memo_bytes[..num_to_clone].clone_from_slice(&memo_as_bytes[..num_to_clone]); + Memo(memo_bytes) + } +} + +impl From for Memo { + fn from(string: String) -> Self { + Memo::from(string.as_str()) + } +} + +impl fmt::Display for Memo { + /// This can be lossy because it assumes that the + /// memo is in valid UTF-8 format. + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", String::from_utf8_lossy(&self.0)) + } +} + +/// A note (think bank note) represents a value in the owner's "account". +/// When spending, proof that the note exists in the tree needs to be provided, +/// along with a nullifier key that is made public so the owner cannot attempt +/// to spend that note again.447903 +/// +/// When receiving funds, a new note needs to be created for the new owner +/// to hold those funds. +#[derive(Clone)] +pub struct Note { + pub(crate) sapling: Arc>, + /// A public address for the owner of the note. One owner can have multiple public addresses, + /// each associated with a different diversifier. 
+ pub(crate) owner: PublicAddress, + + /// Value this note represents. + pub(crate) value: u64, + + /// A random value generated when the note is constructed. + /// This helps create zero knowledge around the note, + /// allowing the owner to prove they have the note without revealing + /// anything else about it. + pub(crate) randomness: J::Fs, + + /// Arbitrary note the spender can supply when constructing a spend so the + /// receiver has some record from whence it came. + /// Note: While this is encrypted with the output, it is not encoded into + /// the proof in any way. + pub(crate) memo: Memo, +} + +impl<'a, J: JubjubEngine + pairing::MultiMillerLoop> Note { + /// Construct a new Note. + pub fn new(sapling: Arc>, owner: PublicAddress, value: u64, memo: Memo) -> Self { + let mut buffer = [0u8; 64]; + thread_rng().fill(&mut buffer[..]); + + let randomness: J::Fs = J::Fs::to_uniform(&buffer[..]); + + Self { + sapling, + owner, + value, + randomness, + memo, + } + } + + /// Read a note from the given stream IN PLAINTEXT. + /// + /// You probably don't want to use this unless you are transmitting + /// across nodejs threads in memory. + pub fn read( + mut reader: R, + sapling: Arc>, + ) -> Result { + let owner = PublicAddress::read(sapling.clone(), &mut reader)?; + let value = reader.read_u64::()?; + let randomness: J::Fs = read_scalar(&mut reader)?; + + let mut memo_vec = vec![]; + let mut memo = Memo([0; 32]); + reader.read_to_end(&mut memo_vec)?; + assert_eq!(memo_vec.len(), 32); + memo.0.copy_from_slice(&memo_vec[..]); + + Ok(Self { + sapling, + owner, + value, + randomness, + memo, + }) + } + + /// Write the note to the given stream IN PLAINTEXT. + /// + /// This should generally never be used to serialize to disk or the network. + /// It is primarily added as a device for transmitting the note across + /// thread boundaries. 
+ pub fn write(&self, mut writer: &mut W) -> io::Result<()> { + self.owner.write(&mut writer)?; + writer.write_u64::(self.value)?; + writer.write_all(self.randomness.to_repr().as_ref())?; + writer.write_all(&self.memo.0)?; + Ok(()) + } + + /// Create a note from its encrypted representation, given the owner's + /// view key. + /// + /// The note is stored on the ReceiptProof in encrypted form. The spender + /// encrypts it when they construct the receipt using a shared secret + /// derived from the owner's public key. + /// + /// This function allows the owner to decrypt the note using the derived + /// shared secret and their own view key. + pub fn from_owner_encrypted( + owner_view_key: &'a IncomingViewKey, + shared_secret: &[u8; 32], + encrypted_bytes: &[u8; ENCRYPTED_NOTE_SIZE + aead::MAC_SIZE], + ) -> Result { + let (diversifier_bytes, randomness, value, memo) = + Note::::decrypt_note_parts(shared_secret, encrypted_bytes)?; + let owner = owner_view_key.public_address(&diversifier_bytes)?; + + Ok(Note { + sapling: owner_view_key.sapling.clone(), + owner, + value, + randomness, + memo, + }) + } + + /// Create a note from its encrypted representation, given the spender's + /// view key. + /// + /// The note is stored on the ReceiptProof in encrypted form. The spender + /// encrypts it when they construct the receipt using a shared secret + /// derived from the owner's public key. + /// + /// This function allows the owner to decrypt the note using the derived + /// shared secret and their own view key. 
+ pub(crate) fn from_spender_encrypted( + sapling: Arc>, + transmission_key: edwards::Point, + shared_secret: &[u8; 32], + encrypted_bytes: &[u8; ENCRYPTED_NOTE_SIZE + aead::MAC_SIZE], + ) -> Result { + let (diversifier_bytes, randomness, value, memo) = + Note::::decrypt_note_parts(shared_secret, encrypted_bytes)?; + let (diversifier, diversifier_point) = + PublicAddress::load_diversifier(&sapling.jubjub, &diversifier_bytes[..])?; + let owner = PublicAddress { + diversifier, + diversifier_point, + transmission_key, + }; + + Ok(Note { + sapling, + owner, + value, + randomness, + memo, + }) + } + + pub fn value(&self) -> u64 { + self.value + } + + pub fn memo(&self) -> Memo { + self.memo + } + + pub fn owner(&self) -> PublicAddress { + self.owner.clone() + } + + /// Send encrypted form of the note, which is what gets publicly stored on + /// the tree. Only someone with the incoming viewing key for the note can + /// actually read the contents. + pub fn encrypt(&self, shared_secret: &[u8; 32]) -> [u8; ENCRYPTED_NOTE_SIZE + aead::MAC_SIZE] { + let mut bytes_to_encrypt = [0; ENCRYPTED_NOTE_SIZE]; + bytes_to_encrypt[..11].copy_from_slice(&self.owner.diversifier.0[..]); + bytes_to_encrypt[11..43].clone_from_slice(self.randomness.to_repr().as_ref()); + + LittleEndian::write_u64_into(&[self.value], &mut bytes_to_encrypt[43..51]); + bytes_to_encrypt[51..].copy_from_slice(&self.memo[..]); + let mut encrypted_bytes = [0; ENCRYPTED_NOTE_SIZE + aead::MAC_SIZE]; + aead::encrypt(shared_secret, &bytes_to_encrypt, &mut encrypted_bytes); + + encrypted_bytes + } + + /// Compute the nullifier for this note, given the private key of its owner. + /// + /// The nullifier is a series of bytes that is published by the note owner + /// only at the time the note is spent. This key is collected in a massive + /// 'nullifier set', preventing double-spend. 
+ pub fn nullifier(&self, private_key: &SaplingKey, position: u64) -> Nullifier { + let mut result = [0; 32]; + let result_as_vec = self.sapling_note().nf( + &private_key.sapling_viewing_key(), + position, + &self.sapling.jubjub, + ); + assert_eq!(result_as_vec.len(), 32); + result[0..32].copy_from_slice(&result_as_vec[0..32]); + result + } + + /// Get the commitment hash for this note. This encapsulates all the values + /// in the note, including the randomness and converts them to a byte + /// format. This hash is what gets used for the leaf nodes in a Merkle Tree. + pub fn commitment(&self) -> [u8; 32] { + scalar_to_bytes(&self.commitment_point()) + } + + /// Compute the commitment of this note. This is essentially a hash of all + /// the note values, including randomness. + /// + /// The owner can publish this value to commit to the fact that the note + /// exists, without revealing any of the values on the note until later. + pub(crate) fn commitment_point(&self) -> J::Fr { + self.sapling_note().cm(&self.sapling.jubjub) + } + + /// Verify that the note's commitment matches the one passed in + pub(crate) fn verify_commitment(&self, commitment: J::Fr) -> Result<(), errors::NoteError> { + if commitment == self.commitment_point() { + Ok(()) + } else { + Err(errors::NoteError::InvalidCommitment) + } + } + + fn decrypt_note_parts( + shared_secret: &[u8; 32], + encrypted_bytes: &[u8; ENCRYPTED_NOTE_SIZE + aead::MAC_SIZE], + ) -> Result<([u8; 11], J::Fs, u64, Memo), errors::NoteError> { + let mut plaintext_bytes = [0; ENCRYPTED_NOTE_SIZE]; + aead::decrypt(shared_secret, encrypted_bytes, &mut plaintext_bytes)?; + + let mut reader = plaintext_bytes[..].as_ref(); + let mut diversifier_bytes = [0; 11]; + reader.read_exact(&mut diversifier_bytes[..])?; + + let randomness: J::Fs = read_scalar(&mut reader)?; + let value = reader.read_u64::()?; + let mut memo_vec = vec![]; + let mut memo = Memo([0; 32]); + reader.read_to_end(&mut memo_vec)?; + assert_eq!(memo_vec.len(), 32); 
+ memo.0.copy_from_slice(&memo_vec[..]); + Ok((diversifier_bytes, randomness, value, memo)) + } + + /// The zcash_primitives version of the Note API is kind of klunky with + /// annoying variable names and exposed values, but it contains the methods + /// used to calculate nullifier and commitment. + /// + /// This is somewhat suboptimal with extra calculations and bytes being + /// passed around. I'm not worried about it yet, since only notes actively + /// being spent have to create these. + fn sapling_note(&self) -> SaplingNote { + SaplingNote { + value: self.value, + g_d: self.owner.diversifier.g_d(&self.sapling.jubjub).unwrap(), + pk_d: self.owner.transmission_key.clone(), + r: self.randomness, + } + } +} + +#[cfg(test)] +mod test { + use super::{Memo, Note}; + use crate::{ + keys::{shared_secret, SaplingKey}, + sapling_bls12, + }; + use pairing::bls12_381::Bls12; + + #[test] + fn test_plaintext_serialization() { + let sapling = &*sapling_bls12::SAPLING; + let owner_key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let public_address = owner_key.generate_public_address(); + let note = Note::new(sapling.clone(), public_address, 42, "serialize me".into()); + let mut serialized = Vec::new(); + note.write(&mut serialized) + .expect("Should serialize cleanly"); + + let note2 = + Note::read(&serialized[..], sapling.clone()).expect("It should deserialize cleanly"); + assert_eq!(note2.owner.public_address(), note.owner.public_address()); + assert_eq!(note2.value, 42); + assert_eq!(note2.randomness, note.randomness); + assert_eq!(note2.memo, note.memo); + + let mut serialized2 = Vec::new(); + note2 + .write(&mut serialized2) + .expect("Should still serialize cleanly"); + assert_eq!(serialized, serialized2) + } + + #[test] + fn test_note_encryption() { + let sapling = &*sapling_bls12::SAPLING; + let owner_key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let public_address = owner_key.generate_public_address(); + let (dh_secret, dh_public) = 
public_address.generate_diffie_hellman_keys(&sapling.jubjub); + let public_shared_secret = shared_secret( + &sapling.jubjub, + &dh_secret, + &public_address.transmission_key, + &dh_public, + ); + let note = Note::new(sapling.clone(), public_address, 42, Memo([0; 32])); + let encryption_result = note.encrypt(&public_shared_secret); + + let private_shared_secret = owner_key.incoming_view_key().shared_secret(&dh_public); + assert_eq!(private_shared_secret, public_shared_secret); + + let restored_note = Note::from_owner_encrypted( + owner_key.incoming_view_key(), + &private_shared_secret, + &encryption_result, + ) + .expect("Should be able to decrypt bytes"); + assert!( + restored_note.owner.public_address().as_ref() == note.owner.public_address().as_ref() + ); + assert!(note.value == restored_note.value); + assert!(note.randomness == restored_note.randomness); + assert!(note.memo == restored_note.memo); + + let spender_decrypted = Note::from_spender_encrypted( + sapling.clone(), + note.owner.transmission_key.clone(), + &public_shared_secret, + &encryption_result, + ) + .expect("Should be able to load from transmission key"); + assert!( + spender_decrypted.owner.public_address().as_ref() + == note.owner.public_address().as_ref() + ); + assert!(note.value == spender_decrypted.value); + assert!(note.randomness == spender_decrypted.randomness); + assert!(note.memo == spender_decrypted.memo); + } + + #[test] + fn construct_memo_from_string() { + let memo = Memo::from("a memo"); + assert_eq!(&memo.0[..6], b"a memo"); + let string = "a memo".to_string(); + let memo = Memo::from(&*string); + assert_eq!(&memo.0[..6], b"a memo"); + let memo = Memo::from(string); + assert_eq!(&memo.0[..6], b"a memo"); + } +} diff --git a/ironfish-rust/src/nullifiers.rs b/ironfish-rust/src/nullifiers.rs new file mode 100644 index 0000000000..dff46605be --- /dev/null +++ b/ironfish-rust/src/nullifiers.rs @@ -0,0 +1,7 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * 
License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +//! Lightweight wrapper of 32 byte nullifiers, which can be stored in a merkle_notes Merkle Tree. + +pub type Nullifier = [u8; 32]; diff --git a/ironfish-rust/src/receiving.rs b/ironfish-rust/src/receiving.rs new file mode 100644 index 0000000000..f29da92d38 --- /dev/null +++ b/ironfish-rust/src/receiving.rs @@ -0,0 +1,255 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use super::{ + errors, is_small_order, keys::SaplingKey, merkle_note::MerkleNote, note::Note, Sapling, +}; +use bellman::groth16; +use ff::Field; +use rand::{rngs::OsRng, thread_rng, Rng}; +use zcash_primitives::jubjub::{JubjubEngine, ToUniform}; +use zcash_primitives::primitives::ValueCommitment; +use zcash_proofs::circuit::sapling::Output; + +use std::{io, sync::Arc}; + +/// Parameters used when constructing proof that a new note exists. The owner +/// of this note is the recipient of funds in a transaction. The note is signed +/// with the owners public key so only they can read it. +pub struct ReceiptParams { + /// Parameters for a Jubjub BLS12 curve. This is essentially just a global + /// value. + pub(crate) sapling: Arc>, + + /// Proof that the output circuit was valid and successful + pub(crate) proof: groth16::Proof, + + /// Randomness used to create the ValueCommitment point on the Merkle Note + pub(crate) value_commitment_randomness: J::Fs, + + /// Merkle note containing all the values verified by the proof. 
These values + /// are shared on the blockchain and can be snapshotted into a Merkle Tree + pub(crate) merkle_note: MerkleNote, +} + +impl ReceiptParams { + /// Construct the parameters for proving a new specific note + pub(crate) fn new( + sapling: Arc>, + spender_key: &SaplingKey, + note: &Note, + ) -> Result, errors::SaplingProofError> { + let diffie_hellman_keys = note.owner.generate_diffie_hellman_keys(&sapling.jubjub); + + let mut buffer = [0u8; 64]; + thread_rng().fill(&mut buffer[..]); + + let value_commitment_randomness: J::Fs = J::Fs::to_uniform(&buffer[..]); + + let value_commitment = ValueCommitment:: { + value: note.value, + randomness: value_commitment_randomness, + }; + + let merkle_note = + MerkleNote::new(spender_key, note, &value_commitment, &diffie_hellman_keys); + + let output_circuit = Output { + params: &sapling.jubjub, + value_commitment: Some(value_commitment), + payment_address: Some(note.owner.sapling_payment_address()), + commitment_randomness: Some(note.randomness), + esk: Some(diffie_hellman_keys.0), + }; + let proof = + groth16::create_random_proof(output_circuit, &sapling.receipt_params, &mut OsRng)?; + + let receipt_proof = ReceiptParams { + sapling, + proof, + merkle_note, + value_commitment_randomness, + }; + + Ok(receipt_proof) + } + + /// Output the committed ReceiptProof for this receiving calculation. + /// + /// The ReceiptProof is the publicly visible form of the new note, not + /// including any keys or intermediate working values. + /// + /// Verifies the proof before returning to prevent posting broken + /// transactions. + pub fn post(&self) -> Result, errors::SaplingProofError> { + let receipt_proof = ReceiptProof { + proof: self.proof.clone(), + merkle_note: self.merkle_note.clone(), + }; + receipt_proof.verify_proof(&self.sapling)?; + + Ok(receipt_proof) + } + + /// Write the signature of this proof to the provided writer. + /// + /// The signature is used by the transaction to calculate the signature + /// hash. 
Having this data essentially binds the note to the transaction, + /// proving that it is actually part of that transaction. + pub(crate) fn serialize_signature_fields(&self, mut writer: W) -> io::Result<()> { + self.proof.write(&mut writer)?; + self.merkle_note.write(&mut writer)?; + Ok(()) + } +} + +/// The publicly visible values of a received note in a transaction. These +/// values are calculated by the spender using only the public address of the +/// owner of this new note. +/// +/// This is the variation of a Receipt that gets serialized to bytes and can +/// be loaded from bytes. +#[derive(Clone)] +pub struct ReceiptProof { + /// Proof that the output circuit was valid and successful + pub(crate) proof: groth16::Proof, + + pub(crate) merkle_note: MerkleNote, +} + +impl ReceiptProof { + /// Load a ReceiptProof from a Read implementation( e.g: socket, file) + /// This is the main entry-point when reconstructing a serialized + /// transaction. + pub fn read( + sapling: Arc>, + mut reader: R, + ) -> Result { + let proof = groth16::Proof::read(&mut reader)?; + let merkle_note = MerkleNote::read(&mut reader, sapling)?; + + Ok(ReceiptProof { proof, merkle_note }) + } + + /// Stow the bytes of this ReceiptProof in the given writer. + pub fn write(&self, writer: W) -> io::Result<()> { + self.serialize_signature_fields(writer) + } + + /// Verify that the proof demonstrates knowledge that a note exists with + /// the value_commitment, public_key, and note_commitment on this proof. 
+ pub fn verify_proof(&self, sapling: &Sapling) -> Result<(), errors::SaplingProofError> { + if is_small_order(&sapling.jubjub, &self.merkle_note.value_commitment) + || is_small_order(&sapling.jubjub, &self.merkle_note.ephemeral_public_key) + { + return Err(errors::SaplingProofError::VerificationFailed); + } + let mut public_input = [J::Fr::zero(); 5]; + let (x, y) = self.merkle_note.value_commitment.to_xy(); + public_input[0] = x; + public_input[1] = y; + + let (x, y) = self.merkle_note.ephemeral_public_key.to_xy(); + public_input[2] = x; + public_input[3] = y; + + public_input[4] = self.merkle_note.note_commitment; + + match groth16::verify_proof( + &sapling.receipt_verifying_key, + &self.proof, + &public_input[..], + ) { + Ok(true) => Ok(()), + _ => Err(errors::SaplingProofError::VerificationFailed), + } + } + /// Get a MerkleNote, which can be used as a node in a Merkle Tree. + pub fn merkle_note(&self) -> MerkleNote { + self.merkle_note.clone() + } + + /// Write the signature of this proof to the provided writer. + /// + /// The signature is used by the transaction to calculate the signature + /// hash. Having this data essentially binds the note to the transaction, + /// proving that it is actually part of that transaction. 
+ pub(crate) fn serialize_signature_fields(&self, mut writer: W) -> io::Result<()> { + self.proof.write(&mut writer)?; + self.merkle_note.write(&mut writer)?; + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::{ReceiptParams, ReceiptProof}; + use crate::{ + keys::SaplingKey, + note::{Memo, Note}, + sapling_bls12, + }; + use ff::PrimeField; + use pairing::bls12_381::Bls12; + + #[test] + fn test_receipt_round_trip() { + let sapling = &*sapling_bls12::SAPLING; + let spender_key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let note = Note::new( + sapling.clone(), + spender_key.generate_public_address(), + 42, + Memo([0; 32]), + ); + + let receipt = ReceiptParams::new(sapling.clone(), &spender_key, ¬e) + .expect("should be able to create receipt proof"); + let proof = receipt + .post() + .expect("Should be able to post receipt proof"); + proof + .verify_proof(&sapling) + .expect("proof should check out"); + + // test serialization + let mut serialized_proof = vec![]; + proof + .write(&mut serialized_proof) + .expect("Should be able to serialize proof"); + let read_back_proof: ReceiptProof = + ReceiptProof::read(sapling.clone(), &mut serialized_proof[..].as_ref()) + .expect("Should be able to deserialize valid proof"); + + assert_eq!(proof.proof.a, read_back_proof.proof.a); + assert_eq!(proof.proof.b, read_back_proof.proof.b); + assert_eq!(proof.proof.c, read_back_proof.proof.c); + assert_eq!( + proof.merkle_note.value_commitment.to_xy(), + read_back_proof.merkle_note.value_commitment.to_xy() + ); + assert_eq!( + proof.merkle_note.note_commitment.to_repr(), + read_back_proof.merkle_note.note_commitment.to_repr() + ); + assert_eq!( + proof.merkle_note.ephemeral_public_key.to_xy(), + read_back_proof.merkle_note.ephemeral_public_key.to_xy() + ); + assert_eq!( + proof.merkle_note.encrypted_note[..], + read_back_proof.merkle_note.encrypted_note[..] 
+ ); + assert_eq!( + proof.merkle_note.note_encryption_keys[..], + read_back_proof.merkle_note.note_encryption_keys[..] + ); + + let mut serialized_again = vec![]; + read_back_proof + .write(&mut serialized_again) + .expect("should be able to serialize proof again"); + assert_eq!(serialized_proof, serialized_again); + } +} diff --git a/ironfish-rust/src/sapling_bls12.rs b/ironfish-rust/src/sapling_bls12.rs new file mode 100644 index 0000000000..8edf7b6152 --- /dev/null +++ b/ironfish-rust/src/sapling_bls12.rs @@ -0,0 +1,40 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +pub use pairing::bls12_381::{Bls12, Fr}; +use std::sync::Arc; + +pub type Key = super::SaplingKey; +pub type IncomingViewKey = super::IncomingViewKey; +pub type OutgoingViewKey = super::OutgoingViewKey; + +pub type PublicAddress = super::PublicAddress; +pub type ViewKeys = super::ViewKeys; +pub type Address = super::PublicAddress; +pub type Sapling = super::Sapling; +pub type ProposedTransaction = super::ProposedTransaction; +pub type ProposedSpend = super::SpendParams; +pub type Transaction = super::Transaction; +pub type ReceiptProof = super::ReceiptProof; +pub type SimpleTransaction = super::SimpleTransaction; +pub type SpendProof = super::SpendProof; +pub type Note = super::Note; +pub type MerkleNote = super::MerkleNote; +pub type MerkleNoteHash = super::MerkleNoteHash; + +// Loads the Sapling object once when dereferenced, +// then reuses the reference on future calls. +lazy_static! { + pub static ref SAPLING: Arc = Arc::new(load()); +} + +/// Load a sapling object configured to a BLS12 jubjub curve. This is currently +/// the only pairing for which a jubjub curve has been defined, and is the +/// default implementation. 
+/// +/// Provided as a convenience method so clients don't have to depend +/// explicitly on zcash_primitives just to define a JubjubBls12 point. +fn load() -> Sapling { + Sapling::load(zcash_primitives::jubjub::JubjubBls12::new()) +} diff --git a/ironfish-rust/src/sapling_params/sapling-output.params b/ironfish-rust/src/sapling_params/sapling-output.params new file mode 100644 index 0000000000..616a397817 Binary files /dev/null and b/ironfish-rust/src/sapling_params/sapling-output.params differ diff --git a/ironfish-rust/src/sapling_params/sapling-spend.params b/ironfish-rust/src/sapling_params/sapling-spend.params new file mode 100644 index 0000000000..c7528bf733 Binary files /dev/null and b/ironfish-rust/src/sapling_params/sapling-spend.params differ diff --git a/ironfish-rust/src/serializing.rs b/ironfish-rust/src/serializing.rs new file mode 100644 index 0000000000..f5c26cd7ce --- /dev/null +++ b/ironfish-rust/src/serializing.rs @@ -0,0 +1,148 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/// Helper functions to convert pairing parts to bytes +/// +/// The traits in the pairing and zcash_primitives libraries +/// all have functions for serializing, but their interface +/// can be a bit clunky if you're just working with bytearrays. 
+use super::errors; +use ff::PrimeField; + +use std::io; +use zcash_primitives::jubjub::{edwards, JubjubEngine, PrimeOrder}; + +/// convert an edwards point of prime order to a bytes representation +pub(crate) fn point_to_bytes( + point: &edwards::Point, +) -> Result<[u8; 32], errors::SaplingKeyError> { + let mut result: [u8; 32] = [0; 32]; + point.write(&mut result[..])?; + Ok(result) +} + +/// convert a scalar to a bytes representation +pub(crate) fn scalar_to_bytes(scalar: &F) -> [u8; 32] { + let mut result = [0; 32]; + result[..].clone_from_slice(scalar.to_repr().as_ref()); + + result +} + +#[allow(dead_code)] +pub(crate) fn bytes_to_scalar(bytes: &[u8; 32]) -> F { + read_scalar(bytes[..].as_ref()) + .expect("Should be able to construct prime field from hash bytes") +} + +pub(crate) fn read_scalar( + mut reader: R, +) -> Result { + let mut fr_repr = F::Repr::default(); + reader.read_exact(fr_repr.as_mut())?; + let scalar = F::from_repr(fr_repr).ok_or(errors::SaplingKeyError::IOError)?; + Ok(scalar) +} + +/// Output the bytes as a hexadecimal String +pub(crate) fn bytes_to_hex(bytes: &[u8]) -> String { + bytes + .iter() + .map(|b| format!("{:02x}", b)) + .collect::>() + .join("") +} + +/// Output the hexadecimal String as bytes +pub(crate) fn hex_to_bytes(hex: &str) -> Result, ()> { + let mut bite_iterator = hex.as_bytes().iter().map(|b| match b { + b'0'..=b'9' => Ok(b - b'0'), + b'a'..=b'f' => Ok(b - b'a' + 10), + b'A'..=b'F' => Ok(b - b'A' + 10), + _ => Err(()), + }); + let mut bytes = Vec::new(); + let mut high = bite_iterator.next(); + let mut low = bite_iterator.next(); + loop { + match (high, low) { + (Some(Ok(h)), Some(Ok(l))) => bytes.push(h << 4 | l), + (None, None) => break, + _ => return Err(()), + } + high = bite_iterator.next(); + low = bite_iterator.next(); + } + + Ok(bytes) +} + +pub(crate) mod aead { + use crate::errors; + use crypto::{ + aead::{AeadDecryptor, AeadEncryptor}, + chacha20poly1305::ChaCha20Poly1305, + }; + + pub const MAC_SIZE: 
usize = 16; + + /// Encrypt the plaintext using the given key, and append the MAC tag to the + /// end of the output array to be decrypted and checked in one step below. + /// + /// This is just a facade around the ChaCha20Poly1305 struct. It ignores + /// nonce and aad and automatically stores the mac tag. + pub(crate) fn encrypt(key: &[u8], plaintext: &[u8], encrypted_output: &mut [u8]) { + assert_eq!(encrypted_output.len(), plaintext.len() + MAC_SIZE); + let mut encryptor = ChaCha20Poly1305::new(key, &[0; 8], &[0; 8]); + let mut tag = [0; MAC_SIZE]; + encryptor.encrypt( + plaintext, + &mut encrypted_output[..plaintext.len()], + &mut tag, + ); + encrypted_output[plaintext.len()..].clone_from_slice(&tag); + } + + /// Decrypt the encrypted text using the given key and ciphertext, also checking + /// that the mac tag is correct. + /// + /// Returns Ok(()) if the mac matches the decrypted text, Err(()) if not + pub(crate) fn decrypt( + key: &[u8], + ciphertext: &[u8], + mut plaintext_output: &mut [u8], + ) -> Result<(), errors::NoteError> { + assert!(plaintext_output.len() == ciphertext.len() - MAC_SIZE); + let mut decryptor = ChaCha20Poly1305::new(key, &[0; 8], &[0; 8]); + let success = decryptor.decrypt( + &ciphertext[..ciphertext.len() - MAC_SIZE], + &mut plaintext_output, + &ciphertext[ciphertext.len() - MAC_SIZE..], + ); + + if success { + Ok(()) + } else { + Err(errors::NoteError::KeyError) + } + } + + #[cfg(test)] + mod test { + use super::{decrypt, encrypt}; + + #[test] + fn test_aead_facade() { + let key = b"I'm so secret!!!"; + let plaintext = b"hello world"; + let mut encrypted_text = [0; 27]; + encrypt(&key[..], &plaintext[..], &mut encrypted_text[..]); + + let mut decrypted_plaintext = [0; 11]; + decrypt(&key[..], &encrypted_text[..], &mut decrypted_plaintext[..]) + .expect("Should successfully decrypt with MAC verification"); + assert_eq!(&decrypted_plaintext, plaintext); + } + } +} diff --git a/ironfish-rust/src/spending.rs 
b/ironfish-rust/src/spending.rs new file mode 100644 index 0000000000..283d7d0e30 --- /dev/null +++ b/ironfish-rust/src/spending.rs @@ -0,0 +1,506 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use super::{ + errors, is_small_order, + keys::SaplingKey, + merkle_note::{position as witness_position, sapling_auth_path}, + merkle_note_hash::MerkleNoteHash, + note::Note, + nullifiers::Nullifier, + serializing::read_scalar, + witness::WitnessTrait, + Sapling, +}; +use bellman::gadgets::multipack; +use bellman::groth16; +use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; +use ff::Field; +use rand::{rngs::OsRng, thread_rng, Rng}; + +use zcash_proofs::circuit::sapling::Spend; + +use ff::PrimeField; +use std::{io, sync::Arc}; +use zcash_primitives::jubjub::{edwards, FixedGenerators, JubjubEngine, ToUniform, Unknown}; +use zcash_primitives::primitives::ValueCommitment; +use zcash_primitives::redjubjub; + +/// Parameters used when constructing proof that the spender owns a note with +/// a given value. +/// +/// Contains all the working values needed to construct the proof, including +/// private key of the spender. +pub struct SpendParams { + /// Parameters for a Jubjub BLS12 curve. This is essentially just a global + /// value. + pub(crate) sapling: Arc>, + + /// Private key of the person spending the note. + spender_key: SaplingKey, + + /// Used to add randomness to signature generation without leaking the key. + /// Referred to as + /// `ar` in the literature. + pub(crate) public_key_randomness: J::Fs, + + /// Proof that the spend was valid and successful for the provided owner + /// and note. + pub(crate) proof: groth16::Proof, + + /// Randomized value commitment. Sometimes referred to as + /// `cv` in the literature. It's calculated by multiplying a value by a + /// random number. 
Randomized to help maintain zero knowledge. + pub(crate) value_commitment: ValueCommitment, + + /// The public key after randomization has been applied. This is used + /// during signature verification. Referred to as + /// `rk` in the literature Calculated from the authorizing key and + /// the public_key_randomness. + pub(crate) randomized_public_key: redjubjub::PublicKey, + + /// The root hash of the tree at the time the proof was calculated. Referred to as + /// `anchor` in the literature. + pub(crate) root_hash: J::Fr, + + /// The size of the tree at the time the proof was calculated. This is not + /// incorporated into the proof, but is supplied to help miners verify the + /// root hash at the time of spend. + pub(crate) tree_size: u32, + + /// Bytes to be placed into the nullifier set to verify whether this + /// note has been previously spent. + pub(crate) nullifier: Nullifier, +} + +impl<'a, J: JubjubEngine + pairing::MultiMillerLoop> SpendParams { + /// Construct a new SpendParams attempting to spend a note at a given location + /// in the merkle tree. + /// + /// This is the only time this API thinks about the merkle tree. The witness + /// contains the root-hash at the time the witness was created and the path + /// to verify the location of that note in the tree. 
+ pub fn new( + sapling: Arc>, + spender_key: SaplingKey, + note: &Note, + witness: &dyn WitnessTrait, + ) -> Result, errors::SaplingProofError> { + // This is a sanity check; it would be caught in proving the circuit anyway, + // but this gives us more information in the event of a failure + if !witness.verify(&MerkleNoteHash::new(note.commitment_point())) { + return Err(errors::SaplingProofError::InconsistentWitness); + } + + let mut buffer = [0u8; 64]; + thread_rng().fill(&mut buffer[..]); + + let value_commitment = ValueCommitment:: { + value: note.value, + randomness: J::Fs::to_uniform(&buffer[..]), + }; + + let mut buffer = [0u8; 64]; + thread_rng().fill(&mut buffer[..]); + let public_key_randomness = J::Fs::to_uniform(&buffer[..]); + + let proof_generation_key = spender_key.sapling_proof_generation_key(); + + let spend_circuit = Spend { + params: &sapling.jubjub, + value_commitment: Some(value_commitment.clone()), + proof_generation_key: Some(proof_generation_key), + payment_address: Some(note.owner.sapling_payment_address()), + auth_path: sapling_auth_path::(witness), + commitment_randomness: Some(note.randomness), + anchor: Some(witness.root_hash()), + ar: Some(public_key_randomness), + }; + let proof = groth16::create_random_proof(spend_circuit, &sapling.spend_params, &mut OsRng)?; + + let randomized_public_key = + redjubjub::PublicKey(spender_key.authorizing_key.clone().into()).randomize( + public_key_randomness, + FixedGenerators::SpendingKeyGenerator, + &sapling.jubjub, + ); + let nullifier = note.nullifier(&spender_key, witness_position::(witness)); + + Ok(SpendParams { + sapling, + spender_key, + public_key_randomness, + proof, + value_commitment, + randomized_public_key, + root_hash: witness.root_hash(), + tree_size: witness.tree_size(), + nullifier, + }) + } + + /// Sign this spend with the stored private key, and return a SpendProof + /// suitable for serialization. 
+ /// + /// Verifies the proof before returning to prevent posting broken + /// transactions + pub fn post( + &self, + signature_hash: &[u8; 32], + ) -> Result, errors::SaplingProofError> { + let private_key = redjubjub::PrivateKey::(self.spender_key.spend_authorizing_key); + let randomized_private_key = private_key.randomize(self.public_key_randomness); + let randomized_public_key = redjubjub::PublicKey::from_private( + &randomized_private_key, + FixedGenerators::SpendingKeyGenerator, + &self.sapling.jubjub, + ); + if randomized_public_key.0 != self.randomized_public_key.0 { + return Err(errors::SaplingProofError::SigningError); + } + let mut data_to_be_signed = [0; 64]; + randomized_public_key + .0 + .write(&mut data_to_be_signed[..32])?; + data_to_be_signed[32..].copy_from_slice(&signature_hash[..]); + + let authorizing_signature = randomized_private_key.sign( + &data_to_be_signed, + &mut OsRng, + FixedGenerators::SpendingKeyGenerator, + &self.sapling.jubjub, + ); + + let spend_proof = SpendProof { + proof: self.proof.clone(), + value_commitment: self.value_commitment(), + randomized_public_key, + root_hash: self.root_hash, + tree_size: self.tree_size, + nullifier: self.nullifier, + authorizing_signature, + }; + + spend_proof.verify_proof(&self.sapling)?; + + Ok(spend_proof) + } + + /// Serialize the fields that are needed in calculating a signature to + /// the provided writer (probably a Blake2B writer) + /// + /// This signature is used by the transaction to calculate the signature hash, + /// which binds the spend to the transaction. + /// + /// It is also used during verification, which is why there is an identical + /// function on the SpendProof struct. 
+ pub(crate) fn serialize_signature_fields(&self, writer: W) -> io::Result<()> { + serialize_signature_fields( + writer, + &self.proof, + &self.value_commitment(), + &self.randomized_public_key, + &self.root_hash, + self.tree_size, + &self.nullifier, + ) + } + + /// Get the value_commitment from this proof as an edwards Point. + /// + /// This integrates the value and randomness into a single point, using + /// an appropriate generator. + pub(crate) fn value_commitment(&self) -> edwards::Point { + self.value_commitment.cm(&self.sapling.jubjub).into() + } +} +/// The publicly visible value of a spent note. These get serialized to prove +/// that the owner once had access to these values. It also publishes the +/// nullifier so that they can't pretend they still have access to them. +pub struct SpendProof { + /// Proof that the spend was valid and successful for the provided owner + /// and note. + pub(crate) proof: groth16::Proof, + + /// Randomized value commitment. Sometimes referred to as + /// `cv` in the literature. It's calculated by multiplying a value by a + /// random number. Randomized to help maintain zero knowledge. + pub(crate) value_commitment: edwards::Point, + + /// The public key after randomization has been applied. This is used + /// during signature verification to confirm that the owner of the note + /// authorized the spend. Referred to as + /// `rk` in the literature Calculated from the authorizing key and + /// the public_key_randomness. + pub(crate) randomized_public_key: redjubjub::PublicKey, + + /// The root hash of the merkle tree at the time the proof was calculated. + /// Referred to as `anchor` in the literature. + pub(crate) root_hash: J::Fr, + + /// The size of the tree at the time the proof was calculated. This is not + /// incorporated into the proof, but helps miners verify that the root + /// hash the client supplied is valid in the tree. 
+ pub(crate) tree_size: u32, + + /// Bytes to be placed into the nullifier set to verify whether this + /// note has been previously spent. + pub(crate) nullifier: Nullifier, + + /// Signature of the note owner authorizing the spend. This is calculated + /// after the transaction is complete, as it depends on a binding signature + /// key that incorporates calculations from all the spends and outputs + /// in that transaction. It's optional because it is calculated after + /// construction. + pub(crate) authorizing_signature: redjubjub::Signature, +} + +impl Clone for SpendProof { + fn clone(&self) -> SpendProof { + let randomized_public_key = redjubjub::PublicKey(self.randomized_public_key.0.clone()); + SpendProof { + proof: self.proof.clone(), + value_commitment: self.value_commitment.clone(), + randomized_public_key, + root_hash: self.root_hash, + tree_size: self.tree_size, + nullifier: self.nullifier, + authorizing_signature: self.authorizing_signature, + } + } +} + +impl SpendProof { + /// Load a SpendProof from a Read implementation (e.g: socket, file) + /// This is the main entry-point when reconstructing a serialized + /// transaction. + pub fn read( + jubjub: &J::Params, + mut reader: R, + ) -> Result { + let proof = groth16::Proof::read(&mut reader)?; + let value_commitment = edwards::Point::::read(&mut reader, &jubjub)?; + let randomized_public_key = redjubjub::PublicKey::::read(&mut reader, &jubjub)?; + let root_hash = read_scalar(&mut reader)?; + let tree_size = reader.read_u32::()?; + let mut nullifier = [0; 32]; + reader.read_exact(&mut nullifier)?; + let authorizing_signature = redjubjub::Signature::read(&mut reader)?; + + Ok(SpendProof { + proof, + value_commitment, + randomized_public_key, + root_hash, + tree_size, + nullifier, + authorizing_signature, + }) + } + + /// Stow the bytes of this SpendProof in the given writer. 
+ pub fn write(&self, mut writer: W) -> io::Result<()> { + self.serialize_signature_fields(&mut writer)?; + self.authorizing_signature.write(&mut writer)?; + + Ok(()) + } + + pub fn nullifier(&self) -> Nullifier { + self.nullifier + } + + pub fn root_hash(&self) -> J::Fr { + self.root_hash + } + + pub fn tree_size(&self) -> u32 { + self.tree_size + } + + /// Verify that the signature on this proof is signing the provided input + /// with the randomized_public_key on this proof. + pub fn verify_signature( + &self, + jubjub: &J::Params, + signature_hash_value: &[u8; 32], + ) -> Result<(), errors::SaplingProofError> { + if is_small_order(jubjub, &self.randomized_public_key.0) { + return Err(errors::SaplingProofError::VerificationFailed); + } + let mut data_to_be_signed = [0; 64]; + self.randomized_public_key + .0 + .write(&mut data_to_be_signed[..32]) + .expect("should be able to write public key point"); + data_to_be_signed[32..].copy_from_slice(&signature_hash_value[..]); + + if !self.randomized_public_key.verify( + &data_to_be_signed, + &self.authorizing_signature, + FixedGenerators::SpendingKeyGenerator, + jubjub, + ) { + Err(errors::SaplingProofError::VerificationFailed) + } else { + Ok(()) + } + } + + /// Verify that the bellman proof confirms the randomized_public_key, + /// commitment_value, nullifier, and anchor attached to this SpendProof. + /// + /// This entails converting all the values to appropriate inputs to the + /// bellman circuit and executing it. 
+ pub fn verify_proof(&self, sapling: &Sapling) -> Result<(), errors::SaplingProofError> { + if is_small_order(&sapling.jubjub, &self.value_commitment) { + return Err(errors::SaplingProofError::VerificationFailed); + } + + let mut public_input = [J::Fr::zero(); 7]; + let (x, y) = self.randomized_public_key.0.to_xy(); + public_input[0] = x; + public_input[1] = y; + + let (x, y) = self.value_commitment.to_xy(); + public_input[2] = x; + public_input[3] = y; + + public_input[4] = self.root_hash; + + let nullifier = multipack::bytes_to_bits_le(&self.nullifier); + let nullifier = multipack::compute_multipacking(&nullifier); + public_input[5] = nullifier[0]; + public_input[6] = nullifier[1]; + + match groth16::verify_proof(&sapling.spend_verifying_key, &self.proof, &public_input[..]) { + Ok(true) => Ok(()), + _ => Err(errors::SaplingProofError::VerificationFailed), + } + } + + /// Serialize the fields that are needed in calculating a signature to + /// the provided writer (probably a Blake2B writer) + pub(crate) fn serialize_signature_fields(&self, writer: W) -> io::Result<()> { + serialize_signature_fields( + writer, + &self.proof, + &self.value_commitment, + &self.randomized_public_key, + &self.root_hash, + self.tree_size, + &self.nullifier, + ) + } +} + +/// Given a writer (probably a Blake2b hasher), write byte representations +/// of the parameters that are used in calculating the signature of a transaction. +/// This function is called from both SpendProof and SpendParams because +/// signing and verifying both need to calculate the signature after all spends +/// have been recorded. 
+fn serialize_signature_fields( + mut writer: W, + proof: &groth16::Proof, + value_commitment: &edwards::Point, + randomized_public_key: &redjubjub::PublicKey, + root_hash: &J::Fr, + tree_size: u32, + nullifier: &[u8; 32], +) -> io::Result<()> { + proof.write(&mut writer)?; + value_commitment.write(&mut writer)?; + randomized_public_key.write(&mut writer)?; + writer.write_all(root_hash.to_repr().as_ref())?; + writer.write_u32::(tree_size)?; + writer.write_all(nullifier)?; + Ok(()) +} + +#[cfg(test)] +mod test { + extern crate bellman; + extern crate pairing; + + use super::{SpendParams, SpendProof}; + use crate::{ + keys::SaplingKey, + note::{Memo, Note}, + sapling_bls12, + test_util::make_fake_witness, + }; + use pairing::bls12_381::Bls12; + use rand::prelude::*; + use rand::{thread_rng, Rng}; + + #[test] + fn test_spend_round_trip() { + let sapling = sapling_bls12::SAPLING.clone(); + + let key = SaplingKey::generate_key(sapling.clone()); + let public_address = key.generate_public_address(); + + let note_randomness = random(); + + let note = Note::new( + sapling.clone(), + public_address.clone(), + note_randomness, + Memo([0; 32]), + ); + let witness = make_fake_witness(sapling.clone(), ¬e); + + let spend = SpendParams::new(sapling.clone(), key, ¬e, &witness) + .expect("should be able to create spend proof"); + + // signature comes from transaction, normally + let mut sig_hash = [0u8; 32]; + thread_rng().fill(&mut sig_hash[..]); + + let proof = spend.post(&sig_hash).expect("should be able to sign proof"); + proof + .verify_proof(&sapling) + .expect("proof should check out"); + proof + .verify_signature(&sapling.jubjub, &sig_hash) + .expect("should be able to verify signature"); + + let mut other_hash = [0u8; 32]; + thread_rng().fill(&mut other_hash[..]); + assert!( + proof + .verify_signature(&sapling.jubjub, &other_hash) + .is_err(), + "should error if not signing correct value" + ); + + // test serialization + let mut serialized_proof = vec![]; + proof + 
.write(&mut serialized_proof) + .expect("should be able to serialize proof"); + let read_back_proof: SpendProof = + SpendProof::read(&sapling.jubjub, &mut serialized_proof[..].as_ref()) + .expect("should be able to deserialize valid proof"); + + assert_eq!(proof.proof.a, read_back_proof.proof.a); + assert_eq!(proof.proof.b, read_back_proof.proof.b); + assert_eq!(proof.proof.c, read_back_proof.proof.c); + assert_eq!( + proof.value_commitment.to_xy(), + read_back_proof.value_commitment.to_xy() + ); + assert_eq!( + proof.randomized_public_key.0.to_xy(), + read_back_proof.randomized_public_key.0.to_xy() + ); + assert_eq!(proof.root_hash, read_back_proof.root_hash); + assert_eq!(proof.nullifier, read_back_proof.nullifier); + let mut serialized_again = vec![]; + read_back_proof + .write(&mut serialized_again) + .expect("should be able to serialize proof again"); + assert_eq!(serialized_proof, serialized_again); + } +} diff --git a/ironfish-rust/src/test_util.rs b/ironfish-rust/src/test_util.rs new file mode 100644 index 0000000000..d0ed5e77b4 --- /dev/null +++ b/ironfish-rust/src/test_util.rs @@ -0,0 +1,69 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use super::{ + note::Note, + witness::{Witness, WitnessNode}, + MerkleNoteHash, Sapling, +}; +use rand::{thread_rng, Rng}; +use std::sync::Arc; +use zcash_primitives::jubjub::JubjubEngine; +use zcash_proofs::circuit::sapling::TREE_DEPTH; + +/// Given a note, construct a Witness with a valid root_hash and authentication +/// path placing that note at a random location in a Merkle tree. 
+#[cfg(test)] +pub(crate) fn make_fake_witness( + sapling: Arc>, + note: &Note, +) -> Witness { + let mut rng = thread_rng(); + let mut buffer = [0u8; 64]; + thread_rng().fill(&mut buffer[..]); + + let mut witness_auth_path = vec![]; + for _ in 0..TREE_DEPTH { + witness_auth_path.push(match rng.gen() { + false => WitnessNode::Left(J::Fr::from(rng.gen::())), + true => WitnessNode::Right(J::Fr::from(rng.gen::())), + }) + } + let root_hash = + auth_path_to_root_hash::(&sapling, &witness_auth_path, note.commitment_point()); + Witness { + hasher: sapling.clone(), + auth_path: witness_auth_path, + root_hash: root_hash, + tree_size: 1400, + } +} + +/// Helper function to calculate a root hash given an authentication path from +/// a specific child_hash. +/// +/// Currently marked for test-only compilation, +/// but it may be useful to publish +/// something like this in the future. +#[cfg(test)] +pub(crate) fn auth_path_to_root_hash( + sapling: &Sapling, + auth_path: &Vec>, + child_hash: J::Fr, +) -> J::Fr { + let mut cur = child_hash.clone(); + + for (i, node) in auth_path.iter().enumerate() { + cur = match node { + WitnessNode::Left(ref sibling_hash) => { + MerkleNoteHash::combine_hash(sapling, i, &cur, &sibling_hash.clone()) + } + WitnessNode::Right(ref sibling_hash) => { + MerkleNoteHash::combine_hash(sapling, i, &sibling_hash.clone(), &cur) + } + } + } + + cur +} diff --git a/ironfish-rust/src/transaction/mod.rs b/ironfish-rust/src/transaction/mod.rs new file mode 100644 index 0000000000..50196e057d --- /dev/null +++ b/ironfish-rust/src/transaction/mod.rs @@ -0,0 +1,553 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +use super::{ + errors::{SaplingProofError, TransactionError}, + keys::{PublicAddress, SaplingKey}, + merkle_note::NOTE_ENCRYPTION_MINER_KEYS, + note::{Memo, Note}, + receiving::{ReceiptParams, ReceiptProof}, + spending::{SpendParams, SpendProof}, + witness::WitnessTrait, + Sapling, +}; +use blake2b_simd::Params as Blake2b; +use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; +use ff::Field; +use rand::rngs::OsRng; + +use zcash_primitives::redjubjub::{PrivateKey, PublicKey, Signature}; + +use std::{io, slice::Iter, sync::Arc}; +use zcash_primitives::jubjub::{edwards, FixedGenerators, JubjubEngine, JubjubParams, Unknown}; + +mod simple; +pub use simple::SimpleTransaction; +use std::ops::AddAssign; +use std::ops::SubAssign; + +#[cfg(test)] +mod tests; + +const SIGNATURE_HASH_PERSONALIZATION: &[u8; 8] = b"Bnsighsh"; +const TRANSACTION_SIGNATURE_VERSION: &[u8; 1] = &[0]; + +/// A collection of spend and receipt proofs that can be signed and verified. +/// In general, all the spent values should add up to all the receipt values. +/// +/// The Transaction is used while the spends and receipts are being constructed, +/// and contains working state that is used to create the transaction information. +/// +/// The Transaction, below, contains the serializable version, without any +/// secret keys or state not needed for verifying. +pub struct ProposedTransaction { + /// Essentially a global reference to the sapling parameters, including + /// proving and verification keys. + sapling: Arc>, + + /// A "private key" manufactured from a bunch of randomness added for each + /// spend and output. + binding_signature_key: J::Fs, + + /// A "public key" manufactured from a combination of the values of each + /// transaction and the same randomness as above + binding_verification_key: edwards::Point, + + /// Proofs of the individual spends with all values required to calculate + /// the signatures. 
+ spends: Vec>, + + /// proofs of the individual receipts with values required to calculate + /// signatures. Note: This is commonly referred to as + /// `outputs` in the literature. + receipts: Vec>, + + /// The balance of all the spends minus all the receipts. The difference + /// is the fee paid to the miner for mining the transaction. + transaction_fee: i64, + // + // NOTE: If adding fields here, you may need to add fields to + // signature hash method, and also to Transaction. +} + +impl ProposedTransaction { + pub fn new(sapling: Arc>) -> ProposedTransaction { + ProposedTransaction { + sapling, + binding_signature_key: ::zero(), + binding_verification_key: edwards::Point::zero(), + spends: vec![], + receipts: vec![], + transaction_fee: 0, + } + } + + /// Spend the note owned by spender_key at the given witness location. + pub fn spend( + &mut self, + spender_key: SaplingKey, + note: &Note, + witness: &dyn WitnessTrait, + ) -> Result<(), SaplingProofError> { + let proof = SpendParams::new(self.sapling.clone(), spender_key, note, witness)?; + self.add_spend_proof(proof, note.value()); + Ok(()) + } + + /// Add a spend proof that was created externally. + /// + /// This allows for parallel immutable spends without having to take + /// a mutable pointer out on self. + pub fn add_spend_proof(&mut self, spend: SpendParams, note_value: u64) { + self.increment_binding_signature_key(&spend.value_commitment.randomness, false); + self.increment_binding_verification_key(&spend.value_commitment(), false); + + self.spends.push(spend); + self.transaction_fee += note_value as i64; + } + + /// Create a proof of a new note owned by the recipient in this + /// transaction. 
+ pub fn receive( + &mut self, + spender_key: &SaplingKey, + note: &Note, + ) -> Result<(), SaplingProofError> { + let proof = ReceiptParams::new(self.sapling.clone(), spender_key, note)?; + + self.increment_binding_signature_key(&proof.value_commitment_randomness, true); + self.increment_binding_verification_key(&proof.merkle_note.value_commitment, true); + + self.receipts.push(proof); + self.transaction_fee -= note.value as i64; + + Ok(()) + } + + /// Post the transaction. This performs a bit of validation, and signs + /// the spends with a signature that proves the spends are part of this + /// transaction. + /// + /// Transaction fee is the amount the spender wants to send to the miner + /// for mining this transaction. This has to be non-negative; sane miners + /// wouldn't accept a transaction that takes money away from them. + /// + /// sum(spends) - sum(outputs) - intended_transaction_fee - change = 0 + /// aka: self.transaction_fee - intended_transaction_fee - change = 0 + pub fn post( + &mut self, + spender_key: &SaplingKey, + change_goes_to: Option>, + intended_transaction_fee: u64, + ) -> Result, TransactionError> { + let change_amount = self.transaction_fee - intended_transaction_fee as i64; + + if change_amount < 0 { + return Err(TransactionError::InvalidBalanceError); + } + if change_amount > 0 { + // TODO: The public address generated from the spender_key if + // change_goes_to is None should probably be associated with a + // known diversifier (eg: that used on other notes?) + // But we haven't worked out why determinacy in public addresses + // would be useful yet. + let change_address = + change_goes_to.unwrap_or_else(|| spender_key.generate_public_address()); + let change_note = Note::new( + self.sapling.clone(), + change_address, + change_amount as u64, // we checked it was positive + Memo([0; 32]), + ); + self.receive(&spender_key, &change_note)?; + } + self._partial_post() + } + + /// Special case for posting a miners fee transaction. 
Miner fee transactions + /// are unique in that they generate currency. They do not have any spends + /// or change and therefore have a negative transaction fee. In normal use, + /// a miner would not accept such a transaction unless it was explicitly set + /// as the miners fee. + pub fn post_miners_fee(&mut self) -> Result, TransactionError> { + if !self.spends.is_empty() || self.receipts.len() != 1 { + return Err(TransactionError::InvalidBalanceError); + } + // Ensure the merkle note has an identifiable encryption key + self.receipts + .get_mut(0) + .expect("bounds checked above") + .merkle_note + .note_encryption_keys = *NOTE_ENCRYPTION_MINER_KEYS; + self._partial_post() + } + /// Super special case for generating an illegal transaction for the genesis block. + /// Don't bother using this anywhere else, it won't pass verification. + #[deprecated(note = "Use only in genesis block generation")] + pub fn post_genesis_transaction(&self) -> Result, TransactionError> { + self._partial_post() + } + + // post transaction without much validation. + fn _partial_post(&self) -> Result, TransactionError> { + self.check_value_consistency()?; + let data_to_sign = self.transaction_signature_hash(); + let binding_signature = self.binding_signature()?; + let mut spend_proofs = vec![]; + for spend in &self.spends { + spend_proofs.push(spend.post(&data_to_sign)?); + } + let mut receipt_proofs = vec![]; + for receipt in &self.receipts { + receipt_proofs.push(receipt.post()?); + } + Ok(Transaction { + sapling: self.sapling.clone(), + transaction_fee: self.transaction_fee, + spends: spend_proofs, + receipts: receipt_proofs, + binding_signature, + }) + } + + /// Calculate a hash of the transaction data. This hash is what gets signed + /// by the private keys to verify that the transaction actually happened. 
+ /// + /// This is called during final posting of the transaction + /// + fn transaction_signature_hash(&self) -> [u8; 32] { + let mut hasher = Blake2b::new() + .hash_length(32) + .personal(SIGNATURE_HASH_PERSONALIZATION) + .to_state(); + + hasher.update(TRANSACTION_SIGNATURE_VERSION); + hasher + .write_i64::(self.transaction_fee) + .unwrap(); + for spend in self.spends.iter() { + spend.serialize_signature_fields(&mut hasher).unwrap(); + } + for receipt in self.receipts.iter() { + receipt.serialize_signature_fields(&mut hasher).unwrap(); + } + + let mut hash_result = [0; 32]; + hash_result[..].clone_from_slice(&hasher.finalize().as_ref()[..]); + hash_result + } + + /// Confirm that balance of input and receipt values is consistent with + /// those used in the proofs. + /// + /// Does not confirm that the transactions add up to zero. The calculation + /// for fees and change happens elsewhere. + /// + /// Can be safely called after each spend or receipt is added. + /// + /// Note: There is some duplication of effort between this function and + /// binding_signature below. I find the separation of concerns easier + /// to read, but it's an easy win if we see a performance bottleneck here. 
+ fn check_value_consistency(&self) -> Result<(), TransactionError> { + let jubjub = &self.sapling.jubjub; + let private_key = PrivateKey::(self.binding_signature_key); + let public_key = PublicKey::from_private( + &private_key, + FixedGenerators::ValueCommitmentRandomness, + jubjub, + ); + let mut value_balance_point = value_balance_to_point(self.transaction_fee as i64, jubjub)?; + + value_balance_point = value_balance_point.negate(); + let mut calculated_public_key = self.binding_verification_key.clone(); + calculated_public_key = calculated_public_key.add(&value_balance_point, jubjub); + + if calculated_public_key != public_key.0 { + Err(TransactionError::InvalidBalanceError) + } else { + Ok(()) + } + } + + /// The binding signature ties up all the randomness generated with the + /// transaction and uses it as a private key to sign all the values + /// that were calculated as part of the transaction. This function + /// performs the calculation and sets the value on this struct. + fn binding_signature(&self) -> Result { + let mut data_to_be_signed = [0u8; 64]; + let private_key = PrivateKey::(self.binding_signature_key); + let public_key = PublicKey::from_private( + &private_key, + FixedGenerators::ValueCommitmentRandomness, + &self.sapling.jubjub, + ); + + public_key + .0 + .write(&mut data_to_be_signed[..32]) + .expect("Should be able to copy key"); + (&mut data_to_be_signed[32..]).copy_from_slice(&self.transaction_signature_hash()); + + Ok(private_key.sign( + &data_to_be_signed, + &mut OsRng, + FixedGenerators::ValueCommitmentRandomness, + &self.sapling.jubjub, + )) + } + + /// Helper method to encapsulate the verbose way incrementing the signature + /// key works + fn increment_binding_signature_key(&mut self, value: &J::Fs, negate: bool) { + let tmp = *value; + if negate { + //binding_signature_key - value + self.binding_signature_key.sub_assign(&tmp); + } else { + //binding_signature_key + value + self.binding_signature_key.add_assign(&tmp); + } + } + + 
/// Helper method to encapsulate the verboseness around incrementing the + /// binding verificaiton key + fn increment_binding_verification_key( + &mut self, + value: &edwards::Point, + negate: bool, + ) { + let mut tmp = value.clone(); + if negate { + tmp = tmp.negate(); + } + tmp = tmp.add(&self.binding_verification_key, &self.sapling.jubjub); + self.binding_verification_key = tmp; + } +} + +/// A transaction that has been published and can be read by anyone, not storing +/// any of the working data or private keys used in creating the proofs. +/// +/// This is the serializable form of a transaction. +#[derive(Clone)] +pub struct Transaction { + /// reference to the sapling object associated with this transaction + sapling: Arc>, + + /// The balance of total spends - outputs, which is the amount that the miner gets to keep + transaction_fee: i64, + + /// List of spends, or input notes, that have been destroyed. + spends: Vec>, + + /// List of receipts, or output notes that have been created. + receipts: Vec>, + + /// Signature calculated from accumulating randomness with all the spends + /// and receipts when the transaction was created. + binding_signature: Signature, +} + +impl Transaction { + /// Load a Transaction from a Read implementation (e.g: socket, file) + /// This is the main entry-point when reconstructing a serialized transaction + /// for verifying. 
+ pub fn read( + sapling: Arc>, + mut reader: R, + ) -> Result { + let num_spends = reader.read_u64::()?; + let num_receipts = reader.read_u64::()?; + let transaction_fee = reader.read_i64::()?; + let mut spends = vec![]; + let mut receipts = vec![]; + for _ in 0..num_spends { + spends.push(SpendProof::read(&sapling.jubjub, &mut reader)?); + } + for _ in 0..num_receipts { + receipts.push(ReceiptProof::read(sapling.clone(), &mut reader)?); + } + let binding_signature = Signature::read(&mut reader)?; + + Ok(Transaction { + sapling, + transaction_fee, + spends, + receipts, + binding_signature, + }) + } + + /// Store the bytes of this transaction in the given writer. This is used + /// to serialize transactions to file or network + pub fn write(&self, mut writer: W) -> io::Result<()> { + writer.write_u64::(self.spends.len() as u64)?; + writer.write_u64::(self.receipts.len() as u64)?; + writer.write_i64::(self.transaction_fee)?; + for spend in self.spends.iter() { + spend.write(&mut writer)?; + } + for receipt in self.receipts.iter() { + receipt.write(&mut writer)?; + } + self.binding_signature.write(&mut writer)?; + + Ok(()) + } + + /// Validate the transaction. Confirms that: + /// * Each of the spend proofs has the inputs it says it does + /// * Each of the receipt proofs has the inputs it says it has + /// * Each of the spend proofs was signed by the owner + /// * The entire transaction was signed with a binding signature + /// containing those proofs (and only those proofs) + /// + pub fn verify(&self) -> Result<(), TransactionError> { + // Context to accumulate a signature of all the spends and outputs and + // guarantee they are part of this transaction, unmodified. 
+ let mut binding_verification_key = edwards::Point::zero(); + + for spend in self.spends.iter() { + spend.verify_proof(&self.sapling)?; + let mut tmp = spend.value_commitment.clone(); + tmp = tmp.add(&binding_verification_key, &self.sapling.jubjub); + binding_verification_key = tmp; + } + + for receipt in self.receipts.iter() { + receipt.verify_proof(&self.sapling)?; + let mut tmp = receipt.merkle_note.value_commitment.clone(); + tmp = tmp.negate(); + tmp = tmp.add(&binding_verification_key, &self.sapling.jubjub); + binding_verification_key = tmp; + } + + let hash_to_verify_signature = self.transaction_signature_hash(); + + for spend in self.spends.iter() { + spend.verify_signature(&self.sapling.jubjub, &hash_to_verify_signature)?; + } + + self.verify_binding_signature(&self.sapling, &binding_verification_key)?; + + Ok(()) + } + + /// Get an iterator over the spends in this transaction. Each spend + /// is by reference + pub fn iter_spends(&self) -> Iter> { + self.spends.iter() + } + + pub fn spends(&self) -> &Vec> { + &self.spends + } + + /// Get an iterator over the receipts in this transaction, by reference + pub fn iter_receipts(&self) -> Iter> { + self.receipts.iter() + } + + pub fn receipts(&self) -> &Vec> { + &self.receipts + } + + /// Get the transaction fee for this transaction. Miners should generally + /// expect this to be positive (or they would lose money mining it!). + /// The miners_fee transaction would be a special case. + pub fn transaction_fee(&self) -> i64 { + self.transaction_fee + } + + /// Get the transaction signature for this transaction. + pub fn binding_signature(&self) -> &Signature { + &self.binding_signature + } + + /// Calculate a hash of the transaction data. This hash was signed by the + /// private keys when the transaction was constructed, and will now be + /// reconstructed to verify the signature. + /// + /// TODO: This is very likely not hashing the right values or enough + /// values. 
+ pub fn transaction_signature_hash(&self) -> [u8; 32] { + let mut hasher = Blake2b::new() + .hash_length(32) + .personal(SIGNATURE_HASH_PERSONALIZATION) + .to_state(); + hasher.update(TRANSACTION_SIGNATURE_VERSION); + hasher + .write_i64::(self.transaction_fee) + .unwrap(); + for spend in self.spends.iter() { + spend.serialize_signature_fields(&mut hasher).unwrap(); + } + for receipt in self.receipts.iter() { + receipt.serialize_signature_fields(&mut hasher).unwrap(); + } + + let mut hash_result = [0; 32]; + hash_result[..].clone_from_slice(&hasher.finalize().as_ref()[..]); + hash_result + } + + /// Confirm that this transaction was signed by the values it contains. + /// Called from the public verify function. + fn verify_binding_signature( + &self, + sapling: &Sapling, + binding_verification_key: &edwards::Point, + ) -> Result<(), TransactionError> { + let mut value_balance_point = + value_balance_to_point(self.transaction_fee, &sapling.jubjub)?; + value_balance_point = value_balance_point.negate(); + + let mut public_key_point = binding_verification_key.clone(); + public_key_point = public_key_point.add(&value_balance_point, &sapling.jubjub); + let public_key = PublicKey(public_key_point); + + let mut data_to_verify_signature = [0; 64]; + public_key + .0 + .write(&mut data_to_verify_signature[..32]) + .expect("Should be able to copy key"); + (&mut data_to_verify_signature[32..]).copy_from_slice(&self.transaction_signature_hash()); + + if !public_key.verify( + &data_to_verify_signature, + &self.binding_signature, + FixedGenerators::ValueCommitmentRandomness, + &sapling.jubjub, + ) { + Err(TransactionError::VerificationFailed) + } else { + Ok(()) + } + } +} + +// Convert the integer value to a point on the Jubjub curve, accounting for +// negative values +fn value_balance_to_point( + value: i64, + params: &J::Params, +) -> Result, TransactionError> { + // Can only construct edwards point on positive numbers, so need to + // add and possibly negate later + let 
is_negative = value.is_negative(); + let abs = match value.checked_abs() { + Some(a) => a as u64, + None => return Err(TransactionError::IllegalValueError), + }; + + let mut value_balance = params + .generator(FixedGenerators::ValueCommitmentValue) + .mul(J::Fs::from(abs), params); + + if is_negative { + value_balance = value_balance.negate(); + } + + Ok(value_balance.into()) +} diff --git a/ironfish-rust/src/transaction/simple.rs b/ironfish-rust/src/transaction/simple.rs new file mode 100644 index 0000000000..c0752f71f2 --- /dev/null +++ b/ironfish-rust/src/transaction/simple.rs @@ -0,0 +1,64 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use crate::{ + errors::{SaplingProofError, TransactionError}, + keys::SaplingKey, + note::Note, + transaction::{ProposedTransaction, Transaction}, + witness::WitnessTrait, + Sapling, +}; +use std::sync::Arc; +use zcash_primitives::jubjub::JubjubEngine; + +/// Simple wrapper of the Transaction API for the case where there is exactly +/// one spender, and that spender is the person who receives the change. +/// +/// This is a really lightweight facade for the normal client usage. The only +/// reason you might expect someone to use the ProposedTransaction it wraps is +/// for multi-party spending. +pub struct SimpleTransaction { + transaction: ProposedTransaction, + spender_key: SaplingKey, + intended_transaction_fee: u64, +} + +impl SimpleTransaction { + /// Create a new SimpleTransaction, initializing the sapling object and + /// storing the spender_key of the person who receives all transactions. + /// + /// intended_transaction_fee is the amount the spender is willing to yield + /// to the miner. Any excess spends above this fee go back to the spender in + /// a change calculation. 
+ pub fn new( + sapling: Arc>, + spender_key: SaplingKey, + intended_transaction_fee: u64, + ) -> SimpleTransaction { + SimpleTransaction { + spender_key, + transaction: ProposedTransaction::new(sapling), + intended_transaction_fee, + } + } + + pub fn spend( + &mut self, + note: &Note, + witness: &dyn WitnessTrait, + ) -> Result<(), SaplingProofError> { + self.transaction + .spend(self.spender_key.clone(), note, witness) + } + + pub fn receive(&mut self, note: &Note) -> Result<(), SaplingProofError> { + self.transaction.receive(&self.spender_key, note) + } + + pub fn post(&mut self) -> Result, TransactionError> { + self.transaction + .post(&self.spender_key, None, self.intended_transaction_fee) + } +} diff --git a/ironfish-rust/src/transaction/tests.rs b/ironfish-rust/src/transaction/tests.rs new file mode 100644 index 0000000000..e3ea45dbd5 --- /dev/null +++ b/ironfish-rust/src/transaction/tests.rs @@ -0,0 +1,199 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +#[cfg(test)] +use super::{ProposedTransaction, SimpleTransaction, Transaction}; +use crate::{ + keys::SaplingKey, + merkle_note::NOTE_ENCRYPTION_MINER_KEYS, + note::{Memo, Note}, + sapling_bls12, + test_util::make_fake_witness, +}; +use pairing::bls12_381::Bls12; + +use zcash_primitives::redjubjub::Signature; + +#[test] +fn test_transaction() { + let sapling = sapling_bls12::SAPLING.clone(); + let mut transaction = ProposedTransaction::new(sapling.clone()); + let spender_key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let receiver_key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let in_note = Note::new( + sapling.clone(), + spender_key.generate_public_address(), + 42, + Memo([0; 32]), + ); + let out_note = Note::new( + sapling.clone(), + receiver_key.generate_public_address(), + 40, + Memo([0; 32]), + ); + let in_note2 = Note::new( + sapling.clone(), + spender_key.generate_public_address(), + 18, + Memo([0; 32]), + ); + let witness = make_fake_witness(sapling.clone(), &in_note); + let _witness2 = make_fake_witness(sapling.clone(), &in_note2); + transaction + .spend(spender_key.clone(), &in_note, &witness) + .expect("should be able to prove spend"); + assert_eq!(transaction.spends.len(), 1); + transaction + .check_value_consistency() + .expect("should be consistent after spend"); + transaction + .receive(&spender_key, &out_note) + .expect("should be able to prove receipt"); + assert_eq!(transaction.receipts.len(), 1); + transaction + .check_value_consistency() + .expect("should be consistent after receipt"); + + // This fails because witness and witness2 have different root hashes, and constructing + // an auth_path with consistent hashes is non-trivial without a real merkle tree + // implementation. Multiple spends should be tested at the integration level instead. 
+ // + // If you comment the sanity check at the beginning of Transaction.spend, it should pass + // + // transaction + // .spend(&spender_key, &in_note2, &witness2) + // .expect("should be able to prove second spend"); + + let public_transaction = transaction + .post(&spender_key, None, 1) + .expect("should be able to post transaction"); + public_transaction + .verify() + .expect("Should be able to verify transaction"); + assert_eq!(public_transaction.transaction_fee(), 1); + + // A change note was created + assert_eq!(public_transaction.receipts.len(), 2); + + // test serialization + let mut serialized_transaction = vec![]; + public_transaction + .write(&mut serialized_transaction) + .expect("should be able to serialize transaction"); + let read_back_transaction: Transaction = + Transaction::read(sapling.clone(), &mut serialized_transaction[..].as_ref()) + .expect("should be able to deserialize valid transaction"); + assert_eq!( + public_transaction.transaction_fee, + read_back_transaction.transaction_fee + ); + assert_eq!( + public_transaction.spends.len(), + read_back_transaction.spends.len() + ); + assert_eq!( + public_transaction.receipts.len(), + read_back_transaction.receipts.len() + ); + let mut serialized_again = vec![]; + read_back_transaction + .write(&mut serialized_again) + .expect("should be able to serialize transaction again"); + assert_eq!(serialized_transaction, serialized_again); +} + +#[test] +fn test_simple_transaction() { + let sapling = sapling_bls12::SAPLING.clone(); + let spender_key = SaplingKey::generate_key(sapling.clone()); + let receiver_key = SaplingKey::generate_key(sapling.clone()); + let spender_address = spender_key.generate_public_address(); + let receiver_address = receiver_key.generate_public_address(); + + let mut transaction = SimpleTransaction::new(sapling.clone(), spender_key, 0); + let in_note = Note::new(sapling.clone(), spender_address.clone(), 42, Memo([0; 32])); + let out_note = Note::new(sapling.clone(), 
receiver_address.clone(), 41, Memo([0; 32])); + let witness = make_fake_witness(sapling.clone(), &in_note); + + transaction + .spend(&in_note, &witness) + .expect("should be able to spend note"); + + transaction + .receive(&out_note) + .expect("Should be able to receive note"); + + let public_transaction = transaction + .post() + .expect("should be able to post transaction"); + + public_transaction + .verify() + .expect("should be able to verify transaction") +} + +#[test] +fn test_miners_fee() { + let sapling = &*sapling_bls12::SAPLING; + let mut transaction = ProposedTransaction::new(sapling.clone()); + let receiver_key: SaplingKey = SaplingKey::generate_key(sapling.clone()); + let out_note = Note::new( + sapling.clone(), + receiver_key.generate_public_address(), + 42, + Memo([0; 32]), + ); + transaction + .receive(&receiver_key, &out_note) + .expect("It's a valid note"); + let posted_transaction = transaction + .post_miners_fee() + .expect("it is a valid miner's fee"); + assert_eq!(posted_transaction.transaction_fee, -42); + assert_eq!( + posted_transaction + .iter_receipts() + .next() + .unwrap() + .merkle_note + .note_encryption_keys[0..30], + NOTE_ENCRYPTION_MINER_KEYS[0..30] + ); +} + +#[test] +fn test_transaction_signature() { + let sapling = sapling_bls12::SAPLING.clone(); + let spender_key = SaplingKey::generate_key(sapling.clone()); + let receiver_key = SaplingKey::generate_key(sapling.clone()); + let spender_address = spender_key.generate_public_address(); + let receiver_address = receiver_key.generate_public_address(); + + let mut transaction = SimpleTransaction::new(sapling.clone(), spender_key, 0); + let in_note = Note::new(sapling.clone(), spender_address.clone(), 42, Memo([0; 32])); + let out_note = Note::new(sapling.clone(), receiver_address.clone(), 41, Memo([0; 32])); + let witness = make_fake_witness(sapling.clone(), &in_note); + + transaction + .spend(&in_note, &witness) + .expect("should be able to spend note"); + + transaction + 
.receive(&out_note) + .expect("Should be able to receive note"); + + let public_transaction = transaction + .post() + .expect("should be able to post transaction"); + + let mut serialized_signature = vec![]; + public_transaction + .binding_signature() + .write(&mut serialized_signature) + .unwrap(); + assert_eq!(serialized_signature.len(), 64); + Signature::read(&mut serialized_signature[..].as_ref()) + .expect("Can deserialize back into a valid Signature"); +} diff --git a/ironfish-rust/src/witness.rs b/ironfish-rust/src/witness.rs new file mode 100644 index 0000000000..74856f0593 --- /dev/null +++ b/ironfish-rust/src/witness.rs @@ -0,0 +1,99 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use super::{MerkleNoteHash, Sapling}; +use std::fmt::{self, Debug}; +use std::sync::Arc; + +use zcash_primitives::jubjub::JubjubEngine; + +/// Witness to a specific node in an authentication path. +/// +/// The Left/Right is the Hash of THIS node, but the MerkleHash at node.0 is +/// the hash of the SIBLING node. +#[derive(PartialEq, Debug, Clone)] +pub enum WitnessNode { + Left(H), + Right(H), +} + +/// Commitment that a leaf node exists in the tree, with an authentication path +/// and the root_hash of the tree at the time the authentication_path was +/// calculated. +pub trait WitnessTrait { + /// verify that the root hash and authentication path on this witness is a + /// valid confirmation that the given element exists at this point in the + /// tree. + fn verify(&self, my_hash: &MerkleNoteHash) -> bool; + + fn get_auth_path(&self) -> Vec>; + + fn root_hash(&self) -> J::Fr; + + fn tree_size(&self) -> u32; +} + +/// A Rust implementation of a WitnessTrait, used for testing Witness-related +/// code within Rust. 
+pub struct Witness { + pub hasher: Arc>, + pub tree_size: usize, + pub root_hash: J::Fr, + pub auth_path: Vec>, +} + +/// Implement partial equality, ignoring the Sapling Arc +impl PartialEq for Witness { + fn eq(&self, other: &Witness) -> bool { + self.tree_size == other.tree_size + && self.root_hash == other.root_hash + && self.auth_path == other.auth_path + } +} + +impl WitnessTrait for Witness { + fn verify(&self, my_hash: &MerkleNoteHash) -> bool { + let mut cur_hash = my_hash.0; + for (i, node) in self.auth_path.iter().enumerate() { + cur_hash = match node { + WitnessNode::Left(ref right_hash) => { + MerkleNoteHash::combine_hash(&self.hasher, i, &cur_hash, right_hash) + } + WitnessNode::Right(ref left_hash) => { + MerkleNoteHash::combine_hash(&self.hasher, i, left_hash, &cur_hash) + } + } + } + + cur_hash == self.root_hash + } + + fn get_auth_path(&self) -> Vec> { + self.auth_path.clone() + } + + fn root_hash(&self) -> J::Fr { + self.root_hash + } + + fn tree_size(&self) -> u32 { + self.tree_size as u32 + } +} + +impl fmt::Debug for Witness { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + writeln!(f, "Witness {{")?; + writeln!(f, " tree_size: {}", self.tree_size)?; + writeln!(f, " root_hash: {:?}", self.root_hash)?; + writeln!(f, " auth_path: {{")?; + + for hash in self.auth_path.iter() { + writeln!(f, " {:?},", hash)?; + } + writeln!(f, " }}")?; + writeln!(f, "}}")?; + Ok(()) + } +} diff --git a/ironfish-wasm/Cargo.lock b/ironfish-wasm/Cargo.lock new file mode 100644 index 0000000000..a87c2a9c18 --- /dev/null +++ b/ironfish-wasm/Cargo.lock @@ -0,0 +1,1123 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]] +name = "addchain" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1177222c93a7bb492002e9a3cd947c7fd869e085d6e81a9e415ff1be65b3489c" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "aes" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54eb1d8fe354e5fc611daf4f2ea97dd45a765f4f1e4512306ec183ae2e8f20c9" +dependencies = [ + "aes-soft", + "aesni", + "block-cipher-trait", +] + +[[package]] +name = "aes-soft" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfd7e7ae3f9a1fb5c03b389fc6bb9a51400d0c13053f0dca698c832bfd893a0d" +dependencies = [ + "block-cipher-trait", + "byteorder", + "opaque-debug 0.2.3", +] + +[[package]] +name = "aesni" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f70a6b5f971e473091ab7cfb5ffac6cde81666c4556751d8d5620ead8abf100" +dependencies = [ + "block-cipher-trait", + "opaque-debug 0.2.3", +] + +[[package]] +name = "anyhow" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28b2cd92db5cbd74e8e5028f7e27dd7aa3090e89e4f2a197cc7c8dfb69c7063b" + +[[package]] +name = "arrayref" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "base64" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"30e93c03064e7590d0466209155251b90c22e37fab1daf2771582598b5827557" +dependencies = [ + "byteorder", +] + +[[package]] +name = "base64" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" + +[[package]] +name = "bellman" +version = "0.6.0" +source = "git+https://github.com/iron-fish/librustzcash.git#8a2fe71c8aed7ff9271fe97cd64fd8abacff892c" +dependencies = [ + "bit-vec", + "blake2s_simd", + "byteorder", + "ff", + "futures", + "group", + "pairing", + "rand_core 0.5.1", + "subtle", +] + +[[package]] +name = "bit-vec" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f" + +[[package]] +name = "bitflags" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" + +[[package]] +name = "blake2b_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afa748e348ad3be8263be728124b24a24f268266f6f5d58af9d75f6a40b5c587" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + +[[package]] +name = "blake2s_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e461a7034e85b211a4acb57ee2e6730b32912b06c08cc242243c39fc21ae6a2" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + +[[package]] +name = "block-buffer" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" +dependencies = [ + "block-padding", + "byte-tools", + "byteorder", + "generic-array 0.12.3", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "generic-array 0.14.4", +] + +[[package]] +name = "block-cipher-trait" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c924d49bd09e7c06003acda26cd9742e796e34282ec6c1189404dee0c1f4774" +dependencies = [ + "generic-array 0.12.3", +] + +[[package]] +name = "block-padding" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" +dependencies = [ + "byte-tools", +] + +[[package]] +name = "bumpalo" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e8c087f005730276d1096a652e92a8bacee2e2472bcc9715a74d2bec38b5820" + +[[package]] +name = "byte-tools" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" + +[[package]] +name = "byteorder" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cloudabi" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4344512281c643ae7638bbabc3af17a11307803ec8f0fcad9fae512a8bf36467" +dependencies = [ + "bitflags", +] + +[[package]] +name = "console_error_panic_hook" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b8d976903543e0c48546a91908f21588a680a8c8f984df9a5d69feccb2b2a211" +dependencies = [ + "cfg-if 0.1.10", + "wasm-bindgen", +] + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "cpuid-bool" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8aebca1129a03dc6dc2b127edd729435bbc4a37e1d5f4d7513165089ceb02634" + +[[package]] +name = "crossbeam-utils" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d" +dependencies = [ + "autocfg", + "cfg-if 1.0.0", + "lazy_static", +] + +[[package]] +name = "crypto-mac" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +dependencies = [ + "generic-array 0.14.4", + "subtle", +] + +[[package]] +name = "crypto_api" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f855e87e75a4799e18b8529178adcde6fd4f97c1449ff4821e747ff728bb102" + +[[package]] +name = "crypto_api_chachapoly" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95b2ad7cab08fd71addba81df5077c49df208effdfb3118a1519f9cdeac5aaf2" +dependencies = [ + "crypto_api", +] + +[[package]] +name = "digest" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +dependencies = [ + "generic-array 0.12.3", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array 0.14.4", +] + 
+[[package]] +name = "directories" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8fed639d60b58d0f53498ab13d26f621fd77569cc6edb031f4cc36a2ad9da0f" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e93d7f5705de3e49895a2b5e0b8855a1c27f080192ae9c32a6432d50741a57a" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + +[[package]] +name = "equihash" +version = "0.1.0" +source = "git+https://github.com/iron-fish/librustzcash.git#8a2fe71c8aed7ff9271fe97cd64fd8abacff892c" +dependencies = [ + "blake2b_simd", + "byteorder", +] + +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" + +[[package]] +name = "ff" +version = "0.6.0" +source = "git+https://github.com/iron-fish/librustzcash.git#8a2fe71c8aed7ff9271fe97cd64fd8abacff892c" +dependencies = [ + "byteorder", + "ff_derive", + "rand_core 0.5.1", + "subtle", +] + +[[package]] +name = "ff_derive" +version = "0.6.0" +source = "git+https://github.com/iron-fish/librustzcash.git#8a2fe71c8aed7ff9271fe97cd64fd8abacff892c" +dependencies = [ + "addchain", + "num-bigint", + "num-integer", + "num-traits", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "fpe" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21988a326139165b75e3196bc6962ca638e5fb0c95102fbf152a3743174b01e4" +dependencies = [ + "aes", + "byteorder", + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "futures" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7e4c2612746b0df8fed4ce0c69156021b704c9aefa360311c04e6e9e002eed" + +[[package]] +name = "gcc" +version = "0.3.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" + +[[package]] +name = "generic-array" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec" +dependencies = [ + "typenum", +] + +[[package]] +name = "generic-array" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "group" +version = "0.6.0" +source = "git+https://github.com/iron-fish/librustzcash.git#8a2fe71c8aed7ff9271fe97cd64fd8abacff892c" +dependencies = [ + "byteorder", + "ff", + "rand 0.7.3", + "rand_xorshift", + "subtle", +] + +[[package]] +name = "hex" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa" + +[[package]] +name = "hex" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" + +[[package]] +name = "hmac" +version = 
"0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" +dependencies = [ + "crypto-mac", + "digest 0.9.0", +] + +[[package]] +name = "instant" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "ironfish_rust" +version = "0.1.0" +dependencies = [ + "bellman", + "blake2b_simd", + "blake2s_simd", + "byteorder", + "ff", + "lazy_static", + "pairing", + "rand 0.7.3", + "rust-crypto-wasm", + "shrinkwraprs", + "tiny-bip39", + "zcash_primitives", + "zcash_proofs", +] + +[[package]] +name = "ironfish_wasm" +version = "0.1.0" +dependencies = [ + "console_error_panic_hook", + "ironfish_rust", + "js-sys", + "pairing", + "rand 0.7.3", + "wasm-bindgen", + "zcash_primitives", +] + +[[package]] +name = "itertools" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f56a2d0bc861f9165be4eb3442afd3c236d8a98afd426f65d92324ae1091a484" +dependencies = [ + "either", +] + +[[package]] +name = "js-sys" +version = "0.3.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc9f84f9b115ce7843d60706df1422a916680bfdfcbdb0447c5614ff9d7e4d78" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614" + +[[package]] +name = "lock_api" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" +dependencies = [ + "cfg-if 0.1.10", +] + +[[package]] +name = "num-bigint" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-integer" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13bd41f508810a131401606d54ac32a467c97172d74ba7662562ebba5ad07fa0" +dependencies = [ + "parking_lot", +] + +[[package]] +name = "opaque-debug" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" + +[[package]] +name = "opaque-debug" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" + +[[package]] +name = "pairing" +version = "0.16.0" +source = "git+https://github.com/iron-fish/librustzcash.git#8a2fe71c8aed7ff9271fe97cd64fd8abacff892c" +dependencies = [ + "byteorder", + "ff", + "group", + "rand_core 0.5.1", + "subtle", +] + +[[package]] +name = 
"parking_lot" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c361aa727dd08437f2f1447be8b59a33b0edd15e0fcee698f935613d9efbca9b" +dependencies = [ + "cfg-if 0.1.10", + "cloudabi", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "pbkdf2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "216eaa586a190f0a738f2f918511eecfa90f13295abec0e457cdebcceda80cbd" +dependencies = [ + "crypto-mac", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "proc-macro2" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "quote" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" +dependencies = [ + "libc", + "rand 0.4.6", +] + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", 
+ "winapi", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom", + "libc", + "rand_chacha", + "rand_core 0.5.1", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_xorshift" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77d416b86801d23dde1aa643023b775c3a462efc0ed96443add11546cdf1dca8" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = 
"redox_syscall" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" + +[[package]] +name = "redox_users" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d" +dependencies = [ + "getrandom", + "redox_syscall", + "rust-argon2", +] + +[[package]] +name = "rust-argon2" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b18820d944b33caa75a71378964ac46f58517c92b6ae5f762636247c09e78fb" +dependencies = [ + "base64 0.13.0", + "blake2b_simd", + "constant_time_eq", + "crossbeam-utils", +] + +[[package]] +name = "rust-crypto-wasm" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dcf11edbc9a0effb4a99ddbe909dd26fb2e71459064879218c27b0add1cb6ec" +dependencies = [ + "base64 0.5.2", + "gcc", + "hex 0.2.0", + "libc", + "rand 0.3.23", + "time", +] + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "sha2" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" +dependencies = [ + "block-buffer 0.7.3", + "digest 0.8.1", + "fake-simd", + "opaque-debug 0.2.3", +] + +[[package]] +name = "sha2" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa827a14b29ab7f44778d14a88d3cb76e949c45083f7dbfa507d0cb699dc12de" +dependencies = [ + "block-buffer 0.9.0", + 
"cfg-if 1.0.0", + "cpuid-bool", + "digest 0.9.0", + "opaque-debug 0.3.0", +] + +[[package]] +name = "shrinkwraprs" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83695fde96cbe9e08f0e4eb96b1b56fdbd44f2098ee27462dda964c7745fddc7" +dependencies = [ + "bitflags", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "smallvec" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7acad6f34eb9e8a259d3283d1e8c1d34d7415943d4895f65cc73813c7396fc85" + +[[package]] +name = "subtle" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "343f3f510c2915908f155e94f17220b19ccfacf2a64a2a5d8004f2c3e311e7fd" + +[[package]] +name = "syn" +version = "1.0.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8833e20724c24de12bbaba5ad230ea61c3eafb05b881c7c9d3cfe8638b187e68" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "synstructure" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b834f2d66f734cb897113e34aaff2f1ab4719ca946f9a7358dba8f8064148701" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "unicode-xid", +] + +[[package]] +name = "thiserror" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "time" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" 
+dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + +[[package]] +name = "tiny-bip39" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9e44c4759bae7f1032e286a7ef990bd9ed23fe831b7eeba0beb97484c2e59b8" +dependencies = [ + "anyhow", + "hmac", + "once_cell", + "pbkdf2", + "rand 0.7.3", + "rustc-hash", + "sha2 0.9.3", + "thiserror", + "unicode-normalization", + "zeroize", +] + +[[package]] +name = "tinyvec" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf8dbc19eb42fba10e8feaaec282fb50e2c14b2726d6301dbfeed0f73306a6f" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "typenum" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" + +[[package]] +name = "unicode-normalization" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a13e63ab62dbe32aeee58d1c5408d35c36c392bba5d9d3142287219721afe606" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-xid" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" + +[[package]] +name = "version_check" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" 
+version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasm-bindgen" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ee1280240b7c461d6a0071313e08f34a60b0365f14260362e5a2b17d1d31aa7" +dependencies = [ + "cfg-if 1.0.0", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b7d8b6942b8bb3a9b0e73fc79b98095a27de6fa247615e59d096754a3bc2aa8" +dependencies = [ + "bumpalo", + "lazy_static", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ac38da8ef716661f0f36c0d8320b89028efe10c7c0afde65baffb496ce0d3b" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc053ec74d454df287b9374ee8abb36ffd5acb95ba87da3ba5b7d3fe20eb401e" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d6f8ec44822dd71f5f221a5847fb34acd9060535c1211b70a05844c0f6383b1" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "zcash_primitives" +version = "0.2.0" +source = "git+https://github.com/iron-fish/librustzcash.git#8a2fe71c8aed7ff9271fe97cd64fd8abacff892c" +dependencies = [ + "aes", + "blake2b_simd", + "blake2s_simd", + "byteorder", + "crypto_api_chachapoly", + "equihash", + "ff", + "fpe", + "hex 0.3.2", + "lazy_static", + "log", + "pairing", + "rand 0.7.3", + "rand_core 0.5.1", + "sha2 0.8.2", + "subtle", +] + +[[package]] +name = "zcash_proofs" +version = "0.2.0" +source = "git+https://github.com/iron-fish/librustzcash.git#8a2fe71c8aed7ff9271fe97cd64fd8abacff892c" +dependencies = [ + "bellman", + "blake2b_simd", + "byteorder", + "directories", + "ff", + "pairing", + "rand_core 0.5.1", + "zcash_primitives", +] + +[[package]] +name = "zeroize" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81a974bcdd357f0dca4d41677db03436324d45a4c9ed2d0b873a5a360ce41c36" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3f369ddb18862aba61aa49bf31e74d29f0f162dec753063200e1dc084345d16" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] diff --git a/ironfish-wasm/Cargo.toml b/ironfish-wasm/Cargo.toml new file mode 100644 index 0000000000..e6e9a91493 --- /dev/null +++ b/ironfish-wasm/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "ironfish_wasm" +version = "0.1.0" +authors = ["Iron Fish (https://ironfish.network)"] +edition = "2018" +license = "MPL-2.0" + +[dev-dependencies] +zcash_primitives = {git = 
"https://github.com/iron-fish/librustzcash.git"} +pairing = { git = "https://github.com/iron-fish/librustzcash.git", version = "0.16", features = ["expose-arith"]} +rand = {version = "0.7", features = ["wasm-bindgen"]} + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +console_error_panic_hook = "0.1.6" +ironfish_rust= { path = "../ironfish-rust" } +js-sys = "0.3.48" +wasm-bindgen = "0.2.71" + +[profile.release] +opt-level = 3 +lto = true + +[package.metadata.wasm-pack.profile.release] +wasm-opt = ["-O4", "-g", "--enable-mutable-globals"] + +[lib] +crate-type = ["cdylib", "rlib"] \ No newline at end of file diff --git a/ironfish-wasm/README.md b/ironfish-wasm/README.md new file mode 100644 index 0000000000..43f712e261 --- /dev/null +++ b/ironfish-wasm/README.md @@ -0,0 +1,21 @@ +[![codecov](https://codecov.io/gh/iron-fish/ironfish/branch/master/graph/badge.svg?token=PCSVEVEW5V&flag=ironfish-wasm-web)](https://codecov.io/gh/iron-fish/ironfish) +[![codecov](https://codecov.io/gh/iron-fish/ironfish/branch/master/graph/badge.svg?token=PCSVEVEW5V&flag=ironfish-wasm-nodejs)](https://codecov.io/gh/iron-fish/ironfish) + +## Accounts + +This is a Rust wrapper for creating accounts and transactions to be converted into WASM. + +### To Compile WASM + +``` +yarn build +``` + +This will generate `web` and `nodejs` folders that you can import in package.json files elsewhere in the repository with the following (choose either as appropriate): + +``` + "dependencies": { + "ironfish-wasm-web": "*", + "ironfish-wasm-nodejs": "*" + }, +``` diff --git a/ironfish-wasm/build.js b/ironfish-wasm/build.js new file mode 100644 index 0000000000..796c4a591f --- /dev/null +++ b/ironfish-wasm/build.js @@ -0,0 +1,61 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +const cp = require('child_process'); +const fs = require('fs'); + +let buildWeb = process.argv.includes('--web') +let buildNode = process.argv.includes('--node') + +if(!buildWeb && !buildNode) { + buildWeb = true + buildNode = true +} + +console.log(`Building Web: ${buildWeb.toString().toUpperCase()}`) +console.log(`Building Node: ${buildNode.toString().toUpperCase()}`) + +if(buildNode) { + console.log('Generating nodejs build...'); + const result = cp.spawnSync('wasm-pack', 'build -t nodejs -d nodejs --out-name ironfish-wasm-nodejs'.split(' '), { + stdio: 'inherit', + }); + if (result.error) { + if (result.error.message.includes('ENOENT')) { + console.error('wasm-pack is not installed. Run `cargo install wasm-pack`.') + } else { + console.error(result.error.message); + } + process.exit(1); + } + if (result.status) { + process.exit(result.status); + } + + console.log('Replacing name in nodejs/package.json...'); + const nodeFile = String(fs.readFileSync('nodejs/package.json', 'utf-8')); + const newNodeFile = nodeFile.replace('"ironfish_wasm"', '"ironfish-wasm-nodejs"'); + fs.writeFileSync('nodejs/package.json', newNodeFile); +} + +if(buildWeb) { + console.log('Generating web build...'); + const result = cp.spawnSync('wasm-pack', 'build -t bundler -d web --out-name ironfish-wasm-web'.split(' '), { + stdio: 'inherit', + }); + if (result.error) { + console.error(result.error.message); + process.exit(1); + } + if (result.status) { + process.exit(result.status); + } + + console.log('Replacing name in web/package.json...'); + const webFile = String(fs.readFileSync('web/package.json', 'utf-8')); + const newWebFile = webFile.replace('"ironfish_wasm"', '"ironfish-wasm-web"'); + fs.writeFileSync('web/package.json', newWebFile); +} + +console.log('Done!'); diff --git a/ironfish-wasm/package.json b/ironfish-wasm/package.json new file mode 100644 index 0000000000..0629a9effe --- /dev/null +++ b/ironfish-wasm/package.json @@ -0,0 +1,10 @@ +{ + "name": "ironfish-wasm", + 
"author": "Iron Fish (https://ironfish.network)", + "license": "MPL-2.0", + "scripts": { + "build:node": "node build.js --node", + "build:web": "node build.js --web", + "build": "node build.js" + } +} diff --git a/ironfish-wasm/src/lib.rs b/ironfish-wasm/src/lib.rs new file mode 100644 index 0000000000..1aa74f3899 --- /dev/null +++ b/ironfish-wasm/src/lib.rs @@ -0,0 +1,90 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +extern crate console_error_panic_hook; +extern crate ironfish_rust; +extern crate wasm_bindgen; + +use ironfish_rust::sapling_bls12; + +pub mod wasm_structs; + +use std::str; +use wasm_bindgen::prelude::*; + +#[wasm_bindgen] +pub struct Key { + spending_key: String, + incoming_view_key: String, + outgoing_view_key: String, + public_address: String, +} + +#[wasm_bindgen] +impl Key { + #[wasm_bindgen(getter)] + pub fn spending_key(&self) -> String { + self.spending_key.clone() + } + + #[wasm_bindgen(getter)] + pub fn incoming_view_key(&self) -> String { + self.incoming_view_key.clone() + } + + #[wasm_bindgen(getter)] + pub fn outgoing_view_key(&self) -> String { + self.outgoing_view_key.clone() + } + + #[wasm_bindgen(getter)] + pub fn public_address(&self) -> String { + self.public_address.clone() + } +} + +#[wasm_bindgen(js_name = "generateKey")] +pub fn create_key_to_js() -> Key { + console_error_panic_hook::set_once(); + let hasher = sapling_bls12::SAPLING.clone(); + let sapling_key = sapling_bls12::Key::generate_key(hasher); + + Key { + spending_key: sapling_key.hex_spending_key(), + incoming_view_key: sapling_key.incoming_view_key().hex_key(), + outgoing_view_key: sapling_key.outgoing_view_key().hex_key(), + public_address: sapling_key.generate_public_address().hex_public_address(), + } +} + +#[wasm_bindgen(catch, js_name = "generateNewPublicAddress")] +pub fn 
create_new_public_key_to_js(private_key: &str) -> Key { + console_error_panic_hook::set_once(); + let hasher = sapling_bls12::SAPLING.clone(); + let sapling_key = sapling_bls12::Key::from_hex(hasher, private_key).unwrap(); + + Key { + spending_key: sapling_key.hex_spending_key(), + incoming_view_key: sapling_key.incoming_view_key().hex_key(), + outgoing_view_key: sapling_key.outgoing_view_key().hex_key(), + public_address: sapling_key.generate_public_address().hex_public_address(), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_create_new_public_key_to_js() { + let key1 = create_key_to_js(); + let key2 = create_new_public_key_to_js(&key1.spending_key); + + assert_eq!(key1.spending_key(), key2.spending_key()); + assert_eq!(key1.incoming_view_key(), key2.incoming_view_key()); + assert_eq!(key1.outgoing_view_key(), key2.outgoing_view_key()); + + assert_ne!(key1.public_address(), key2.public_address()); + } +} diff --git a/ironfish-wasm/src/wasm_structs/mod.rs b/ironfish-wasm/src/wasm_structs/mod.rs new file mode 100644 index 0000000000..d40c0a6728 --- /dev/null +++ b/ironfish-wasm/src/wasm_structs/mod.rs @@ -0,0 +1,20 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +mod note_encrypted; +pub use note_encrypted::WasmNoteEncrypted; + +mod note; +pub use note::WasmNote; + +mod spend_proof; +pub use spend_proof::WasmSpendProof; + +mod transaction; +pub use transaction::WasmSimpleTransaction; +pub use transaction::WasmTransaction; +pub use transaction::WasmTransactionPosted; + +mod witness; +pub use witness::JsWitness; diff --git a/ironfish-wasm/src/wasm_structs/note.rs b/ironfish-wasm/src/wasm_structs/note.rs new file mode 100644 index 0000000000..441d199617 --- /dev/null +++ b/ironfish-wasm/src/wasm_structs/note.rs @@ -0,0 +1,64 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +use ironfish_rust::note::Memo; +use ironfish_rust::sapling_bls12::{Key, Note, SAPLING}; +use wasm_bindgen::prelude::*; + +#[wasm_bindgen] +pub struct WasmNote { + pub(crate) note: Note, +} + +#[wasm_bindgen] +impl WasmNote { + #[wasm_bindgen(constructor)] + pub fn new(owner: &str, value: u64, memo: &str) -> WasmNote { + let owner_address = ironfish_rust::PublicAddress::from_hex(SAPLING.clone(), owner).unwrap(); + WasmNote { + note: Note::new(SAPLING.clone(), owner_address, value, Memo::from(memo)), + } + } + + #[wasm_bindgen] + pub fn deserialize(bytes: &[u8]) -> WasmNote { + let hasher = SAPLING.clone(); + let cursor: std::io::Cursor<&[u8]> = std::io::Cursor::new(bytes); + let note = Note::read(cursor, hasher).unwrap(); + WasmNote { note } + } + + #[wasm_bindgen] + pub fn serialize(&self) -> Vec { + let mut cursor: std::io::Cursor> = std::io::Cursor::new(vec![]); + self.note.write(&mut cursor).unwrap(); + cursor.into_inner() + } + + /// Value this note represents. + #[wasm_bindgen(getter)] + pub fn value(&self) -> u64 { + self.note.value() + } + + /// Arbitrary note the spender can supply when constructing a spend so the + /// receiver has some record from whence it came. 
+ /// Note: While this is encrypted with the output, it is not encoded into + /// the proof in any way. + #[wasm_bindgen(getter)] + pub fn memo(&self) -> String { + self.note.memo().to_string() + } + + /// Compute the nullifier for this note, given the private key of its owner. + /// + /// The nullifier is a series of bytes that is published by the note owner + /// only at the time the note is spent. This key is collected in a massive + /// 'nullifier set', preventing double-spend. + #[wasm_bindgen] + pub fn nullifier(&self, owner_private_key: &str, position: u64) -> Vec { + let private_key = Key::from_hex(SAPLING.clone(), owner_private_key).unwrap(); + self.note.nullifier(&private_key, position).to_vec() + } +} diff --git a/ironfish-wasm/src/wasm_structs/note_encrypted.rs b/ironfish-wasm/src/wasm_structs/note_encrypted.rs new file mode 100644 index 0000000000..58585c1813 --- /dev/null +++ b/ironfish-wasm/src/wasm_structs/note_encrypted.rs @@ -0,0 +1,159 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +use ironfish_rust::sapling_bls12; +use ironfish_rust::MerkleNote; +use wasm_bindgen::prelude::*; + +use super::WasmNote; + +#[wasm_bindgen] +pub struct WasmNoteEncrypted { + pub(crate) note: sapling_bls12::MerkleNote, +} + +#[wasm_bindgen] +impl WasmNoteEncrypted { + #[wasm_bindgen] + pub fn deserialize(bytes: &[u8]) -> WasmNoteEncrypted { + let hasher = sapling_bls12::SAPLING.clone(); + let cursor: std::io::Cursor<&[u8]> = std::io::Cursor::new(bytes); + let note = MerkleNote::read(cursor, hasher).unwrap(); + WasmNoteEncrypted { note } + } + + #[wasm_bindgen] + pub fn serialize(&self) -> Vec { + let mut cursor: std::io::Cursor> = std::io::Cursor::new(vec![]); + self.note.write(&mut cursor).unwrap(); + cursor.into_inner() + } + + #[wasm_bindgen] + pub fn equals(&self, other: &WasmNoteEncrypted) -> bool { + self.note.eq(&other.note) + } + + #[wasm_bindgen(js_name = "merkleHash")] + pub fn merkle_hash(&self) -> Vec { + let mut cursor: Vec = Vec::with_capacity(32); + self.note.merkle_hash().write(&mut cursor).unwrap(); + cursor + } + + /// Hash two child hashes together to calculate the hash of the + /// new parent + #[wasm_bindgen(js_name = "combineHash")] + pub fn combine_hash(depth: usize, left: &[u8], right: &[u8]) -> Vec { + let mut left_hash_reader: std::io::Cursor<&[u8]> = std::io::Cursor::new(left); + let mut right_hash_reader: std::io::Cursor<&[u8]> = std::io::Cursor::new(right); + let left_hash = sapling_bls12::MerkleNoteHash::read(&mut left_hash_reader).unwrap(); + let right_hash = sapling_bls12::MerkleNoteHash::read(&mut right_hash_reader).unwrap(); + + let mut cursor: Vec = Vec::with_capacity(32); + + sapling_bls12::MerkleNoteHash::new(sapling_bls12::MerkleNoteHash::combine_hash( + &sapling_bls12::SAPLING.clone(), + depth, + &left_hash.0, + &right_hash.0, + )) + .write(&mut cursor) + .unwrap(); + + cursor + } + + /// Returns undefined if the note was unable to be decrypted with the given key. 
+ #[wasm_bindgen(js_name = "decryptNoteForOwner")] + pub fn decrypt_note_for_owner(&self, owner_hex_key: &str) -> Option { + let owner_view_key = + sapling_bls12::IncomingViewKey::from_hex(sapling_bls12::SAPLING.clone(), owner_hex_key) + .unwrap(); + match self.note.decrypt_note_for_owner(&owner_view_key) { + Ok(n) => Some(WasmNote { note: { n } }), + Err(_) => None, + } + } + + /// Returns undefined if the note was unable to be decrypted with the given key. + #[wasm_bindgen(js_name = "decryptNoteForSpender")] + pub fn decrypt_note_for_spender(&self, spender_hex_key: &str) -> Option { + let spender_view_key = sapling_bls12::OutgoingViewKey::from_hex( + sapling_bls12::SAPLING.clone(), + spender_hex_key, + ) + .unwrap(); + match self.note.decrypt_note_for_spender(&spender_view_key) { + Ok(n) => Some(WasmNote { note: { n } }), + Err(_) => None, + } + } +} + +#[cfg(test)] +mod tests { + use rand::{thread_rng, Rng}; + use zcash_primitives::{ + jubjub::{fs::Fs, ToUniform}, + primitives::ValueCommitment, + }; + + use super::*; + use ironfish_rust::merkle_note::MerkleNote; + use ironfish_rust::note::Memo; + use ironfish_rust::sapling_bls12::Note; + use ironfish_rust::SaplingKey; + use pairing::bls12_381::Bls12; + + #[test] + fn test_merkle_notes_are_equal() { + let spender_key: SaplingKey = + SaplingKey::generate_key(sapling_bls12::SAPLING.clone()); + let receiver_key: SaplingKey = + SaplingKey::generate_key(sapling_bls12::SAPLING.clone()); + let owner = receiver_key.generate_public_address(); + let note = Note::new( + sapling_bls12::SAPLING.clone(), + owner.clone(), + 42, + Memo([0; 32]), + ); + let diffie_hellman_keys = + owner.generate_diffie_hellman_keys(&sapling_bls12::SAPLING.jubjub); + + let mut buffer = [0u8; 64]; + thread_rng().fill(&mut buffer[..]); + + let value_commitment_randomness: Fs = Fs::to_uniform(&buffer[..]); + + let value_commitment = ValueCommitment:: { + value: note.value(), + randomness: value_commitment_randomness, + }; + + let merkle_note = + 
MerkleNote::new(&spender_key, ¬e, &value_commitment, &diffie_hellman_keys); + + let mut cursor: std::io::Cursor> = std::io::Cursor::new(vec![]); + merkle_note.write(&mut cursor).unwrap(); + + let vec = cursor.into_inner(); + let wasm1 = WasmNoteEncrypted::deserialize(&vec); + let wasm2 = WasmNoteEncrypted::deserialize(&vec); + assert!(wasm1.equals(&wasm2)) + } + + #[test] + fn test_can_combine_merkle_note_hashes() { + let arr: [u8; 32] = Default::default(); + let combined_hash = WasmNoteEncrypted::combine_hash(1, &arr, &arr); + + let expected = &[ + 78, 74, 99, 96, 68, 196, 78, 82, 234, 152, 143, 34, 78, 141, 112, 9, 118, 118, 97, 40, + 219, 166, 197, 144, 93, 94, 133, 118, 88, 127, 57, 32, + ]; + assert_eq!(&combined_hash, &expected) + } +} diff --git a/ironfish-wasm/src/wasm_structs/spend_proof.rs b/ironfish-wasm/src/wasm_structs/spend_proof.rs new file mode 100644 index 0000000000..71449c2907 --- /dev/null +++ b/ironfish-wasm/src/wasm_structs/spend_proof.rs @@ -0,0 +1,34 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +use wasm_bindgen::prelude::*; + +use ironfish_rust::sapling_bls12::{MerkleNoteHash, SpendProof}; + +#[wasm_bindgen] +pub struct WasmSpendProof { + pub(crate) proof: SpendProof, +} + +#[wasm_bindgen] +impl WasmSpendProof { + #[wasm_bindgen(getter, js_name = "treeSize")] + pub fn tree_size(&self) -> u32 { + self.proof.tree_size() + } + + #[wasm_bindgen(getter, js_name = "rootHash")] + pub fn root_hash(&self) -> Vec { + let mut cursor: std::io::Cursor> = std::io::Cursor::new(vec![]); + MerkleNoteHash::new(self.proof.root_hash()) + .write(&mut cursor) + .unwrap(); + cursor.into_inner() + } + + #[wasm_bindgen(getter)] + pub fn nullifier(&self) -> Vec { + self.proof.nullifier().to_vec() + } +} diff --git a/ironfish-wasm/src/wasm_structs/transaction.rs b/ironfish-wasm/src/wasm_structs/transaction.rs new file mode 100644 index 0000000000..68770fb27a --- /dev/null +++ b/ironfish-wasm/src/wasm_structs/transaction.rs @@ -0,0 +1,293 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +use wasm_bindgen::prelude::*; + +use ironfish_rust::sapling_bls12::{ + Key, ProposedTransaction, PublicAddress, SimpleTransaction, Transaction, SAPLING, +}; + +use super::note::WasmNote; +use super::spend_proof::WasmSpendProof; +use super::witness::JsWitness; + +#[wasm_bindgen] +pub struct WasmTransactionPosted { + transaction: Transaction, +} + +#[wasm_bindgen] +impl WasmTransactionPosted { + #[wasm_bindgen] + pub fn deserialize(bytes: &[u8]) -> WasmTransactionPosted { + console_error_panic_hook::set_once(); + let mut cursor: std::io::Cursor<&[u8]> = std::io::Cursor::new(bytes); + let transaction = Transaction::read(SAPLING.clone(), &mut cursor).unwrap(); + WasmTransactionPosted { transaction } + } + + #[wasm_bindgen] + pub fn serialize(&self) -> Vec { + let mut cursor: std::io::Cursor> = std::io::Cursor::new(vec![]); + self.transaction.write(&mut cursor).unwrap(); + cursor.into_inner() + } + + #[wasm_bindgen] + pub fn verify(&self) -> bool { + match self.transaction.verify() { + Ok(_) => true, + Err(_e) => false, + } + } + + #[wasm_bindgen(getter, js_name = "notesLength")] + pub fn notes_length(&self) -> usize { + self.transaction.receipts().len() + } + + #[wasm_bindgen(js_name = "getNote")] + pub fn get_note(&self, index: usize) -> Vec { + let proof = &self.transaction.receipts()[index]; + // Note bytes are 275 + let mut cursor: Vec = Vec::with_capacity(275); + proof.merkle_note().write(&mut cursor).unwrap(); + cursor + } + + #[wasm_bindgen(getter, js_name = "spendsLength")] + pub fn spends_length(&self) -> usize { + self.transaction.spends().len() + } + + #[wasm_bindgen(js_name = "getSpend")] + pub fn get_spend(&self, index: usize) -> WasmSpendProof { + let proof = &self.transaction.spends()[index]; + WasmSpendProof { + proof: proof.clone(), + } + } + + #[wasm_bindgen(getter, js_name = "transactionFee")] + pub fn transaction_fee(&self) -> i64 { + self.transaction.transaction_fee() + } + + #[wasm_bindgen(getter, js_name = "transactionSignature")] + pub fn 
transaction_signature(&self) -> Vec { + let mut serialized_signature = vec![]; + self.transaction + .binding_signature() + .write(&mut serialized_signature) + .unwrap(); + serialized_signature + } + + #[wasm_bindgen(getter, js_name = "transactionHash")] + pub fn transaction_hash(&self) -> Vec { + self.transaction.transaction_signature_hash().to_vec() + } +} + +#[wasm_bindgen] +pub struct WasmTransaction { + transaction: ProposedTransaction, +} + +#[wasm_bindgen] +impl WasmTransaction { + #[wasm_bindgen(constructor)] + pub fn new() -> WasmTransaction { + console_error_panic_hook::set_once(); + WasmTransaction { + transaction: ProposedTransaction::new(SAPLING.clone()), + } + } + + /// Create a proof of a new note owned by the recipient in this transaction. + #[wasm_bindgen] + pub fn receive(&mut self, spender_hex_key: &str, note: &WasmNote) -> String { + let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).unwrap(); + match self.transaction.receive(&spender_key, ¬e.note) { + Ok(_) => "".into(), + Err(e) => match e { + ironfish_rust::errors::SaplingProofError::InconsistentWitness => { + "InconsistentWitness".into() + } + ironfish_rust::errors::SaplingProofError::IOError => "IOError".into(), + ironfish_rust::errors::SaplingProofError::ReceiptCircuitProofError => { + "ReceiptCircuitProofError".into() + } + ironfish_rust::errors::SaplingProofError::SaplingKeyError => { + "SaplingKeyError".into() + } + ironfish_rust::errors::SaplingProofError::SigningError => "SigningError".into(), + ironfish_rust::errors::SaplingProofError::SpendCircuitProofError(d) => { + format!("SpendCircuitProofError - {}", d) + } + ironfish_rust::errors::SaplingProofError::VerificationFailed => { + "VerificationFailed".into() + } + }, + } + } + + /// Spend the note owned by spender_hex_key at the given witness location. 
+ #[wasm_bindgen] + pub fn spend(&mut self, spender_hex_key: &str, note: &WasmNote, witness: &JsWitness) -> String { + let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).unwrap(); + match self.transaction.spend(spender_key, ¬e.note, witness) { + Ok(_) => "".into(), + Err(e) => match e { + ironfish_rust::errors::SaplingProofError::InconsistentWitness => { + "InconsistentWitness".into() + } + ironfish_rust::errors::SaplingProofError::IOError => "IOError".into(), + ironfish_rust::errors::SaplingProofError::ReceiptCircuitProofError => { + "ReceiptCircuitProofError".into() + } + ironfish_rust::errors::SaplingProofError::SaplingKeyError => { + "SaplingKeyError".into() + } + + ironfish_rust::errors::SaplingProofError::SigningError => "SigningError".into(), + ironfish_rust::errors::SaplingProofError::SpendCircuitProofError(d) => { + format!("SpendCircuitProofError - {}", d) + } + ironfish_rust::errors::SaplingProofError::VerificationFailed => { + "VerificationFailed".into() + } + }, + } + } + + /// Special case for posting a miners fee transaction. Miner fee transactions + /// are unique in that they generate currency. They do not have any spends + /// or change and therefore have a negative transaction fee. In normal use, + /// a miner would not accept such a transaction unless it was explicitly set + /// as the miners fee. + #[wasm_bindgen] + pub fn post_miners_fee(&mut self) -> WasmTransactionPosted { + WasmTransactionPosted { + transaction: self.transaction.post_miners_fee().unwrap(), + } + } + + /// Post the transaction. This performs a bit of validation, and signs + /// the spends with a signature that proves the spends are part of this + /// transaction. + /// + /// Transaction fee is the amount the spender wants to send to the miner + /// for mining this transaction. This has to be non-negative; sane miners + /// wouldn't accept a transaction that takes money away from them. 
+ /// + /// sum(spends) - sum(outputs) - intended_transaction_fee - change = 0 + /// aka: self.transaction_fee - intended_transaction_fee - change = 0 + #[wasm_bindgen] + pub fn post( + &mut self, + spender_hex_key: &str, + change_goes_to: Option<String>, + intended_transaction_fee: u64, + ) -> WasmTransactionPosted { + let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).unwrap(); + let change_key = match change_goes_to { + Some(s) => Some(PublicAddress::from_hex(SAPLING.clone(), &s).unwrap()), + None => None, + }; + WasmTransactionPosted { + transaction: self + .transaction + .post(&spender_key, change_key, intended_transaction_fee) + .unwrap(), + } + } +} + +impl Default for WasmTransaction { + fn default() -> Self { + WasmTransaction::new() + } +} + +#[wasm_bindgen] +pub struct WasmSimpleTransaction { + transaction: SimpleTransaction, +} + +#[wasm_bindgen] +impl WasmSimpleTransaction { + #[wasm_bindgen(constructor)] + pub fn new(spender_hex_key: &str, intended_transaction_fee: u64) -> WasmSimpleTransaction { + console_error_panic_hook::set_once(); + let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).unwrap(); + WasmSimpleTransaction { + transaction: SimpleTransaction::new( + SAPLING.clone(), + spender_key, + intended_transaction_fee, + ), + } + } + + #[wasm_bindgen] + pub fn spend(&mut self, note: &WasmNote, witness: &JsWitness) -> String { + match self.transaction.spend(&note.note, witness) { + Ok(_) => "".into(), + Err(e) => match e { + ironfish_rust::errors::SaplingProofError::InconsistentWitness => { + "InconsistentWitness".into() + } + ironfish_rust::errors::SaplingProofError::IOError => "IOError".into(), + ironfish_rust::errors::SaplingProofError::ReceiptCircuitProofError => { + "ReceiptCircuitProofError".into() + } + ironfish_rust::errors::SaplingProofError::SaplingKeyError => { + "SaplingKeyError".into() + } + + ironfish_rust::errors::SaplingProofError::SigningError => "SigningError".into(), + 
ironfish_rust::errors::SaplingProofError::SpendCircuitProofError(d) => { + format!("SpendCircuitProofError - {}", d) + } + ironfish_rust::errors::SaplingProofError::VerificationFailed => { + "VerificationFailed".into() + } + }, + } + } + + #[wasm_bindgen] + pub fn receive(&mut self, note: &WasmNote) -> String { + match self.transaction.receive(&note.note) { + Ok(_) => "".into(), + Err(e) => match e { + ironfish_rust::errors::SaplingProofError::InconsistentWitness => { + "InconsistentWitness".into() + } + ironfish_rust::errors::SaplingProofError::IOError => "IOError".into(), + ironfish_rust::errors::SaplingProofError::ReceiptCircuitProofError => { + "ReceiptCircuitProofError".into() + } + ironfish_rust::errors::SaplingProofError::SaplingKeyError => { + "SaplingKeyError".into() + } + ironfish_rust::errors::SaplingProofError::SigningError => "SigningError".into(), + ironfish_rust::errors::SaplingProofError::SpendCircuitProofError(d) => { + format!("SpendCircuitProofError - {}", d) + } + ironfish_rust::errors::SaplingProofError::VerificationFailed => { + "VerificationFailed".into() + } + }, + } + } + + #[wasm_bindgen] + pub fn post(&mut self) -> WasmTransactionPosted { + WasmTransactionPosted { + transaction: self.transaction.post().unwrap(), + } + } +} diff --git a/ironfish-wasm/src/wasm_structs/witness.rs b/ironfish-wasm/src/wasm_structs/witness.rs new file mode 100644 index 0000000000..c349331104 --- /dev/null +++ b/ironfish-wasm/src/wasm_structs/witness.rs @@ -0,0 +1,108 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/ + +use wasm_bindgen::prelude::*; +use wasm_bindgen::JsCast; + +use ironfish_rust::sapling_bls12::{Bls12, Fr, MerkleNoteHash}; +use ironfish_rust::witness::{WitnessNode, WitnessTrait}; + +#[wasm_bindgen(typescript_custom_section)] +const IWITNESS: &'static str = r#" +interface IWitness { + verify(myHash: Uint8Array): bool; + authPath(): IWitnessNode[]; + treeSize(): number; + serializeRootHash(): Uint8Array; +} +"#; + +#[wasm_bindgen] +/// Structural representation of a witness from TypeScript. +/// The IWitness TypeScript interface above must be manually updated +/// if changing this struct. +extern "C" { + #[wasm_bindgen(typescript_type = "IWitness")] + pub type JsWitness; + + #[wasm_bindgen(method)] + pub fn verify(this: &JsWitness, hash: &[u8]) -> bool; + + #[wasm_bindgen(method, js_name = "authPath")] + pub fn auth_path(this: &JsWitness) -> js_sys::Array; + + #[wasm_bindgen(method, js_name = "treeSize")] + pub fn tree_size(this: &JsWitness) -> u32; + + #[wasm_bindgen(method, js_name = "serializeRootHash")] + pub fn serialize_root_hash(this: &JsWitness) -> Vec<u8>; +} + +#[wasm_bindgen(typescript_custom_section)] +const IWITNESSNODE: &'static str = r#" +interface IWitnessNode { + side(): 'Left' | 'Right'; + hashOfSibling(): Uint8Array; +} +"#; + +#[wasm_bindgen] +/// Structural representation of a WitnessNode from TypeScript +/// The IWitnessNode TypeScript interface above must be manually updated +/// if changing this struct. +extern "C" { + #[wasm_bindgen(typescript_type = "IWitnessNode")] + pub type JsWitnessNode; + + #[wasm_bindgen(method)] + pub fn side(this: &JsWitnessNode) -> String; + + #[wasm_bindgen(method, js_name = "hashOfSibling")] + pub fn hash_of_sibling(this: &JsWitnessNode) -> Vec<u8>; +} + +/// Implements WitnessTrait on JsWitness so that witnesses from the +/// TypeScript side can be passed into classes that require witnesses, +/// like transactions.
+impl WitnessTrait for JsWitness { + fn verify(&self, hash: &MerkleNoteHash) -> bool { + let mut cursor: std::io::Cursor<Vec<u8>> = std::io::Cursor::new(vec![]); + hash.write(&mut cursor).unwrap(); + self.verify(&cursor.into_inner()) + } + + fn get_auth_path(&self) -> Vec<WitnessNode<Fr>> { + self.auth_path() + .iter() + .map(|element| { + // Unchecked cast here so that wasm-bindgen allows duck-typed objects + // rather than asserting that the object is an instance of JsWitnessNode + let cast = element.unchecked_into::<JsWitnessNode>(); + + // hashOfSibling returns a serialized hash, so convert it + // back into a MerkleNoteHash + let bytes = cast.hash_of_sibling(); + let mut cursor = std::io::Cursor::new(&bytes); + let fr = MerkleNoteHash::read(&mut cursor).unwrap().0; + + if cast.side() == "Left" { + WitnessNode::Left(fr) + } else { + WitnessNode::Right(fr) + } + }) + .collect() + } + + fn root_hash(&self) -> Fr { + // Convert the serialized root hash back to a Fr + let bytes = self.serialize_root_hash(); + let mut cursor: std::io::Cursor<&[u8]> = std::io::Cursor::new(&bytes); + MerkleNoteHash::read(&mut cursor).unwrap().0 + } + + fn tree_size(&self) -> u32 { + self.tree_size() + } +} diff --git a/ironfish-wasm/tsconfig.json b/ironfish-wasm/tsconfig.json new file mode 100644 index 0000000000..c926dd6617 --- /dev/null +++ b/ironfish-wasm/tsconfig.json @@ -0,0 +1,21 @@ +{ + "compilerOptions": { + "target": "es5", + "outDir": "build", + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "composite": true, + "strict": true, + "forceConsistentCasingInFileNames": true, + "module": "esnext", + "moduleResolution": "node", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "react" + }, + "include": ["bundler", "nodejs"], + } + \ No newline at end of file diff --git a/ironfish/.eslintignore b/ironfish/.eslintignore new file mode 100644 index 0000000000..ba6401e0e5 --- /dev/null +++ 
b/ironfish/.eslintignore @@ -0,0 +1 @@ +gitHash.ts diff --git a/ironfish/.eslintrc.js b/ironfish/.eslintrc.js new file mode 100644 index 0000000000..f2fb3ae1e8 --- /dev/null +++ b/ironfish/.eslintrc.js @@ -0,0 +1,6 @@ +module.exports = { + extends: ['ironfish'], + parserOptions: { + tsconfigRootDir: __dirname, + }, +} diff --git a/ironfish/.prettierrc.js b/ironfish/.prettierrc.js new file mode 100644 index 0000000000..1ad9c111e4 --- /dev/null +++ b/ironfish/.prettierrc.js @@ -0,0 +1 @@ +module.exports = 'eslint-config-ironfish/prettierrc' diff --git a/ironfish/README.md b/ironfish/README.md new file mode 100644 index 0000000000..5ca59764bb --- /dev/null +++ b/ironfish/README.md @@ -0,0 +1,62 @@ +# Ironfish + +[![codecov](https://codecov.io/gh/iron-fish/ironfish/branch/master/graph/badge.svg?token=PCSVEVEW5V&flag=ironfish)](https://codecov.io/gh/iron-fish/ironfish) + +Ironfish SDK wraps all of the generic components of [Captain](./src/captain/README.md) into a project that is specific to Ironfish. + +## Components + +### Strategy +It also contains a strategy, which is a collection of implementations that [Captain](./src/captain/README.md) uses to implement coin specific logic. + +### Accounts + An account store used to manage, create, and update Ironfish accounts. + +### Config +This represents the IronfishConfig and all of it's options. It's a hierarchical config system that has 3 levels of options. If you use `config.get()` or `config.config` then you'll always get the top level config options. + +``` +-> config + -> overrides + * contains all the overrides, these usually come from the CLI + -> loaded + * contains all the values loaded from the users config file + -> defaults + * contains all the default values in the config +``` + +### FileSystem +This is an abtraction on top of any file system related APIs like `path` and `fs` in node. It makes it so you can perform file and file system related methods in a way that works in the browser and node. 
`NodeFileSystem` is one implementation that works for node. + +### RpcServer +This is the server that handles clients connecting and making requests against the RPC routes. This server doesn't have much logic of it's own, but it contains a set of adapters that each implement a transport mechanism. + +When the RpcServer starts, so do the transports. They accept messages from clients, construct Requests, and route them into the routing layer which executes the proper route. + +#### Adapter +An adapter exists to represent a single transport layer. For example, in an HTTP adapter you might listen on port 80 for requests, construct RPC layer Request objects, and feed them into the routing layer, then render the RPC responses as HTTP responses. See IPCAdapter for an example on how to implement an adapter. + +### Logs +By default the log level is set to only display info. + +Change the `logLevel` in the config file, from `*:info` to `*debug` if you want verbose logs. + +### IronfishSDK +This project contains the IronfishSdk which is just a simple wrapper around the ironfish components like Accounts, Config, and IronfishNode. You can use the individual components when ever you feel like it, though the SDK is aimed at making usage easier. 
+ +#### SDK Example + +```typescript +// Initialize the SDK +const sdk = await IronfishSdk.init() + +// List all accounts from the SDK +console.log(await sdk.accounts.list()) + +// Get a config option from the SDK +console.log(await sdk.config.get('enableMiningDirector')) + +// Start a node from the SDK +const node = sdk.node() +node.start() +``` diff --git a/ironfish/jest.config.js b/ironfish/jest.config.js new file mode 100644 index 0000000000..3ad9669934 --- /dev/null +++ b/ironfish/jest.config.js @@ -0,0 +1,9 @@ +const base = require('../config/jest.config.base') +const pkg = require('./package.json') + +module.exports = { + ...base, + displayName: pkg.name, + globalSetup: './jest.setup.js', + setupFilesAfterEnv: ['./jest.setup.env.js'], +} diff --git a/ironfish/jest.setup.env.js b/ironfish/jest.setup.env.js new file mode 100644 index 0000000000..30c9540a0a --- /dev/null +++ b/ironfish/jest.setup.env.js @@ -0,0 +1,16 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +const consola = require('consola') +const { generateKey } = require('ironfish-wasm-nodejs') + +beforeAll(() => { + // This causes the WASM to be initialized, which is 1 time 2 second cost for each test suite + if (process.env.TEST_INIT_WASM) { + generateKey() + } +}) + +beforeEach(() => { + consola.pause() +}) diff --git a/ironfish/jest.setup.js b/ironfish/jest.setup.js new file mode 100644 index 0000000000..ab1789cf29 --- /dev/null +++ b/ironfish/jest.setup.js @@ -0,0 +1,12 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +const fs = require('fs') + +module.exports = async () => { + if (fs.existsSync('./testdbs')) { + fs.rmdirSync('./testdbs', { recursive: true }) + } + + fs.mkdirSync('./testdbs') +} diff --git a/ironfish/package.json b/ironfish/package.json new file mode 100644 index 0000000000..3b9bbac747 --- /dev/null +++ b/ironfish/package.json @@ -0,0 +1,78 @@ +{ + "name": "ironfish", + "version": "0.1.0", + "private": true, + "author": "Iron Fish (https://ironfish.network)", + "main": "build/src", + "license": "MPL-2.0", + "dependencies": { + "axios": "0.21.1", + "blake3-wasm": "2.1.5", + "buffer": "6.0.3", + "buffer-json": "2.0.0", + "buffer-map": "0.0.7", + "colors": "1.4.0", + "consola": "2.15.0", + "date-fns": "2.16.1", + "hex-array": "1.0.0", + "imurmurhash": "0.1.4", + "level-errors": "2.0.1", + "leveldown": "5.6.0", + "levelup": "4.4.0", + "lodash": "^4.17.20", + "lru-cache": "6.0.0", + "node-ipc": "9.1.3", + "parse-json": "5.1.0", + "piscina": "^2.1.0", + "simple-peer": "9.9.3", + "tweetnacl": "1.0.3", + "uuid": "^8.3.0", + "yup": "0.29.3" + }, + "scripts": { + "build": "tsc -b", + "build:watch": "tsc -b -w", + "build:docs": "tsc -b && typedoc --options ../typedoc.json", + "build:tests": "tsc -b tsconfig.test.json", + "lint": "tsc -b && tsc -b tsconfig.test.json && eslint --ext .ts,.tsx,.js,.jsx src/", + "lint:fix": "tsc -b && tsc -b tsconfig.test.json && eslint --ext .ts,.tsx,.js,.jsx src/ --fix", + "start": "tsc -b -w", + "test": "tsc -b && tsc -b tsconfig.test.json && jest", + "test:slow": "tsc -b && tsc -b tsconfig.test.json && TEST_INIT_WASM=true jest --testMatch \"**/*.test.slow.ts\" --testPathIgnorePatterns", + "test:coverage:html": "tsc -b tsconfig.test.json && jest --coverage --coverage-reporters html --testPathIgnorePatterns", + "test:watch": "tsc -b tsconfig.test.json && jest --watch --coverage false" + }, + "devDependencies": { + "@types/buffer-json": "2.0.0", + "@types/colors": "1.2.1", + "@types/imurmurhash": "0.1.1", + "@types/jest": "^26.0.5", + 
"@types/leveldown": "4.0.2", + "@types/levelup": "4.3.0", + "@types/lodash": "^4.14.161", + "@types/lru-cache": "5.1.0", + "@types/simple-peer": "9.6.1", + "@types/uuid": "^8.0.1", + "@types/ws": "^7.2.6", + "@types/yup": "0.29.10", + "@typescript-eslint/eslint-plugin": "4.9.1", + "@typescript-eslint/parser": "4.9.1", + "eslint": "7.15.0", + "eslint-config-ironfish": "*", + "eslint-config-prettier": "7.0.0", + "eslint-plugin-jest": "24.1.3", + "eslint-plugin-prettier": "3.2.0", + "eslint-plugin-react-hooks": "4.2.0", + "jest": "26.6.3", + "prettier": "2.2.1", + "ts-jest": "26.4.4", + "typedoc": "0.19.2", + "typescript": "4.1.2", + "ws": "7.4.1" + }, + "resolutions": { + "highlight.js": "10.4.1", + "node-forge": "0.10.0", + "object-path": "^0.11.4" + } +} diff --git a/ironfish/src/account/accounts.test.slow.ts b/ironfish/src/account/accounts.test.slow.ts new file mode 100644 index 0000000000..5feb36b254 --- /dev/null +++ b/ironfish/src/account/accounts.test.slow.ts @@ -0,0 +1,605 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { SerializedBlock, Target } from '../captain' +import { fakeMaxTarget } from '../captain/testUtilities' +import { IJSON } from '../serde' +import { genesisBlockData } from '../genesis/genesisBlock' +import { AsyncTransactionWorkerPool } from '../strategy/asyncTransactionWorkerPool' +import { generateKey } from 'ironfish-wasm-nodejs' +import { createNodeTest, useAccountFixture, useBlockFixture } from '../testUtilities' + +describe('Accounts', () => { + const nodeTest = createNodeTest() + let targetMeetsSpy: jest.SpyInstance + let targetSpy: jest.SpyInstance + + beforeAll(() => { + targetMeetsSpy = jest.spyOn(Target, 'meets').mockImplementation(() => true) + targetSpy = jest.spyOn(Target, 'calculateTarget').mockImplementation(() => fakeMaxTarget()) + }) + + afterAll(async () => { + await AsyncTransactionWorkerPool.stop() + targetMeetsSpy.mockClear() + targetSpy.mockClear() + }) + + it('Returns the correct balance when an account receives a miners fee', async () => { + // Initialize the database and chain + const strategy = nodeTest.strategy + const node = nodeTest.node + const captain = nodeTest.captain + + const account = await node.accounts.createAccount('test', true) + + // TODO: This should happen automatically as a result of addBlock + await node.accounts.updateHead(node) + + // Initial balance should be 0 + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(0), + }) + + const result = IJSON.parse(genesisBlockData) as SerializedBlock + const block = strategy._blockSerde.deserialize(result) + const addedBlock = await captain.chain.addBlock(block) + expect(addedBlock.isAdded).toBe(true) + + // TODO: This should happen automatically as a result of addBlock + await node.accounts.updateHead(node) + + // Balance after adding the genesis block should be 0 + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(0), + }) + + // Create a block 
with a miner's fee + const minersfee = await nodeTest.strategy.createMinersFee( + BigInt(0), + block.header.sequence + BigInt(1), + account.spendingKey, + ) + const newBlock = await captain.chain.newBlock([], minersfee) + const addResult = await captain.chain.addBlock(newBlock) + expect(addResult.isAdded).toBeTruthy() + + // TODO: This should happen automatically as a result of addBlock + await node.accounts.updateHead(node) + + // Account should now have a balance of 500000000 after adding the miner's fee + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(500000000), + unconfirmedBalance: BigInt(500000000), + }) + }, 600000) + + it('Saves and restores transactions from accounts db', async () => { + // Initialize the database and chain + const strategy = nodeTest.strategy + const node = nodeTest.node + const captain = nodeTest.captain + + const account = await node.accounts.createAccount('test', true) + + // Initial balance should be 0 + // TODO: This should happen automatically as a result of addBlock + await node.accounts.updateHead(node) + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(0), + }) + + const result = IJSON.parse(genesisBlockData) as SerializedBlock + const block = strategy._blockSerde.deserialize(result) + const addedBlock = await captain.chain.addBlock(block) + expect(addedBlock.isAdded).toBe(true) + + // Balance after adding the genesis block should be 0 + // TODO: This should happen automatically as a result of addBlock + await node.accounts.updateHead(node) + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(0), + }) + + // Create a block with a miner's fee + const minersfee = await strategy.createMinersFee( + BigInt(0), + block.header.sequence + BigInt(1), + account.spendingKey, + ) + const newBlock = await captain.chain.newBlock([], minersfee) + const addResult = await 
captain.chain.addBlock(newBlock) + expect(addResult.isAdded).toBeTruthy() + + // Account should now have a balance of 500000000 after adding the miner's fee + // TODO: This should happen automatically as a result of addBlock + await node.accounts.updateHead(node) + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(500000000), + unconfirmedBalance: BigInt(500000000), + }) + + await node.accounts.saveTransactionsToDb() + + node.accounts['noteToNullifier'].clear() + node.accounts['nullifierToNote'].clear() + node.accounts['transactionMap'].clear() + + // Account should now have a balance of 0 after clearing the cache + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(0), + }) + + await node.accounts.loadTransactionsFromDb() + + // Balance should be back to 500000000 + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(500000000), + unconfirmedBalance: BigInt(500000000), + }) + }, 600000) + + it('Lowers the balance after using pay to spend a note', async () => { + // Initialize the database and chain + const strategy = nodeTest.strategy + const node = nodeTest.node + const captain = nodeTest.captain + + const account = await node.accounts.createAccount('test', true) + + // Initial balance should be 0 + // TODO: This should happen automatically as a result of addBlock + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(0), + }) + + const result = IJSON.parse(genesisBlockData) as SerializedBlock + const block = strategy._blockSerde.deserialize(result) + const addedBlock = await captain.chain.addBlock(block) + expect(addedBlock.isAdded).toBe(true) + + // Balance after adding the genesis block should be 0 + // TODO: This should happen automatically as a result of addBlock + await node.accounts.updateHead(node) + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(0), 
+ unconfirmedBalance: BigInt(0), + }) + + // Create a block with a miner's fee + const minersfee = await strategy.createMinersFee( + BigInt(0), + block.header.sequence + BigInt(1), + account.spendingKey, + ) + const newBlock = await captain.chain.newBlock([], minersfee) + const addResult = await captain.chain.addBlock(newBlock) + expect(addResult.isAdded).toBeTruthy() + + // Account should now have a balance of 500000000 after adding the miner's fee + // TODO: This should happen automatically as a result of addBlock + await node.accounts.updateHead(node) + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(500000000), + unconfirmedBalance: BigInt(500000000), + }) + + // Spend the balance + const transaction = await node.accounts.pay( + captain, + node.memPool, + account, + BigInt(2), + BigInt(0), + '', + generateKey().public_address, + ) + + // Create a block with a miner's fee + const minersfee2 = await strategy.createMinersFee( + transaction.transactionFee(), + block.header.sequence + BigInt(1), + generateKey().spending_key, + ) + const newBlock2 = await captain.chain.newBlock([transaction], minersfee2) + const addResult2 = await captain.chain.addBlock(newBlock2) + expect(addResult2.isAdded).toBeTruthy() + + // Balance after adding the transaction that spends 2 should be 499999998 + // TODO: This should happen automatically as a result of addBlock + await node.accounts.updateHead(node) + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(499999998), + unconfirmedBalance: BigInt(499999998), + }) + }, 600000) + + it('Counts notes correctly when a block has transactions not used by any account', async () => { + const nodeA = nodeTest.node + await nodeA.seed() + + // Create an account A + const accountA = await useAccountFixture(nodeA.accounts, () => + nodeA.accounts.createAccount('testA'), + ) + const accountB = await useAccountFixture(nodeA.accounts, () => + nodeA.accounts.createAccount('testB'), + ) + const 
accountC = await useAccountFixture(nodeA.accounts, () => + nodeA.accounts.createAccount('testC'), + ) + + // Create a block with a miner's fee + const block1 = await useBlockFixture(nodeA.captain, async () => + nodeA.captain.chain.newBlock( + [], + await nodeA.strategy.createMinersFee(BigInt(0), BigInt(2), accountA.spendingKey), + ), + ) + + const addedBlock = await nodeA.captain.chain.addBlock(block1) + expect(addedBlock.isAdded).toBe(true) + + // Initial balance should be 500000000 + await nodeA.accounts.updateHead(nodeA) + expect(nodeA.accounts.getBalance(accountA)).toEqual({ + confirmedBalance: BigInt(500000000), + unconfirmedBalance: BigInt(500000000), + }) + + const block2 = await useBlockFixture(nodeA.captain, async () => { + // Generate a transaction from account A to account B + const transaction = await nodeA.accounts.createTransaction( + nodeA.captain, + accountA, + BigInt(1), + BigInt(1), + '', + accountB.publicAddress, + ) + + // Create block 2 + return nodeA.captain.chain.newBlock( + [transaction], + await nodeA.strategy.createMinersFee( + transaction.transactionFee(), + BigInt(3), + generateKey().spending_key, + ), + ) + }) + + await nodeA.captain.chain.addBlock(block2) + await nodeA.accounts.updateHead(nodeA) + + // Attempting to create another transaction for account A + // to account C should not throw an error + await expect( + nodeA.accounts.createTransaction( + nodeA.captain, + accountA, + BigInt(1), + BigInt(1), + '', + accountC.publicAddress, + ), + ).resolves.toBeTruthy() + }, 600000) + + it('Removes notes when rolling back a fork', async () => { + // Create a block A1 that gives account A money + // Create a block B1 and B2 that gives account B money + // G -> A1 + // -> B1 -> B2 + + const nodeA = nodeTest.node + const { node: nodeB } = await nodeTest.createSetup() + await Promise.all([nodeA.seed(), nodeB.seed()]) + + const accountA = await useAccountFixture(nodeA.accounts, 'testA') + const accountB = await useAccountFixture(nodeB.accounts, 
'testB') + + await nodeA.accounts.importAccount(accountB) + + // Create and add A1 + const blockA1 = await useBlockFixture(nodeA.captain, async () => + nodeA.captain.chain.newBlock( + [], + await nodeA.strategy.createMinersFee(BigInt(0), BigInt(2), accountA.spendingKey), + ), + ) + let addedBlock = await nodeA.captain.chain.addBlock(blockA1) + expect(addedBlock.isAdded).toBe(true) + + // Create and add B1 + const blockB1 = await useBlockFixture(nodeB.captain, async () => + nodeB.captain.chain.newBlock( + [], + await nodeB.strategy.createMinersFee(BigInt(0), BigInt(2), accountB.spendingKey), + ), + ) + addedBlock = await nodeB.captain.chain.addBlock(blockB1) + expect(addedBlock.isAdded).toBe(true) + + // Create and add B2 + const blockB2 = await useBlockFixture(nodeB.captain, async () => + nodeB.captain.chain.newBlock( + [], + await nodeB.strategy.createMinersFee(BigInt(0), BigInt(2), accountB.spendingKey), + ), + ) + addedBlock = await nodeB.captain.chain.addBlock(blockB2) + expect(addedBlock.isAdded).toBe(true) + + // Update account head and check all balances + await nodeA.accounts['updateHead'](nodeA) + await nodeB.accounts['updateHead'](nodeB) + expect(nodeA.accounts.getBalance(accountA)).toEqual({ + confirmedBalance: BigInt(500000000), + unconfirmedBalance: BigInt(500000000), + }) + expect(nodeA.accounts.getBalance(accountB)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(0), + }) + expect(nodeB.accounts.getBalance(accountB)).toEqual({ + confirmedBalance: BigInt(1000000000), + unconfirmedBalance: BigInt(1000000000), + }) + + // Copy block B1 to nodeA + await nodeA.captain.chain.addBlock(blockB1) + await nodeA.accounts['updateHead'](nodeA) + + // Copy block B2 to nodeA + await nodeA.captain.chain.addBlock(blockB2) + await nodeA.accounts['updateHead'](nodeA) + expect(nodeA.accounts.getBalance(accountA)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(500000000), + }) + 
expect(nodeA.accounts.getBalance(accountB)).toEqual({ + confirmedBalance: BigInt(1000000000), + unconfirmedBalance: BigInt(1000000000), + }) + }, 60000) + + it('Keeps spends created by the node when rolling back a fork', async () => { + // Create a block 1 that gives account A money + // Create a block A2 with a transaction from account A to account B + // Create a block B2 that gives neither account money + // G -> A1 -> A2 + // -> B2 -> B3 + + const nodeA = nodeTest.node + const { node: nodeB } = await nodeTest.createSetup() + await Promise.all([nodeA.seed(), nodeB.seed()]) + + const accountA = await useAccountFixture(nodeA.accounts, 'testA') + const accountB = await useAccountFixture(nodeB.accounts, 'testB') + await nodeA.accounts.importAccount(accountB) + await nodeB.accounts.importAccount(accountA) + + // Create and add Block 1 + const block1 = await useBlockFixture(nodeB.captain, async () => + nodeA.captain.chain.newBlock( + [], + await nodeA.strategy.createMinersFee(BigInt(0), BigInt(2), accountA.spendingKey), + ), + ) + let addedBlock = await nodeA.captain.chain.addBlock(block1) + expect(addedBlock.isAdded).toBe(true) + addedBlock = await nodeB.captain.chain.addBlock(block1) + expect(addedBlock.isAdded).toBe(true) + + await nodeA.accounts['updateHead'](nodeA) + + // Create and add A2 + const blockA2 = await useBlockFixture( + nodeA.captain, + async () => { + // Generate a transaction from account A to account B + const transaction = await nodeA.accounts.createTransaction( + nodeA.captain, + accountA, + BigInt(2), + BigInt(0), + '', + accountB.publicAddress, + ) + + // Create block A2 + return nodeA.captain.chain.newBlock( + [transaction], + await nodeA.strategy.createMinersFee( + BigInt(0), + BigInt(3), + generateKey().spending_key, + ), + ) + }, + nodeA.accounts, + ) + + addedBlock = await nodeA.captain.chain.addBlock(blockA2) + expect(addedBlock.isAdded).toBe(true) + + // Create and add B2 + const blockB2 = await useBlockFixture(nodeB.captain, async () => 
+ nodeB.captain.chain.newBlock( + [], + await nodeB.strategy.createMinersFee(BigInt(0), BigInt(3), generateKey().spending_key), + ), + ) + addedBlock = await nodeB.captain.chain.addBlock(blockB2) + expect(addedBlock.isAdded).toBe(true) + + // Create and add B3 + const blockB3 = await useBlockFixture(nodeB.captain, async () => + nodeB.captain.chain.newBlock( + [], + await nodeB.strategy.createMinersFee(BigInt(0), BigInt(4), generateKey().spending_key), + ), + ) + addedBlock = await nodeB.captain.chain.addBlock(blockB3) + expect(addedBlock.isAdded).toBe(true) + + // Update account head and check all balances + await nodeA.accounts['updateHead'](nodeA) + await nodeB.accounts['updateHead'](nodeB) + + expect(nodeA.accounts.getBalance(accountA)).toEqual({ + confirmedBalance: BigInt(499999998), + unconfirmedBalance: BigInt(499999998), + }) + expect(nodeA.accounts.getBalance(accountB)).toEqual({ + confirmedBalance: BigInt(2), + unconfirmedBalance: BigInt(2), + }) + expect(nodeB.accounts.getBalance(accountA)).toEqual({ + confirmedBalance: BigInt(500000000), + unconfirmedBalance: BigInt(500000000), + }) + + // Copy block B2 and B3 to nodeA + await nodeA.captain.chain.addBlock(blockB2) + await nodeA.captain.chain.addBlock(blockB3) + await nodeA.accounts['updateHead'](nodeA) + + // B should not have confirmed coins yet because the transaction isn't on a block + // A should not have confirmed coins any more because the transaction is pending + expect(nodeA.accounts.getBalance(accountA)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(499999998), + }) + expect(nodeA.accounts.getBalance(accountB)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(2), + }) + }, 600000) + + it('Undoes spends created by another node when rolling back a fork', async () => { + // Create a block 1 that gives account A money + // Create a block A2 with a transaction from account A to account B + // Create a block B2 that gives neither account money + // G -> A1 
-> A2 + // -> B2 -> B3 + + const nodeA = nodeTest.node + const { node: nodeB } = await nodeTest.createSetup() + await Promise.all([nodeA.seed(), nodeB.seed()]) + + const accountA = await useAccountFixture(nodeA.accounts, 'testA') + const accountB = await useAccountFixture(nodeB.accounts, 'testB') + await nodeA.accounts.importAccount(accountB) + await nodeB.accounts.importAccount(accountA) + + // Create and add Block 1 + const block1 = await useBlockFixture( + nodeA.captain, + async () => + nodeA.captain.chain.newBlock( + [], + await nodeA.strategy.createMinersFee(BigInt(0), BigInt(2), accountA.spendingKey), + ), + nodeA.accounts, + ) + + let addedBlock = await nodeA.captain.chain.addBlock(block1) + expect(addedBlock.isAdded).toBe(true) + addedBlock = await nodeB.captain.chain.addBlock(block1) + expect(addedBlock.isAdded).toBe(true) + + // Generate a transaction from account A to account B + await nodeB.accounts['updateHead'](nodeB) + + // Create and add A2 + const blockA2 = await useBlockFixture( + nodeB.captain, + async () => { + // Generate a transaction from account A to account B + const transaction = await nodeB.accounts.createTransaction( + nodeB.captain, + accountA, + BigInt(2), + BigInt(0), + '', + accountB.publicAddress, + ) + + // Create block A2 + return nodeA.captain.chain.newBlock( + [transaction], + await nodeA.strategy.createMinersFee( + BigInt(0), + BigInt(3), + generateKey().spending_key, + ), + ) + }, + nodeB.accounts, + ) + + addedBlock = await nodeA.captain.chain.addBlock(blockA2) + expect(addedBlock.isAdded).toBe(true) + + // Create and add B2 + const blockB2 = await useBlockFixture(nodeB.captain, async () => + nodeB.captain.chain.newBlock( + [], + await nodeB.strategy.createMinersFee(BigInt(0), BigInt(3), generateKey().spending_key), + ), + ) + addedBlock = await nodeB.captain.chain.addBlock(blockB2) + expect(addedBlock.isAdded).toBe(true) + + // Create and add B3 + const blockB3 = await useBlockFixture(nodeB.captain, async () => + 
nodeB.captain.chain.newBlock( + [], + await nodeB.strategy.createMinersFee(BigInt(0), BigInt(4), generateKey().spending_key), + ), + ) + addedBlock = await nodeB.captain.chain.addBlock(blockB3) + expect(addedBlock.isAdded).toBe(true) + + // Update account head and check all balances + await nodeA.accounts['updateHead'](nodeA) + await nodeB.accounts['updateHead'](nodeB) + + expect(nodeA.accounts.getBalance(accountA)).toEqual({ + confirmedBalance: BigInt(499999998), + unconfirmedBalance: BigInt(499999998), + }) + expect(nodeA.accounts.getBalance(accountB)).toEqual({ + confirmedBalance: BigInt(2), + unconfirmedBalance: BigInt(2), + }) + expect(nodeB.accounts.getBalance(accountA)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(499999998), + }) + + // Copy block B2 and B3 to nodeA + await nodeA.captain.chain.addBlock(blockB2) + await nodeA.captain.chain.addBlock(blockB3) + await nodeA.accounts['updateHead'](nodeA) + + // A should have its original coins + // B should not have the coins any more + expect(nodeA.accounts.getBalance(accountA)).toEqual({ + confirmedBalance: BigInt(500000000), + unconfirmedBalance: BigInt(999999998), + }) + expect(nodeA.accounts.getBalance(accountB)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(2), + }) + }, 600000) +}) diff --git a/ironfish/src/account/accounts.test.ts b/ironfish/src/account/accounts.test.ts new file mode 100644 index 0000000000..498db84a15 --- /dev/null +++ b/ironfish/src/account/accounts.test.ts @@ -0,0 +1,52 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Assert } from '../captain' +import { makeBlockAfter } from '../captain/testUtilities' +import { createNodeTest } from '../testUtilities' + +describe('Accounts', () => { + const nodeTest = createNodeTest() + + it('sync account head', async () => { + const { node, chain, strategy } = nodeTest + strategy.disableMiningReward() + + const getTransactionsSpy = jest.spyOn(chain, 'getTransactionsForBlock') + + // G + await node.seed() + const genesis = await chain.getGenesisHeader() + Assert.isNotNull(genesis) + + // G -> A1 + const blockA1 = makeBlockAfter(chain, genesis) + await chain.addBlock(blockA1) + + await node.accounts.updateHead(node) + expect(node.accounts['headHash']).toEqual(blockA1.header.hash.toString('hex')) + expect(getTransactionsSpy).toBeCalledTimes(2) + + // G -> A1 -> A2 + const blockA2 = makeBlockAfter(chain, blockA1) + await chain.addBlock(blockA2) + + await node.accounts.updateHead(node) + expect(node.accounts['headHash']).toEqual(blockA2.header.hash.toString('hex')) + expect(getTransactionsSpy).toBeCalledTimes(3) + + // Add 3 more on a heavier fork. Chain A should be removed first, then chain B added + // G -> A1 -> A2 + // -> B1 -> B2 -> B3 + const blockB1 = makeBlockAfter(chain, genesis) + const blockB2 = makeBlockAfter(chain, blockB1) + const blockB3 = makeBlockAfter(chain, blockB2) + await chain.addBlock(blockB1) + await chain.addBlock(blockB2) + await chain.addBlock(blockB3) + + await node.accounts.updateHead(node) + expect(node.accounts['headHash']).toEqual(blockB3.header.hash.toString('hex')) + expect(getTransactionsSpy).toBeCalledTimes(8) + }, 8000) +}) diff --git a/ironfish/src/account/accounts.ts b/ironfish/src/account/accounts.ts new file mode 100644 index 0000000000..72496db819 --- /dev/null +++ b/ironfish/src/account/accounts.ts @@ -0,0 +1,898 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Account, AccountDefaults, AccountsDB } from './accountsdb' +import { Event } from '../event' +import { generateKey, generateNewPublicAddress, WasmNote } from 'ironfish-wasm-nodejs' +import { + IronfishBlockHeader, + IronfishCaptain, + IronfishTransaction, + IronfishMemPool, + IronfishBlockchain, + IronfishWitness, + IronfishNote, +} from '../strategy' +import { AsyncTransactionWorkerPool } from '../strategy/asyncTransactionWorkerPool' +import { createRootLogger, Logger } from '../logger' +import { PromiseResolve, PromiseUtils, SetTimeoutToken } from '../utils' +import { IronfishNode } from '../node' +import { ValidationError } from '../rpc/adapters/errors' +import { GENESIS_BLOCK_SEQUENCE } from '../captain' +import { IDatabaseTransaction } from '../storage' +import { BufferMap } from 'buffer-map' + +const REBROADCAST_SEQUENCE_DELTA = BigInt(5) + +type SyncTransactionParams = + // Used when receiving a transaction from a block with notes + // that have been added to the trees + | { blockHash: string; initialNoteIndex: number } + // Used if the transaction is not yet part of the chain + | { submittedSequence: bigint } + | Record + +export class Accounts { + readonly onDefaultAccountChange = new Event< + [account: Account | null, oldAccount: Account | null] + >() + + readonly onBroadcastTransaction = new Event<[transaction: IronfishTransaction]>() + + scan: ScanState | null = null + isUpdatingHead = false + + protected readonly transactionMap = new BufferMap< + Readonly<{ + transaction: IronfishTransaction + blockHash: string | null + submittedSequence: bigint | null + }> + >() + protected readonly noteToNullifier = new Map< + string, + Readonly<{ nullifierHash: string | null; noteIndex: number | null; spent: boolean }> + >() + protected readonly nullifierToNote = new Map() + protected readonly accounts = new Map() + readonly database: 
AccountsDB + protected readonly logger: Logger + + protected defaultAccount: string | null = null + protected headHash: string | null = null + protected isStarted = false + protected eventLoopTimeout: SetTimeoutToken | null = null + + constructor({ + database, + logger = createRootLogger(), + }: { + database: AccountsDB + logger?: Logger + }) { + this.logger = logger.withTag('accounts') + this.database = database + } + + async updateHead(node: IronfishNode): Promise { + if (this.scan || this.isUpdatingHead) return + + const addBlock = async (header: IronfishBlockHeader): Promise => { + this.logger.debug(`AccountHead ADD: ${Number(header.sequence) - 1} => ${header.sequence}`) + + for await (const { + transaction, + blockHash, + initialNoteIndex, + } of node.captain.chain.getTransactionsForBlock(header)) { + await this.syncTransaction(transaction, { + blockHash: blockHash, + initialNoteIndex: initialNoteIndex, + }) + } + } + + const removeBlock = async (header: IronfishBlockHeader): Promise => { + this.logger.debug(`AccountHead DEL: ${header.sequence} => ${Number(header.sequence) - 1}`) + + for await (const { transaction } of node.captain.chain.getTransactionsForBlock(header)) { + await this.syncTransaction(transaction, {}) + } + } + + this.isUpdatingHead = true + + try { + const chainHead = await node.captain.chain.getHeaviestHead() + const chainTail = await node.captain.chain.getGenesisHeader() + + if (!chainHead || !chainTail) { + // There is no genesis block, so there's nothing to update to + return + } + + if (!this.headHash) { + await addBlock(chainTail) + await this.updateHeadHash(chainTail.hash.toString('hex')) + } + + if (!this.headHash) { + throw new Error('headHash should be set previously or to chainTail.hash') + } + + const accountHeadHash = Buffer.from(this.headHash, 'hex') + const accountHead = await node.captain.chain.getBlockHeader(accountHeadHash) + + if (!accountHead || chainHead.hash.equals(accountHead.hash)) { + return + } + + const { fork, 
isLinear } = await node.captain.chain.findFork(accountHead, chainHead) + if (!fork) return + + // Remove the old fork chain + if (!isLinear) { + for await (const header of node.captain.chain.iterateToBlock(accountHead, fork)) { + // Don't remove the fork + if (!header.hash.equals(fork.hash)) { + await removeBlock(header) + } + + await this.updateHeadHash(header.hash.toString('hex')) + } + } + + for await (const header of node.captain.chain.iterateToBlock(fork, chainHead)) { + if (header.hash.equals(fork.hash)) continue + await addBlock(header) + await this.updateHeadHash(header.hash.toString('hex')) + } + + this.logger.debug( + '\nUpdated Head: \n', + `Fork: ${fork.hash.toString('hex')} (${ + fork.sequence === GENESIS_BLOCK_SEQUENCE ? 'GENESIS' : '???' + })`, + '\n', + 'Account:', + accountHead?.hash.toString('hex'), + '\n', + 'Chain:', + chainHead?.hash.toString('hex'), + '\n', + ) + } finally { + this.isUpdatingHead = false + } + } + + get shouldRescan(): boolean { + if (this.scan) { + return false + } + + for (const account of this.accounts.values()) { + if (account.rescan !== null) return true + } + + return false + } + + start(node: IronfishNode): void { + if (this.isStarted) return + this.isStarted = true + + if (this.shouldRescan && !this.scan) { + void this.scanTransactions(node.captain.chain) + } + + void this.eventLoop(node) + } + + async stop(): Promise { + if (!this.isStarted) return + this.isStarted = false + + if (this.eventLoopTimeout) { + clearTimeout(this.eventLoopTimeout) + } + + if (this.scan) { + await this.scan.abort() + } + + if (this.database.database.isOpen) { + await this.saveTransactionsToDb() + await this.database.setHeadHash(this.headHash) + } + } + + async eventLoop(node: IronfishNode): Promise { + await this.updateHead(node) + + await this.rebroadcastTransactions(node.captain) + this.eventLoopTimeout = setTimeout(() => void this.eventLoop(node), 1000) + } + + async loadTransactionsFromDb(): Promise { + await 
this.database.loadNullifierToNoteMap(this.nullifierToNote) + await this.database.loadNoteToNullifierMap(this.noteToNullifier) + await this.database.loadTransactionsIntoMap(this.transactionMap) + } + + async saveTransactionsToDb(): Promise { + await this.database.replaceNullifierToNoteMap(this.nullifierToNote) + await this.database.replaceNoteToNullifierMap(this.noteToNullifier) + await this.database.replaceTransactions(this.transactionMap) + } + + async updateTransactionMap( + transactionHash: Buffer, + transaction: Readonly<{ + transaction: IronfishTransaction + blockHash: string | null + submittedSequence: bigint | null + }>, + tx?: IDatabaseTransaction, + ): Promise { + this.transactionMap.set(transactionHash, transaction) + await this.database.saveTransaction(transactionHash, transaction, tx) + } + + async updateNullifierToNoteMap( + nullifier: string, + note: string, + tx?: IDatabaseTransaction, + ): Promise { + this.nullifierToNote.set(nullifier, note) + await this.database.saveNullifierToNote(nullifier, note, tx) + } + + async updateNoteToNullifierMap( + noteHash: string, + note: Readonly<{ + nullifierHash: string | null + noteIndex: number | null + spent: boolean + }>, + tx?: IDatabaseTransaction, + ): Promise { + this.noteToNullifier.set(noteHash, note) + await this.database.saveNoteToNullifier(noteHash, note, tx) + } + + async updateHeadHash(headHash: string | null): Promise { + this.headHash = headHash + await this.database.setHeadHash(headHash) + } + + async reset(): Promise { + this.transactionMap.clear() + this.noteToNullifier.clear() + this.nullifierToNote.clear() + await this.saveTransactionsToDb() + } + + private decryptNotes( + transaction: IronfishTransaction, + initialNoteIndex: number | null, + ): Array<{ + noteIndex: number | null + nullifier: string | null + merkleHash: string + forSpender: boolean + account: Account + }> { + const accounts = this.listAccounts() + const notes = new Array<{ + noteIndex: number | null + nullifier: string | null 
+ merkleHash: string + forSpender: boolean + account: Account + }>() + + // Decrement the note index before starting so we can + // pre-increment it in the loop rather than post-incrementing it + let currentNoteIndex = initialNoteIndex + if (currentNoteIndex !== null) { + currentNoteIndex-- + } + + for (const note of transaction.notes()) { + // Increment the note index if it is set + if (currentNoteIndex !== null) { + currentNoteIndex++ + } + + for (const account of accounts) { + // Try decrypting the note as the owner + const receivedNote = note.decryptNoteForOwner(account.incomingViewKey) + if (receivedNote) { + if (receivedNote.value() !== BigInt(0)) { + notes.push({ + noteIndex: currentNoteIndex, + forSpender: false, + merkleHash: note.merkleHash().toString('hex'), + nullifier: + currentNoteIndex !== null + ? Buffer.from( + receivedNote.nullifier(account.spendingKey, BigInt(currentNoteIndex)), + ).toString('hex') + : null, + account: account, + }) + } + continue + } + + // Try decrypting the note as the spender + const spentNote = note.decryptNoteForSpender(account.outgoingViewKey) + if (spentNote) { + if (spentNote.value() !== BigInt(0)) { + notes.push({ + noteIndex: currentNoteIndex, + forSpender: true, + merkleHash: note.merkleHash().toString('hex'), + nullifier: null, + account: account, + }) + } + continue + } + } + } + + return notes + } + + /** + * Called: + * - Called when transactions are added to the mem pool + * - Called for transactions on disconnected blocks + * - Called when transactions are added to a block on the genesis chain + */ + async syncTransaction( + transaction: IronfishTransaction, + params: SyncTransactionParams, + ): Promise { + const initialNoteIndex = 'initialNoteIndex' in params ? params.initialNoteIndex : null + const blockHash = 'blockHash' in params ? params.blockHash : null + const submittedSequence = 'submittedSequence' in params ? 
params.submittedSequence : null + + let newSequence = submittedSequence + + const notes = this.decryptNotes(transaction, initialNoteIndex) + + await this.database.database.transaction( + [ + this.database.noteToNullifier, + this.database.nullifierToNote, + this.database.transactions, + ], + 'readwrite', + async (tx) => { + if (notes.length > 0) { + const transactionHash = transaction.transactionHash() + + const existingT = this.transactionMap.get(transactionHash) + // If we passed in a submittedSequence, set submittedSequence to that value. + // Otherwise, if we already have a submittedSequence, keep that value regardless of whether + // submittedSequence was passed in. + // Otherwise, we don't have an existing sequence or new sequence, so set submittedSequence null + newSequence = submittedSequence || existingT?.submittedSequence || null + + // The transaction is useful if we want to display transaction history, + // but since we spent the note, we don't need to put it in the nullifierToNote mappings + await this.updateTransactionMap( + transactionHash, + { + transaction, + blockHash, + submittedSequence: newSequence, + }, + tx, + ) + } + + for (const { noteIndex, nullifier, forSpender, merkleHash } of notes) { + // The transaction is useful if we want to display transaction history, + // but since we spent the note, we don't need to put it in the nullifierToNote mappings + if (!forSpender) { + if (nullifier !== null) { + await this.updateNullifierToNoteMap(nullifier, merkleHash, tx) + } + + await this.updateNoteToNullifierMap( + merkleHash, + { + nullifierHash: nullifier, + noteIndex: noteIndex, + spent: false, + }, + tx, + ) + } + } + + // If newSequence is null and blockHash is null, we're removing the transaction from + // the chain and it wasn't created by us, so unmark notes as spent + const isRemovingTransaction = newSequence === null && blockHash === null + + for (const spend of transaction.spends()) { + const nullifier = spend.nullifier.toString('hex') + 
const noteHash = this.nullifierToNote.get(nullifier) + + if (noteHash) { + const nullifier = this.noteToNullifier.get(noteHash) + + if (!nullifier) { + throw new Error( + 'nullifierToNote mappings must have a corresponding noteToNullifier map', + ) + } + + await this.updateNoteToNullifierMap(noteHash, { + ...nullifier, + spent: !isRemovingTransaction, + }) + } + } + }, + ) + } + + async scanTransactions(chain: IronfishBlockchain): Promise { + if (this.scan) { + this.logger.info('Skipping Scan, already scanning.') + return + } + + if (this.headHash == null) { + this.logger.info('Skipping scan, there is no blocks to scan') + return + } + + const started = Date.now() + this.scan = new ScanState() + + // If were updating the account head we need to wait until its finished + // but setting this.scan is our lock so updating the head doesn't run again + while (this.isUpdatingHead) { + await new Promise((r) => setTimeout(r, 1000)) + } + + const accountHeadHash = Buffer.from(this.headHash, 'hex') + + const scanFor = Array.from(this.accounts.values()) + .filter((a) => a.rescan !== null && a.rescan <= started) + .map((a) => a.name) + .join(', ') + + this.logger.info(`Scanning for transactions${scanFor ? 
` for ${scanFor}` : ''}`) + + // Go through every transaction in the chain and add notes that we can decrypt + for await (const { + blockHash, + transaction, + initialNoteIndex, + sequence, + } of chain.getTransactions(accountHeadHash)) { + if (this.scan.isAborted) { + this.scan.signalComplete() + this.scan = null + return + } + + await this.syncTransaction(transaction, { blockHash, initialNoteIndex: initialNoteIndex }) + this.scan.onTransaction.emit(sequence) + } + + this.logger.info( + `Finished scanning for transactions after ${Math.floor( + (Date.now() - started) / 1000, + )} seconds`, + ) + + for (const account of this.accounts.values()) { + if (account.rescan !== null && account.rescan <= started) { + account.rescan = null + await this.database.setAccount(account) + } + } + + this.scan.signalComplete() + this.scan = null + } + + private getUnspentNotes( + account: Account, + ): ReadonlyArray<{ hash: string; note: IronfishNote; index: number | null }> { + const unspentNotes = [] + + for (const transactionMapValue of this.transactionMap.values()) { + for (const note of transactionMapValue.transaction.notes()) { + // Notes can be spent and received by the same Account. 
+ // Try decrypting the note as its owner + const receivedNote = note.decryptNoteForOwner(account.incomingViewKey) + if (receivedNote) { + const noteHashHex = Buffer.from(note.merkleHash()).toString('hex') + + const map = this.noteToNullifier.get(noteHashHex) + if (!map) { + throw new Error('All decryptable notes should be in the noteToNullifier map') + } + + if (!map.spent) { + unspentNotes.push({ + hash: noteHashHex, + note: receivedNote, + index: map.noteIndex, + }) + } + } + } + } + + return unspentNotes + } + + getBalance(account: Account): { unconfirmedBalance: BigInt; confirmedBalance: BigInt } { + this.assertHasAccount(account) + + const notes = this.getUnspentNotes(account) + + let unconfirmedBalance = BigInt(0) + let confirmedBalance = BigInt(0) + + for (const note of notes) { + const value = note.note.value() + unconfirmedBalance += BigInt(value) + if (note.index != null) { + confirmedBalance += BigInt(value) + } + } + + return { unconfirmedBalance, confirmedBalance } + } + + async pay( + captain: IronfishCaptain, + memPool: IronfishMemPool, + sender: Account, + amount: bigint, + transactionFee: bigint, + memo: string, + receiverPublicAddress: string, + ): Promise { + const heaviestHead = await captain.chain.getHeaviestHead() + if (heaviestHead == null) { + throw new ValidationError('You must have a genesis block to create a transaction') + } + + const transaction = await this.createTransaction( + captain, + sender, + amount, + transactionFee, + memo, + receiverPublicAddress, + ) + + await this.syncTransaction(transaction, { submittedSequence: heaviestHead.sequence }) + memPool.acceptTransaction(transaction) + this.broadcastTransaction(transaction) + + return transaction + } + + async createTransaction( + captain: IronfishCaptain, + sender: Account, + amount: bigint, + transactionFee: bigint, + memo: string, + receiverPublicAddress: string, + ): Promise { + this.assertHasAccount(sender) + + let amountNeeded = amount + transactionFee + + const 
notesToSpend: Array<{ note: IronfishNote; witness: IronfishWitness }> = [] + const unspentNotes = this.getUnspentNotes(sender) + + for (const unspentNote of unspentNotes) { + // Skip unconfirmed notes + if (unspentNote.index === null) { + continue + } + + if (unspentNote.note.value() > BigInt(0)) { + // Double-check that the nullifier for the note isn't in the tree already + // This would indicate a bug in the account transaction stores + const nullifier = Buffer.from( + unspentNote.note.nullifier(sender.spendingKey, BigInt(unspentNote.index)), + ) + + if (await captain.chain.nullifiers.contains(nullifier)) { + this.logger.debug( + `Note was marked unspent, but nullifier found in tree: ${nullifier.toString( + 'hex', + )}`, + ) + + // Update our map so this doesn't happen again + const noteMapValue = this.noteToNullifier.get(nullifier.toString('hex')) + if (noteMapValue) { + this.logger.debug(`Unspent note has index ${String(noteMapValue.noteIndex)}`) + await this.updateNoteToNullifierMap(nullifier.toString('hex'), { + ...noteMapValue, + spent: true, + }) + } + + // Move on to the next note + continue + } + + // Try creating a witness from the note + const witness = await captain.chain.notes.witness(unspentNote.index) + + if (witness == null) { + this.logger.debug( + `Could not create a witness for note with index ${unspentNote.index}`, + ) + continue + } + + // Otherwise, push the note into the list of notes to spend + this.logger.debug( + 'Accounts: spending note', + unspentNote.index, + unspentNote.hash, + unspentNote.note.value(), + ) + notesToSpend.push({ note: unspentNote.note, witness: witness }) + amountNeeded -= BigInt(unspentNote.note.value()) + } + + if (amountNeeded <= 0) { + break + } + } + + if (amountNeeded > 0) { + throw new Error('Insufficient funds') + } + + const transaction = AsyncTransactionWorkerPool.createTransaction() + + // Generate spends for everything in notesToSpend + for (const note of notesToSpend) { + await 
transaction.spend(sender.spendingKey, note.note, note.witness) + } + + // Generate the note transferring currency to the receiver + const note = new WasmNote(receiverPublicAddress, amount, memo) + const serializedNote = Buffer.from(note.serialize()) + note.free() + await transaction.receive(sender.spendingKey, new IronfishNote(serializedNote)) + + // Post the transaction and we're done! + const transactionPosted = new IronfishTransaction( + Buffer.from( + (await transaction.post(sender.spendingKey, null, transactionFee)).serialize(), + ), + ) + + return transactionPosted + } + + broadcastTransaction(transaction: IronfishTransaction): void { + this.onBroadcastTransaction.emit(transaction) + } + + async rebroadcastTransactions(captain: IronfishCaptain): Promise { + const heaviestHead = await captain.chain.getHeaviestHead() + if (heaviestHead == null) return + + const headSequence = heaviestHead.sequence + + for (const [transactionHash, tx] of this.transactionMap) { + const { transaction, blockHash, submittedSequence } = tx + + // Skip transactions that are already added to a block + if (blockHash) continue + + // TODO: Submitted sequence is only set from transactions generated by this node and we don't rebroadcast + // transactions to us, or from us and generated from another node, but we should do this later. It + // will require us to set submittedSequence in syncTransaction to the current head if it's null + if (!submittedSequence) continue + + // TODO: This algorithm suffers a deanonim attack where you can watch to see what transactions node continously + // send out, then you can know those transactions are theres. This should be randomized and made less, + // predictable later to help prevent that attack. 
+ if (headSequence - submittedSequence < REBROADCAST_SEQUENCE_DELTA) continue + + await this.updateTransactionMap(transactionHash, { + ...tx, + submittedSequence: headSequence, + }) + + this.broadcastTransaction(transaction) + } + } + + async createAccount(name: string, setDefault = false): Promise { + if (this.accounts.has(name)) { + throw new Error(`Account already exists with the name ${name}`) + } + + const key = generateKey() + + const account: Account = { + ...AccountDefaults, + name: name, + incomingViewKey: key.incoming_view_key, + outgoingViewKey: key.outgoing_view_key, + publicAddress: key.public_address, + spendingKey: key.spending_key, + } + + this.accounts.set(account.name, account) + await this.database.setAccount(account) + + if (setDefault) { + await this.setDefaultAccount(account.name) + } + + return account + } + + async startScanTransactionsFor(chain: IronfishBlockchain, account: Account): Promise { + account.rescan = Date.now() + await this.database.setAccount(account) + await this.scanTransactions(chain) + } + + async importAccount(toImport: Partial): Promise { + if (!toImport.name) { + throw new Error(`Imported account has no name`) + } + + if (this.accounts.has(toImport.name)) { + throw new Error(`Account already exists with the name ${toImport.name}`) + } + + const account = { + ...AccountDefaults, + ...toImport, + } + + this.accounts.set(account.name, account) + await this.database.setAccount(account) + + return account + } + + listAccounts(): Account[] { + return Array.from(this.accounts.values()) + } + + accountExists(name: string): boolean { + return this.accounts.has(name) + } + + async removeAccount(name: string): Promise { + if (name === this.defaultAccount) { + const prev = this.getDefaultAccount() + await this.database.setDefaultAccount(null) + + this.defaultAccount = null + this.onDefaultAccountChange.emit(null, prev) + } + + this.accounts.delete(name) + await this.database.removeAccount(name) + } + + get hasDefaultAccount(): 
boolean { + return !!this.defaultAccount + } + + /** Set or clear the default account */ + async setDefaultAccount(name: string | null): Promise { + if (this.defaultAccount === name) return + + const prev = this.getDefaultAccount() + let next = null + + if (name !== null) { + next = this.accounts.get(name) + + if (!next) { + throw new Error(`No account found with name ${name}`) + } + } + + const nextName = next ? next.name : null + await this.database.setDefaultAccount(nextName) + this.defaultAccount = nextName + this.onDefaultAccountChange.emit(next, prev) + } + + getAccountByName(name: string): Account | null { + return this.accounts.get(name) || null + } + + getDefaultAccount(): Account | null { + if (!this.defaultAccount) return null + return this.getAccountByName(this.defaultAccount) + } + + async generateNewPublicAddress(account: Account): Promise { + this.assertHasAccount(account) + const key = generateNewPublicAddress(account.spendingKey) + account.publicAddress = key.public_address + await this.database.setAccount(account) + } + + async load(): Promise { + for await (const account of this.database.loadAccounts()) { + this.accounts.set(account.name, account) + } + + const meta = await this.database.loadAccountsMeta() + this.defaultAccount = meta.defaultAccountName + this.headHash = meta.headHash + + await this.loadTransactionsFromDb() + } + + protected assertHasAccount(account: Account): void { + if (!this.accounts.has(account.name)) { + throw new Error(`No account found with name ${account.name}`) + } + } + + protected assertNotHasAccount(account: Account): void { + if (this.accounts.has(account.name)) { + throw new Error(`No account found with name ${account.name}`) + } + } +} + +class ScanState { + onTransaction = new Event<[sequence: BigInt]>() + + private aborted: boolean + private running: boolean + private runningPromise: Promise + private runningResolve: PromiseResolve + + constructor() { + const [promise, resolve] = PromiseUtils.split() + 
this.runningPromise = promise + this.runningResolve = resolve + + this.running = true + this.aborted = false + } + + get isAborted(): boolean { + return this.aborted + } + + signalComplete(): void { + this.runningResolve() + } + + async abort(): Promise { + this.aborted = false + return this.wait() + } + + async wait(): Promise { + await this.runningPromise + } +} diff --git a/ironfish/src/account/accountsdb.ts b/ironfish/src/account/accountsdb.ts new file mode 100644 index 0000000000..b30030ee5b --- /dev/null +++ b/ironfish/src/account/accountsdb.ts @@ -0,0 +1,291 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { BufferMap } from 'buffer-map' +import { + BufferEncoding, + IDatabase, + IDatabaseStore, + IDatabaseTransaction, + JsonEncoding, + StringEncoding, +} from '../storage' +import { IronfishTransaction } from '../strategy' + +export type Account = { + name: string + spendingKey: string + incomingViewKey: string + outgoingViewKey: string + publicAddress: string + rescan: number | null +} + +export const AccountDefaults: Account = { + name: '', + spendingKey: '', + incomingViewKey: '', + outgoingViewKey: '', + publicAddress: '', + rescan: null, +} + +const getAccountsDBMetaDefaults = (): AccountsDBMeta => ({ + defaultAccountName: null, + headHash: null, +}) + +export type AccountsDBMeta = { + defaultAccountName: string | null + headHash: string | null +} + +export class AccountsDB { + database: IDatabase + + accounts: IDatabaseStore<{ key: string; value: Account }> + + meta: IDatabaseStore<{ + key: keyof AccountsDBMeta + value: AccountsDBMeta[keyof AccountsDBMeta] + }> + + // Transaction-related database stores + noteToNullifier: IDatabaseStore<{ + key: string + value: { nullifierHash: string | null; noteIndex: number | null; spent: boolean } + }> + + nullifierToNote: IDatabaseStore<{ key: 
string; value: string }> + + transactions: IDatabaseStore<{ + key: Buffer + value: { + transaction: Buffer + blockHash: string | null + submittedSequence: bigint | null + } + }> + + constructor({ database }: { database: IDatabase }) { + this.database = database + + this.meta = database.addStore<{ + key: keyof AccountsDBMeta + value: AccountsDBMeta[keyof AccountsDBMeta] + }>({ + version: 1, + name: 'meta', + keyEncoding: new StringEncoding(), + valueEncoding: new JsonEncoding(), + }) + + this.accounts = database.addStore<{ key: string; value: Account }>({ + version: 1, + name: 'accounts', + keyEncoding: new StringEncoding(), + valueEncoding: new JsonEncoding(), + }) + + this.noteToNullifier = database.addStore<{ + key: string + value: { nullifierHash: string; noteIndex: number | null; spent: boolean } + }>({ + version: 1, + name: 'noteToNullifier', + keyEncoding: new StringEncoding(), + valueEncoding: new JsonEncoding(), + }) + + this.nullifierToNote = database.addStore<{ key: string; value: string }>({ + version: 1, + name: 'nullifierToNote', + keyEncoding: new StringEncoding(), + valueEncoding: new StringEncoding(), + }) + + this.transactions = database.addStore<{ + key: Buffer + value: { + transaction: Buffer + blockHash: string | null + submittedSequence: bigint | null + } + }>({ + version: 1, + name: 'transactions', + keyEncoding: new BufferEncoding(), + valueEncoding: new JsonEncoding(), + }) + } + + async open(): Promise { + await this.database.open() + } + + async close(): Promise { + await this.database.close() + } + + async setAccount(account: Account): Promise { + await this.accounts.put(account.name, account) + } + + async removeAccount(name: string): Promise { + await this.accounts.del(name) + } + + async setDefaultAccount(name: AccountsDBMeta['defaultAccountName']): Promise { + await this.meta.put('defaultAccountName', name) + } + + async setHeadHash(hash: AccountsDBMeta['headHash']): Promise { + await this.meta.put('headHash', hash) + } + + async 
loadAccountsMeta(): Promise { + const meta = { ...getAccountsDBMetaDefaults() } + + for await (const [key, value] of this.meta.getAllIter()) { + meta[key] = value + } + + return meta + } + + async *loadAccounts(): AsyncGenerator { + for await (const account of this.accounts.getAllValuesIter()) { + yield account + } + } + + async saveTransaction( + transactionHash: Buffer, + transaction: { + transaction: IronfishTransaction + blockHash: string | null + submittedSequence: bigint | null + }, + tx?: IDatabaseTransaction, + ): Promise { + const serialized = { + ...transaction, + transaction: transaction.transaction.serialize(), + } + await this.transactions.put(transactionHash, serialized, tx) + } + + async replaceTransactions( + map: BufferMap<{ + transaction: IronfishTransaction + blockHash: string | null + submittedSequence: bigint | null + }>, + ): Promise { + await this.transactions.clear() + + await this.database.transaction([this.transactions], 'readwrite', async (tx) => { + for (const [key, value] of map) { + const serialized = { + ...value, + transaction: value.transaction.serialize(), + } + await this.transactions.put(key, serialized, tx) + } + }) + } + + async loadTransactionsIntoMap( + map: BufferMap<{ + transaction: IronfishTransaction + blockHash: string | null + submittedSequence: bigint | null + }>, + ): Promise { + for await (const value of this.transactions.getAllValuesIter()) { + const deserialized = { + ...value, + transaction: new IronfishTransaction(value.transaction), + } + + map.set(deserialized.transaction.transactionHash(), deserialized) + } + } + + async saveNullifierToNote( + nullifier: string, + note: string, + tx?: IDatabaseTransaction, + ): Promise { + await this.nullifierToNote.put(nullifier, note, tx) + } + + async replaceNullifierToNoteMap(map: Map): Promise { + await this.nullifierToNote.clear() + + await this.database.transaction([this.nullifierToNote], 'readwrite', async (tx) => { + for (const [key, value] of map) { + await 
this.nullifierToNote.put(key, value, tx) + } + }) + } + + async loadNullifierToNoteMap(map: Map): Promise { + for await (const nullifierToNoteKey of this.nullifierToNote.getAllKeysIter()) { + const value = await this.nullifierToNote.get(nullifierToNoteKey) + + if (value == null) { + throw new Error('Value must exist if key exists') + } + + map.set(nullifierToNoteKey, value) + } + } + + async saveNoteToNullifier( + noteHash: string, + note: Readonly<{ + nullifierHash: string | null + noteIndex: number | null + spent: boolean + }>, + tx?: IDatabaseTransaction, + ): Promise { + await this.noteToNullifier.put(noteHash, note, tx) + } + + async replaceNoteToNullifierMap( + map: Map< + string, + { nullifierHash: string | null; noteIndex: number | null; spent: boolean } + >, + ): Promise { + await this.noteToNullifier.clear() + + await this.database.transaction([this.noteToNullifier], 'readwrite', async (tx) => { + for (const [key, value] of map) { + await this.noteToNullifier.put(key, value, tx) + } + }) + } + + async loadNoteToNullifierMap( + map: Map< + string, + { nullifierHash: string | null; noteIndex: number | null; spent: boolean } + >, + ): Promise { + await this.database.transaction([this.noteToNullifier], 'read', async (tx) => { + for await (const noteToNullifierKey of this.noteToNullifier.getAllKeysIter(tx)) { + const value = await this.noteToNullifier.get(noteToNullifierKey) + + if (value == null) { + throw new Error('Value must exist if key exists') + } + + map.set(noteToNullifierKey, value) + } + }) + } +} diff --git a/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_1 b/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_1 new file mode 100644 index 0000000000..eba519d17c --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_1 @@ -0,0 +1,8 @@ +{ + 
"name": "testA", + "spendingKey": "5ca7a1527a642dc8c756a1fd15455a061064bafaa272da69b0b3f5ff77f35dbe", + "incomingViewKey": "9d5130841a0c40ad2de21b04d64fc048a144ceaa433f63a16af5901b0f2a4b03", + "outgoingViewKey": "2b6625e7dddf63fd149d78209f272677f16125f2851511b8b9e29a391c13bc24", + "publicAddress": "952445f0b8e7d42febb4ebc093ac5e8160c3c0693eb86833551af5a1508f802aa69bfd37bd9f2a1456974d", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_2 b/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_2 new file mode 100644 index 0000000000..521c312a21 --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_2 @@ -0,0 +1,8 @@ +{ + "name": "testB", + "spendingKey": "70998c4295379e0d3a37020ec117928221277cba28c8db9a97bc63111a20e27e", + "incomingViewKey": "ad5c6e1e843fc711b36971b830aace6fd38d44b19cc9d20e1e7b43b3cacc1002", + "outgoingViewKey": "c8658d394af216d65e63f696f12c1109285582e11293d322330a700244789c54", + "publicAddress": "cafc5e628c65a24832b3a97bd81b93a01e51b8410c79ecbd3aacbf5680d0d09782608cdf34d45bf599022f", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_3 b/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_3 new file mode 100644 index 0000000000..baa652bca3 --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_3 @@ -0,0 +1,8 @@ +{ + "name": "testC", + "spendingKey": "7eaa9a1b7b942042ed0e152f6c559ac44a50ed566fb5de00868a5e76ce7d5296", + "incomingViewKey": "d21ad2d904cecaf7702aa412e4e078e3371242e3626766d36e71d239498c9c00", + 
"outgoingViewKey": "4a133b2cc1f677e09b978971147b4a92e5acea99d8e3c1523113dc098b078872", + "publicAddress": "9aa0a8630400b2c45d1e2e1f649c52c42ed7f02c65593f6df16cddf8ac1cefc700c5007ccde94e8f3545f3", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_4 b/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_4 new file mode 100644 index 0000000000..14f1aef43b --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_4 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "2", + "previousBlockHash": "A1BA91BD54FCFE97D0DF8FE1F66B6217197B96CD52DB97FAD58B275E164600AB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:9AO81eYq2ieOi4iwBGUJ0XusCN2k2TAtlI/3Hop+ylA=" + }, + "size": 4 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404208267, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "0B31851C2BCF99590494E9950BBB17E23F42EDC9D93003A5203C80B4B75734D8", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////rQj65J4hhFGDJlbcniTx2UYRSG76lVbtSkImjIxHxKveQv/+uo2RzN6/CWUZKEYjtYo4WNege/jJgaiBoeKKwlpihg/jfbX3uUpUa08yz/VVqdfKdqcLJXki3EyjbJb1BsLig0pXmMElLHrzsN4Lbh85fiSPdbxBF7pnIbkbjpOxB4cLRURqJPKTUsTDdq98iBFYWytUnhZbPYX4AGR63TVdBZs4ygD72F89LLeN+M9gmCm1rKWHVO2VoRfzj5LJjKl1lqdnCTFoKTiGfwhXCPaxqxlVTpe6vKmBmX384K8xFeeRB8FATMTox7L5T/5IO2OOsgXY4mdAfZCcinV3CJ6N0ExmrgACTbvvBo8UkXbBM/2EEktO4TF5sc0dJyc9/UStFGnzJmPpAJbc29bFdp5D8mc2+5DulBJTlLlwbIkilWmmbVAQ+E+7m8TsJnLoNojAF74xPEPHZzwTWwj1xbOtVqXDQdpbql2xUlC3ljQsQd0UG6m6feQWPJEtbjS4hm/xQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBsSURLbX+8bQSkm3PCPOeck072VaMOC+HNL2rvwz6AXc9vvClOYnZo0URm9Cw+DlZu5DPwUHGHvq3uUJYE4dkD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_5 b/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_5 new file mode 100644 index 0000000000..8fd7e8ad8e --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Counts_notes_correctly_when_a_block_has_transactions_not_used_by_any_account_5 @@ -0,0 +1,37 @@ +{ + "header": { + "sequence": "3", + "previousBlockHash": "0B31851C2BCF99590494E9950BBB17E23F42EDC9D93003A5203C80B4B75734D8", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:2twurk+SnuRDUzn9gNWZTsW0X2IC/+I5WC9hBbWkGjo=" + }, + "size": 7 + }, + "nullifierCommitment": { + "commitment": "AF633905130430364178821A5E82C30363B36BCB81A39F638D6EE05BD76F0100", + "size": 2 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404253880, + "minersFee": "-500000001", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "2F89C9C1889BC996627AB1668C881EA8A007BF06D04B653132D2260FD72E77EE", + "graffiti": 
"0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AQAAAAAAAAACAAAAAAAAAAEAAAAAAAAAh9DLEUOWFC1c+Kkvu4qcS4JXnj5JLRHTPi4tMMYLiQce/JvjYdZNg2MuT8kkP3JWllzsnglUqZ6FOurlMkeny1OLxbtS7vIax88/UOEay6hoVlOX7RKtXCgyw6n1fezeFPBAS3ZbWczoHjYrEsp/n09+BXFbRckoOoLJxLQ0I8RAR9nT+mv0cYsstZ9UKwnzjM0k2V4I/0zxSwhEtwdq3jnQrCaycKl8OQUQmaNg+FDgef7qIjw+wBBEghYYi4HlHxyvFrwtiBb8tTWK+6JsvWZ6+NnJD7JH6lPLRuk46gGeYLvRhv6dpYHb+71CBOldzLfAVUHVVZ+qFiz7R0/4zfQDvNXmKtonjouIsARlCdF7rAjdpNkwLZSP9x6KfspQBAAAAOQxliFZfMcWuCqaNyu5ZdZsBLFI08HCa+fHkD7zi8mrm2ivf8zRcDIvTJH1UCbDKe5xWMCK2u9bpn2DxXb77RHooAh4ayEx58nv+aAQKKMcJgw/y9FsaVYD78cawe8NBbNrm9ZTRwiT1wQknYX7KyhbNf1qomsoVkApo6HHJDLi3tTrR26gEGu/jjyz4tWzYYXUqSVwSA6cGnBtTdN5DpDUMbuNkI3rr/aaI9uiYWfoAYfpSv4P+pDCtNfbOU4OhgVYe95kMpNkGgsSi2xBUq/RnSGrL1moy1Eu3ip9rkq8yqb1bffk5wl7JKWnKvrfKK3pMum+X/27uTlUBDQpgknditkdquwV+EtqnC7Q4v0PG4SD9cM+e19J28rGJ+l9Xcz6Ye5uxy6ljuL6dhOSeSBn/YZsPNlnSA/TdzUJHAlIgjYm22xVOOFHKWKUWR4v0gtZ9nSf70S3wCA5X4mlTWeRoWMhU06ACUi2aqZX/NDPqla5IrMRgL2mtbUSHlKMpiK0JzH09lrt83t6H7ljywHWOTGbnuzCyHM67oVs1Nf1QYpJrvVf8nGfoFQlRVDNSKNPX/g1pbZMgwOyyfdx+ba9fC0W4gDZr7+vhCW7EXAATK85Qbj1bYKlD+70hQZOKBH4VF1lqnshSGkGG8V+UObLpShlJh1Y5jQsfS9DmyoRWzUjByN0Dq5QWfoYCSQYcGSn2rdLpGcFxFmIDwF/0y4V0sy+/pjW1/ubvuYUPRjo9SveoRSEWF9bO4i04ClkmFKrExxuARJtUAFfAx4TekaAZ9/KEI7JCybixecBsCGWW5r+oZAHVoqxFBDxe1YTlYnXz0PfLcaVED1wE+kKmCcOiURIev4yLRP2sX+hSpOJgXfMGXAFfyQzmDijKGnFRGEiS+bO7ftwQNDcW6P+yIIdPJeq8U8LHShSYb6OsVm/0/4ugJssyIvZ28yWuD3w3WiXi993Su+o9UqGimxGMP8qm74EIyE5SdE9zdmY9Hm0bJyeqPCtMuJzLEmjrbgsrCWsKc7+Fvjj9A1lLqEqiWRLdBdCmWsLjK1myEROGUvB9mn55j0TBRv5DpYgV32AD0hlTCwXljtk4P/9K2ExuncXLgxAjckQM7rG1YX9hpKqQxYUaXJb2SeK0sNORkhCVp1eFzz98+AdGmvUxC4Cl1TxJpwM/oOcg7hM1B+LKTtRUjZmsRKALlvO+5aZHbBH8vRd1VyxHhbSfqkZiIhsPBoDodZvDV297iJDz7j8vFhQUu6l0oaPm81Isigv89mqlCTMFQGPNNuO1VId5QM5UHfbc2EGHMkuzu6vl9FsTBH6w+2wPaA1bDQ9lREzgJY6P7XSk2cfvouLC0xeuqo+EMSdp7H2tHrzHuEcmZgBU3IjXLQCZAqbL/FePhGrERRuN2+B/RAz8V10tE5YXIcYi11oWCoW8LY5n6bFEl
KtXP24BozwsaEM" + }, + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAP+aMuL/////gzXafnFmlTWZ0H3WQaPwdhHrc4FJctmUXDnm3AquUhBF7SaFE1RRy82nDVT3S4pvr5C7WGweblZuJfn/pzYJVyfZcBt0qh4VHDu2aYEjgW6U97bYCys2sHY1HJs9uxZ6GN4lmIVS3u8qGoS0wiGlZyea49UUFK3DTf3Mq+829+1PGcAeAL8eaQttslW1nb24jpwEnsZKnrWYYFwYzKxsDv6bCXb8eYVPtoy2rwGmeBoo/z4L6pq2CxXRuQ/0ZGV/3n9EuJ/xvq2q9+KcAiGbmWHwepykj92iMSEaExJD+G2cSsrTWi6sbo9SKhgSTvFdkXiYL1IPWqiJYUJ9sgP2PFla83oK4hLmYaNyJhQ4PkQ0+y/5iwKOnRFj1UkbDF6TD18C6faawvjKt2ncjQCTW+GVKY4+iEgWjcuD9RPCT8/kdzKNJ63gdSnhbUnpGLSRepERuqam64XuH4wZPbnQu1sKZorU8WhMw4oZp1sYJl6FVielRC+PLK2EmWwYwY0rn4vFQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDjLxaowARFABcYUBgFyPTMVz/AaBCE4kjdCD4dLzqmjvfqg8T6Uz4Lkom1BRqvDQi7BpskfczdTWv2RIWzN10O" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_1 b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_1 new file mode 100644 index 0000000000..3bba275aa6 --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_1 @@ -0,0 +1,8 @@ +{ + "name": "testA", + "spendingKey": "8ecf15914627a4e2a4e7fb83a8ac2a125cb7ccb9eb45b167da1d0dec5dfc913b", + "incomingViewKey": "433fe4df58d152d406aab9ad0e17b4da0412f014b78fe3e180e04425e21f7600", + "outgoingViewKey": "fcdb3e7049d3b92ca64b3d7e7c530bd3def3009d5987ddd77e026727ef6e4442", + "publicAddress": "a15e9c1d6d17a8c30da2b6463aa0114878d063cf0480fd588aa3b691691a0111cfb8749f74e129abbbc319", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_2 b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_2 new file mode 100644 index 0000000000..98557cdf3f --- /dev/null +++ 
b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_2 @@ -0,0 +1,8 @@ +{ + "name": "testB", + "spendingKey": "664e8769c3704cb1590872b1f5b725e1c4e3e83bd10f1a409f354071e52b85a4", + "incomingViewKey": "2d3b6904b1a5632280cc69812f74a42edba1dd6fa4ca3a66574df4fc13caad06", + "outgoingViewKey": "ad360acb135095f2be6baa871585b99ba4e668f92616491bedcc90f18183b233", + "publicAddress": "0e11cf27ac9823ccb5131278d645fc50c7d64d0030bae594b577973dd98fc26f689b969450f54642c87f2e", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_3 b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_3 new file mode 100644 index 0000000000..8b883a919c --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_3 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "2", + "previousBlockHash": "A1BA91BD54FCFE97D0DF8FE1F66B6217197B96CD52DB97FAD58B275E164600AB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:rl/Rqp5Cerd4zPs1zWU/ub/h+KFnudBFV183SMBqtRA=" + }, + "size": 4 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404318314, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "64FEE28CFACEDD1A62164D04EB1786068C0BD9F0870022C01454CDDB46DD9866", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////rUnCHS/RY328V1mOUTFn/qtZIPZwWX0AF0fBVhgifgcdwOV73iG6Ucxnbh8cFoQ2qWix5O8U5BZIl92QHza46+aVFJZrBb/MdzkAoS9lC67c8cNRF7V4n3R5oXFoWnLlA3YaVzE+dK3bStUmKE22ozM71PF6egdLgTZ70fBXvet9+m6zmGKAAhDiGPor9fduqGBH3+q3OhPEIoGlpEzknBKHLiVf7rVn/urxoTbXH19GqrOEkhGNVXObDUU8qvYzcjTc9BLMXyDxyvuTREavCa7sVCnWd/SQkvMen6yqJ5hxIGFrXZF7d7/7xh5iftYvU/KF7LHnEflrm9yfqRgjEpji0FMBd/RuUdcc7AgjYwBqjGTw0cl0jcG2v+76QKCKSQ9Wslpte3HT9Ad3PXroV1d93g7e8szCsKu3PjZ8H8bE4BFT/+r3Mzo17g7QXD1wJsejajbECPcnJ0YoMKsqaTDJ7ipAxPZiC4y7hy71OBG55A2QueVFJ6R0kkZ3NqZ8IqsvQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBlybNZQmsFtsIUYaxuu6wYwEez0Mjd6ABZGVDjlaRlAvLfEXXkQUbkNtGdPDE/bVTfEJWDzlIL4NFPifmknqAM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_4 b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_4 new file mode 100644 index 0000000000..3ec4622c41 --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_4 @@ -0,0 +1,37 @@ +{ + "header": { + "sequence": "3", + "previousBlockHash": "64FEE28CFACEDD1A62164D04EB1786068C0BD9F0870022C01454CDDB46DD9866", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:OKTtptAideZgmzhC4Ehh2Cu6WzA69oAk0ew0/8yglRM=" + }, + "size": 7 + }, + "nullifierCommitment": { + "commitment": "15B8367490FD835BB597E3D23539B58597CD84FA9398751B5C9A0673B1CECA78", + "size": 2 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404364010, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "EE82FBDDAC3A934D3CAC0B1A5F425EC7E47236FB2EF8A0EC806CFB6BF2A5DCC6", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" + }, + 
"transactions": [ + { + "type": "Buffer", + "data": "base64:AQAAAAAAAAACAAAAAAAAAAAAAAAAAAAAlCBg24OnuwD18XXbpseog8lNAL3aZ/HTz+KWMZe7H2+/yD/oZlbIFgsUhQYJiRi3ts/DyHovTWkulERiQR0U5rEnGAvEly1KVmUTzjNyjr7PkapJOVvKQ7Ff0LmwZVS9C/c51ykCxiiYoxYXQbV6KreU8GFqK641FjPaf1PXnc1pJmzlcEc/hR7OqgXiFjWatEEYIkCkB5bEH12I2OpCK5sI+mMMARI+eppopMqzOmzXY8k1wbnAr/4NnUhpQqNq9s/LK6gvJBPm6tuabeoJ/jmvd07YMJ6cLF2QI9fp/luHNUUkG5Hhbju8naCVSiQ/+A3RLIG0/Oz2TcHjw0TLwa5f0aqeQnq3eMz7Nc1lP7m/4fihZ7nQRVdfN0jAarUQBAAAAMn1iLJOBS7DFdoK92cSKsJjV8275t7FUR/YaqzdJnNjP1d8r621z48j9i6WzgqlwzJkqkHuQ7eTJAP6KqEzMTfCvtphplFebr5cvI4XYAlH64icXY9mnC1XaRpT4bFeALHZBgWG69ydzIunSxyoueFl2++k0B1jeSNFku4AXIRQJT9f6H7Dthj7JlRdATUJUJPeA9bpoCumzbwRR10XFzmxpmbV4kFKR/CU092V5nKXy+VVUBa78FTZ8ffkpOD6cAr86zxbAOXfgSR3EgblfmmvXdiGfUlJe4SDlJhmepB/IPH1Lr40MloSPIMZJ3evDaVJhGbCBCqyHKOf1QYoaY2QX77HFurpcx/rqHQbwYssWQVvakWYc9KommaYJaUPwSCUhbu8klJ9VbTHmX+U01S2Ba6lotO6IVOtsHK2PuMe6x62rvSKEb8yWggwRP2jlBNSKqNZZLW0tdtEy9zfPUvdmWeXvYOV/nFbGILIG+mydFbQ1h3qS5p9O4o3cPwPr/XNbqIdBFQ2KBl/7rtK3vF3sXc55N/11DMqC6csXhDmke08yd0AJHnIS6KAbJM/Q0DwRZyP92f2RXcYzFWL0Tvc4MefCFIUK9LAkZvXSpH53jcNZ7z2Iq8ATwZ/OMIeZrccqK/Up880FcB5TYZGCNWMqrmz8XxW5nAqVCFQ/ghwANpjAMTnb65uTYbTxPWhcRKfiIlmaZ2VMfAh7D8UYj8IST6iz4436TkcdYrj3nfyTOPQqwwUvc5r7x/yDpX33dyIX5yX57fmnVYJyfm1ZVzQSRK4ifgucdCipZ6fAwwBICLvkh8dzYlmtfmMDVM3RWbA281rEYYXZYWyvXB716YSTexcHeB5U8xbgRt4ZoK49am6FtE5DC+Ixs+dw63soe9wDD6iAvB1/jrUQRGd05/bXV+HLcZdvbdE9ladaDUMyexNpykg+rRD4nV6E1TsHqDtGJeaF6qbS4kbzy1xTfYH+91K7kskgDU7kkKZ3euyT3EdDYOy8GVagbRUy33Ljrlrq4kz/AL3g0en+mCod5G91nAck16Xq6tUsDwJUGlPeF3n5vWRFPAtJ5De6Y5lG85gb4QuNu8xWBDgMn1gavHUHPc80P4sbexO4IoHSPk/mpwST2lxIHJGjBEwJhAuz/IAGF05lUZWbAev5b9OBnXlTIlvWoAwiI5TISMTn4Kr4fJf0BIFZahb2ajCBQqpN5Sd0ylTg4EGz9Ph7NKlT+eZq208Cdf8vCRiee5a0fRt5UXCwVTSFlKyTdxmqNyau4b1lAJpC9X8CreJg5GWHdWVWJadzOZzHA254UQx3RUV2m4XYZ8QKgJhXJfrSyN9pQ9qd/rZ9fBiDEjL6Wn3HN3jM3ntpMxkb0cnsng22wEDTCjQNrkYi1tFxOu7hctpqdori/eE36UamQhunXLW4NUyU5fBotWz3q/XNvTi9bHd5HjetLgJ" + }, + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////sq1kIOiTUC7ETo9/WTQAMMm3T7gJujWfRoWtg3SG1NNRsOAtcLRy9cjs55/jD/KktGqzsfY3Tep5/KUp28qUhFxoeVTfI1EK44FyE1Wj0MRNmYKWkoLQbOsWZWwRFj2pFrSMDN6BebCwdBfRp3Dfr7WDvQV8NdoiUhcY/OJSjOcHpBT4vAJ8Sp/8IFj0DH5lrAqt795iN17Jm/JLrlmGiAGHK+WuIViUz9hSs3CzDpsY/iVJUx5FQ6sEPjExY/9WBp5WRfj0tSKpc6XaDWgVjx8p4gOmw7tNi4VVKNEtvJLSr1WfscFjHF2IC3Yf2T9iaBmDluzSXwN6UuHpSAIrBOqfIOm+41SOT0GkG3UJKwhQgQQuKrPOkAma/u7nMHQBp9gMY6gYJcAwRoh6gDAAvO5XkLXIq5XgJgeD5y4Z4H4E4Ln+o+dRnmGFkuhwgho2SbxFJyGlwMFH2WAbWijwjbFHFCz7Z1q8W4xlQS4TogUBphhEcbuDHc8D3scy+yfOgFb+QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCWls0hTjsRQ3N/ZesV1tEWPq4NDtxNogObGhRROuh9sHN4NRMcBBMTmhthl4f+DQzgHRRyUeNm6XuCyBOs6yYL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_5 b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_5 new file mode 100644 index 0000000000..371a0c00bf --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_5 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "3", + "previousBlockHash": "64FEE28CFACEDD1A62164D04EB1786068C0BD9F0870022C01454CDDB46DD9866", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:JD3ITTErwYaxUOzKApBLcBfC/vdaIdLO78rFdl7QzUg=" + }, + "size": 5 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404369203, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "1C42BECEEA2D7C4AD66945477432BCE571047529C2B20E5EC4FAB0A3C8FC2D22", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" + }, + 
"transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////qKUaCjXE/CkXZ/0L9ZroN4Ib9lDtdmbmunpRhq4Rai6Lx2vCFQgUk4W5T/qbW5vsoKUT4dGRw08Yp/0QVRpvOmqP93YqYCsuoIYYxicNuIpEuiiyLNHJEJBXn+3xrtB7DrOGWJF1sbbQxiAo/niqfyJyzr8QI/nkq/hiV5qjdshGEjvAdBAf+BvOgJYoqoo+sqdJyCCgFqSG/EV+XT2u5CS3yiTy6Jz1vmwRUfePUAQVnzhJvDOqNVkH1dZaYTOY+X/42aKqSJldTYbh5Vgs12/wBIchqKRbZtTp5jtYY47/v9eqZLyWg85ExVQuWJKJBhe0Eru1rYM1recTxJ1SMQLCKGp3kU0SVw2dRAUgmcGWPrZ2nPp3b3PAKQLSAH/fsL7L7eMkWyDBQLXESL+dGjRywo4kLoU3kOJkScYBn2r1CzIiXSQ5eGXMX36hEOeANlCbih++WYaJt0E6taFoyi2o9N/YLTnQzTEfGAFLIdUxOAOodlcipeiKeI9QK894somQQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDtcOak4lKSqfAWuroFhS5pijvr8UZq85E8D2yU+tx0cufjgUdwQfwy4zhsWVRsC7b+hXfyAYxQavkVX0bgxqkL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_6 b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_6 new file mode 100644 index 0000000000..8e8a5757a2 --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Keeps_spends_created_by_the_node_when_rolling_back_a_fork_6 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "4", + "previousBlockHash": "1C42BECEEA2D7C4AD66945477432BCE571047529C2B20E5EC4FAB0A3C8FC2D22", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:UTfit1WxFTeyY7Ygjw/fWUgzqdJA4RnoayZZLkkFIxs=" + }, + "size": 6 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404373975, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "44CF4096B92506413648450040EC342AEABF64E8163453F80FC5371B6B0CB200", + "graffiti": 
"0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////rP0QpYrF9gHiZOaDzdT2SgbKWA79wJeutq4O9ccLWUPsx4lY/uWEL+IontNBfQXAiHcACzJSmKXbT29WKt9uFQczQdSz66xcisEgFQXGLueNkoRpOpyhykS0GHoLhzKCEbfusIXXyBIRdOyq9S2q5cmFjkOHRKQdq/L7ycCLfZP8xh9cHMvJ39r6OywUUZ3LgAEio9//uuC5Pf+Hof1MmoWfwWVhnEGd9LSZLI3AgjMlNSR6zz7yMEcVmtClQ/c2xA3YSwh7mFMQ81ANaEGiR7NzV1E6/6NOchijruVaTN0P2I9Ms33OLsBrM1BaLfB882mTXQwkPtYBpY/YZFOOK16P/rpAKrzWNyqHA9qVTIkX1Bb2b2mZudl/QjPvnPRkFernU1GkEeJP4Vk3/ZPgAACa0Q1HIB9nLWorFSe0nrImQkmwm9FjserjOEcQ0fg0WfrHriNlMtHEoUTZUckidMj6NEs7UOx0rduUK90Pi6dS86QYjlx55RWxJIGZYb2LFbhPQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBuZVnz4BbhV1cYSWAhna3by/5kwhg6EUe4mUL9PI0S33F5DD2Rom9pMkvQSP2sYb6NBnMyVOgIaJYu+Oi6a9MF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_1 b/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_1 new file mode 100644 index 0000000000..6c3f637bed --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_1 @@ -0,0 +1,8 @@ +{ + "name": "testA", + "spendingKey": "cf4ab576ae306fcce9f5bcd00bbf89601d8ded26442eb8e5c451f0a83e3f4e2e", + "incomingViewKey": "265a50344bd8c001050cd80dc115118aab06fb539ac98fb6c7ccb62c4eed1303", + "outgoingViewKey": "fd2153ec498aa3c4445644f9586c0928da1561b22db1302367cf6b09be1ba50c", + "publicAddress": "30736dfe8b8eef82f79427175dc818d12a90ba8eacfd7fd4eb25cdf27f2fa781b7d196a988006cc254599d", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_2 b/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_2 new file mode 100644 index 0000000000..1d5279012f --- /dev/null +++ 
b/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_2 @@ -0,0 +1,8 @@ +{ + "name": "testB", + "spendingKey": "31d6f6ba480007d13de36503bbb4019e7cc478ad4ec31cc90c62ff4ebd3ce4c2", + "incomingViewKey": "603abf19be4a142be2e8cabc605266bbb8f2dd10c1119d86e8da16c507fa3505", + "outgoingViewKey": "5e753eca22f0dd62e45957613e5125f20853a72c4f93706e10e958dc5c8ce1f1", + "publicAddress": "fef4e0ffe42b33ea59bc6836fc5908b46c5b0a7481a4baca58949b83eecf0e083cd50f2fecfaa4883cebe7", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_3 b/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_3 new file mode 100644 index 0000000000..8ae5796cc0 --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_3 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "2", + "previousBlockHash": "A1BA91BD54FCFE97D0DF8FE1F66B6217197B96CD52DB97FAD58B275E164600AB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:42UhMPhkKRd9OvErIELmpg+018Boa7853m/3ILnM6E4=" + }, + "size": 4 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404301119, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "D88073AC535DCD27068C3906FA89EC209082303940447C439E40576FD5640BAB", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////jrwh0YYB6vYN6rPnEhDD7oPs39X32q3jmakHOqP5lwUoZ6TNV693X7mplUM3F1KpoZ4e0/ZFx6RdeBOHaoXb/DOiqDYiuL5pcGsAi1zoWstL73vxTYCav3KNxqVr5MI2AQ7RH7oxf7En6c0jf9qQunN8mlqXqsCDxT4ULwhI/KMApD3FJSSvFYgXsu8o/3Angez6jvZxMRvfw8ptH3HD8NHuseQ1K38/QBwpqcsqgWrsvXRJK7rt+rOIAfEj9mrl0grRHrVqe0J5ApX6l4p5bRWLpre9YlOR8Xk7dtl9JPBK6s8Ex7UtlDcwtxGVdwl37StGA9oAj8eD4pcmKX3OHAZaF1AWDC5VFVTIu38ALThpMYAxEJ/5+lLKQKb+RnBRzNsabyF7yIjCI08ZQ3tFUp9HoCxmb/gBtkHTmj/v1UqqREX2tURTb/lxnhjn4zQP317YN7n0QXLulvyTAmrIr4mUct777n9Y+I842D6HLRBo79WTk+X0+y1cH6vyUaOYORBaQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDC4c1XuTCeS/tSJzBO3yRU5YJdY0gHfZPMXg4BoyiK5JgP8KAme44qnHOJGPX7OYOurWI89y/TK+CloXrNmX8M" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_4 b/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_4 new file mode 100644 index 0000000000..24f1327e18 --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_4 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "2", + "previousBlockHash": "A1BA91BD54FCFE97D0DF8FE1F66B6217197B96CD52DB97FAD58B275E164600AB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:x9mQe11XEMEuFiepSvE5KqKm7t2NNujGiUa/WdIfhx0=" + }, + "size": 4 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404306117, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "8F176C780341005E1BD130210475C55E17C3DEEBFABEA719F923EA59B85C9364", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////r1TKE19ci2ff+UHZ+Pr8d3nu35cpHQwXiqPESn10ArajRMbEUfmAtv/wqKirhykKhehnyCPwLUJXH0dZDOCQtM0o1hdiwnR8rIfB99RgXKQBpg9iOxEI/s7j7oyZAUzwEFUyFYP8O0JQO9ETOcgh0fuC3Hxhdm9u+82jWLkTNoIausk70xsUYFZxuFCPse1ptcq9fWV8UPaHHsoZirS6HTYp1bEDhR15oAhb0eMbKxCpYS8E0DsTHzvvxzuKb60xb0jfXS7nFxfNVqVbe1xPPEpOYJWulN6PkpLUAqwM0taLhhVcglaGk//HVnxy7Nma26qofPW3tXhBbxNEG/BjVX/GBK7cFGLNN8dUhC3SV1/k57mi5DRgjHkXnLZ4dzUqdKouu5qQTo1W/YqDE8ZIm1fvkLVCsjdIq5i+R/kbzZVV/QsFg8QnbfNOxTuDC8vx3QU7Gm0rSo+PWxXZJfv7nzJS4d6isByqLuB2L4ixfSEYHAdfCNCBUyhK0Q1VFfNyVHkbQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCVnpMtcODQc8i9eXIctSjBQoaej3LwMRJrGNstVKdBhJn/Ir0r67rGTjp8VUd/3kP/bcREUvGLWA0khgRvl6gC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_5 b/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_5 new file mode 100644 index 0000000000..314e5426cc --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Removes_notes_when_rolling_back_a_fork_5 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "3", + "previousBlockHash": "8F176C780341005E1BD130210475C55E17C3DEEBFABEA719F923EA59B85C9364", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:LO/A6cUE7tO392VF74zu5Ax/EPQZoa0UboXvgzaNjlI=" + }, + "size": 5 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404311020, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "7BB42BA43B2C5DA96A08DAECB11BC424623502CC3942248F325DDC8FDD9B740A", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////s6d8P1PJFKXD0kGWRtnxMTp39mQiQU155cEccL2K89i+2ha8oAOVZ3Ly4Q5cNU00i4erQbIPo+NzEocA4isE2Xa5ae15vhEfp1/4MZ0UMosWMLQqUf29VAWoN6ElQhlhF79dYSrM7bjdtbzvjEMigA/CKVY7RhRxV8aRU37xZauTl4A5yaABtao7VGY4LsMpi/HMnoguAr6c6oeOr6NW6suqmGivbQX4H3XuMHQydSa3W77SpMY5HUi92+UVtG6K3spJmP1H8ijYLB8oQ8b9tTDLKteAx7vUu6MTtHZnDh2fTx1M+7vWmi6bGGnIkX9m8/SsBChqVXNXmKMsWQAgWzbInb17Tpg9q9uGX6erpov/32fpAn16LPAM8SS5zwrHJ/rwR8KgqlC6aCFC7/0VwmNe4bvMOhHBVcWPu3OLY25FGH+g7GcF4Bte7cmeNXaXef9HmcWke/iShlpF3WCcgDLU+iicVT680Uo+nCaJwUADL/yYbmX/qyv2yOYPxfZI+q79QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBw6PQ45flAAa5TPDtCyWAuaiSkSYuq+deoB3pG0hLAwb6Q4oLyiluDWXQEs6ePFpjoLDcsHN3IUo3o2SVyQe0G" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_1 b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_1 new file mode 100644 index 0000000000..7e2016ef1f --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_1 @@ -0,0 +1,8 @@ +{ + "name": "testA", + "spendingKey": "6c7d09f997bd253e5fe899e872641b0a5bb041c97c2960c6603925f75babf307", + "incomingViewKey": "71356bbab9baa1c86c604982b72a166e980d26500ebe23d74f0bfdb461179d01", + "outgoingViewKey": "50c100c288a2929fc35f5a93ee0ce6d7a5228bf7e81f47f502fa6f940f180bc9", + "publicAddress": "7cf082c568525d0aa0f4f5b08d2689ae37aae86c9fd3449ef94ec9c4194b5c4725ed0cd0024792b9d3970f", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_2 b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_2 new file mode 100644 index 0000000000..f92d3d2f48 --- /dev/null +++ 
b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_2 @@ -0,0 +1,8 @@ +{ + "name": "testB", + "spendingKey": "e273c9d58c84a1be9bf9cb9dbd210f019bb40c0573674292d4f35a6d1deed5e2", + "incomingViewKey": "bf6b2ebb49eee159a813ed02585d6589047e7683204536a2764ed459b5e68403", + "outgoingViewKey": "70d663bbe6919528b895183e05e1289eb1f4d150e3be2d3bbb4f44852603f9f7", + "publicAddress": "a557a9bb9c1ed3141ae7ad653ab3b4c9829721bc9aa991f913cff10411ec011fc3937929a304c9655680e7", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_3 b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_3 new file mode 100644 index 0000000000..09b39cc706 --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_3 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "2", + "previousBlockHash": "A1BA91BD54FCFE97D0DF8FE1F66B6217197B96CD52DB97FAD58B275E164600AB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:nxpf1qRUr47/YsXCRnDD1hOsCgA8xliCrw0QHHXGDmU=" + }, + "size": 4 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404381064, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "B0584C1F18873FF4657F8CD9B67D121CBEE49A0CE84F726798851AB364FC18AD", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////uFJrD6SEi5/zZtd/L1ItTPj5PS6QNE3Yw0mbkt2rFVGi8Bv3uT0Zo/zutqXjswwyt3AXAiV4wd7d24GVk6cMzXHercMHnwvSHaa+guBWjGK3PfFVaZhdg/lDIqhrApGtA6VAi6yr6Q/SQM0Zar2DAwpN6cZwtWo3kFWCw2+mP2WnCI/OgMh/zDVl4rhVeM+xtI6JWqe8rvk2nIJQinf49oOUKiWi1DmQiJPFKOAflOVv2m0ds6ecyp0SdMaWkvwlnaTvIM2db0yMhA3/blMI0EsKRCYJwfWW6eUOw39OVed59ve5VVaZyqSW6JmuU9kYr/EY7TzD284zBu6uP6+bKHD+Hd1r7hBqiNnJoO3/SLCOVjY87C8L3AuZeWHYKee6njhCOXLeTeOl20VW3LtzA+ScaCe6Ulsn+QnYIzM5woVHiddU2/Vte9Lt1PCbu+zrI9Nhj7IkQpf548BsjH2ytQ154QFwOpU+Sy7I+XO/T7zVhJIc5JrSelxT5CbMv5XziAG/QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCCMafdYEnzZZlgFpk2t3LGGghSrPWGdyjIqld3A7AgC7HlHQ7vcdzqi8S6PtQhYMnRBrKJB7kmXBPiaVBgj7oC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_4 b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_4 new file mode 100644 index 0000000000..888e307035 --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_4 @@ -0,0 +1,37 @@ +{ + "header": { + "sequence": "3", + "previousBlockHash": "B0584C1F18873FF4657F8CD9B67D121CBEE49A0CE84F726798851AB364FC18AD", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:xPNl7s+V4jEfkdvhwMUvmrfhitbazg6OpyA8gZqUah8=" + }, + "size": 7 + }, + "nullifierCommitment": { + "commitment": "91AF38B825B369316AD288B0EB511CD1DBD7EE80058D0D723E25AADDB460F996", + "size": 2 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404428104, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "559E86CB12BC42D13F5AE73745848ACC74329856952750E02F0AEF910DF5E720", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" 
+ }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AQAAAAAAAAACAAAAAAAAAAAAAAAAAAAAiuaNrmKKfV+VdLFPa13tPDsUNiqPKwOaV9NnfjRWhPN5Ej8dS4AvpDCg7n6SBxLduMgyjASIStDvVL2F3blzrQSHhoc5qGwCcKw7kOP08kBdzfwBvCQx7Sb5RKB1zs71FjmaRZrWishPGnR0aIWE4aTu/A1r789Omk9y3ck+tM5TZJrf5VTMhnGnqPlKwjOho8sy77vgVloUoohNZP8jmL/fL+KKLQOUehiAdTTW/b1bmpM67iNN2202qCAKnGawLEQsW8T1hIbfvKDd9ciJn6fktuZ1q59dI/lnKSKT6xXWiQ6utFgeMP6nKVGUhtldpLScAh8lIJkM5V0FOdT7wZ8aX9akVK+O/2LFwkZww9YTrAoAPMZYgq8NEBx1xg5lBAAAAPt4MO3HTmUuAzcleJmRaeTWHZhOxdfUhhV74tmsHwY1nAv7rNRSa6Jr04rZUUFnfmVo/JM5dSU7W9al1H1qtVCOIngDgQfQAjHCPVHSLJ6d3qFVTnnpL2tLQlD70AvnBLEB3n4oLE5qLkzs4qQAWkWPVD4zc4L2YxUf1TzoOBhQmxV0+y3v/pmJnM0HxGVgbYNy0sKbZBkuGWKuH24TUIbIPIUEn8Fs979y/Kjxb1NUJBvsPqsulmcQKw+RR05wJge6mpiFP4BRk79I0+l+7GbU8lR0QtPi9MPSIBO5axiaauVzI71Qdd8EpT6FI8MTEJYzNniCr/RgS9j7erRhWzsk0CVVJRsssva8mtUO7PHHQNKHEFbXcRuGnb9KVPRhUqhcTvHUBaqJfYC3C4YyzH3/6Bsxb3YB/EZayKhDBgKrPUk0BWlRPwoNneLVGXGAtJ//a7sMFn4johx4OdKzrjRizDDErTgRWrrzs6HvttA0/9i68qI0iwkYW14dCf4g4QJSdnS3g/a6ks+cDYoli6XU9DC/YmPlc8E1+xQTABDivodBrCJVJ6aOEvzui6zg1wXkC6D0nSeX0xTJx+xje7zEtnrnlgoDZv5EeeaThwjx49eSReQkxMsditduJv798mhZZ86fzPG0qyKvMfwEf3bN2ce0eBm1l3XQ+tAB9hiewjpwU0EHiGttLyJVd7jNrN4DJGqAgYTHSslMMMYcfgEIJB+11N/H6i/10TrKYOqKikRLhgqkzJHtb5KXudeMDhD12HKAx20cZZCN2XkNs3Q0yUuDMMxPOhHpvPcO6Cqz47EYsjLe0WFlsDINg+YTQ67CNhdBhAPoaVtKtJUr3aukndM6DolAuTApOEemKvGkE7zIGMu28Qg2cgutCDr1ni6baUdAadz+h60zRb3KSz/Vk9G5edE8G9Sbnacwg9MWXTtSi4STl3PprQb1q3b77bRvQxdNYYz24CTZIPgUYPAAqpZVqTxD+i3H0LbjuEq0exkNb7h9ylELZ/0eSSSngEt9rsFo6O1o3Rkvf0DHYtz7ayYxe8IfZfakBzs84jf2aUwdjwET4sbeGEn0E7xspKcIBbgAjchOirYNCdBtTsY98zqa7LOMrQDWPKc33RyIkX8MBckC99CJW+QUuNBKha1FvidxJ105hZTw5tA3kiIUOjceewBQL5InHn17VR8uexItWWbRMqKG88HhSNSIttFPI0BQ7utssXucTtkHeH//u1/tgwsebjXGZfebrAh3SpFo78+li5tsFWqmVTQXR4ux+rqzodyGbk5iPgBwdNIROZZdeGH6fhhRyPw9zi89lXtjJ9Yy4qGNRB2fG31UTX0VvaQgmJYvUJdIN80XHz/Kzwxtruflv+Viu6DntbA3A9Ee/Pv6mGYqU2tCUnJ+JGZ5AZVnTmBHAWsaCdEOYzo83b7mphfYDDpii15i3lQ6vEZTkSEO" + }, + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////tFtPDaFFA8moJX9D5umKS5CE44401yVRSABZcKPzRPEAbUGh4wi+7b7m0qzM2VFzj72WgTSe/J/Wg5vfRxb/yoJN1rGLQqbBZ2ahBVzPZMMTaVXnJPlHTBPd4c57+Wi7Fl3KLy7rm5FMlxisFW7/vBkZrnel1gpFZU+GJL99KXcK8N/tccy5819WvsMISCCMlXqK+9N+tR3ud6r1qiDo8t3DEYd19lioTx5VmF2/l7Gi0ImzjZ77yr4IHhBVblscSdJVKsPwPDlnoQC72Oz6E+4/yQkKcADrzucOFKYbJJS1hg/g0vtJrqmZdTTGgn2r9A3Vg4JjjHQ5ECcF+Be5COUA3oOxkOOLp3x5SvC/401h5/gg4iRXqmsmNPbsnTZdWN8XaQ/iuaf1kjUhkly4822G6v7EMR6GiSDtabTRImBbSNqx3475cI2ACQaybx+Ji6IiMsisHNt0ckdLqK8TFle/ESvOYfS1YwttwJQkfBBNbWdVAJ1nlOVgIukQO2lkVTHBQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDMuFRVY3xJaD9BGuhraVnvzvN1EhSAJW7zMmaM1eGJb3XpAQ6V43N3a157vcMjhFbt+OFtzE0X1pZPi8fWhuYA" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_5 b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_5 new file mode 100644 index 0000000000..23a1dc07ff --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_5 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "3", + "previousBlockHash": "B0584C1F18873FF4657F8CD9B67D121CBEE49A0CE84F726798851AB364FC18AD", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:WiRrZZClNpBJZT/EV39FIK8ejtQ5e3uyJ3gU2TvCbgs=" + }, + "size": 5 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404433407, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "D93707E44F2651A40952BDA66301F2A37596274CBDB9C711A290C86DAC4DD4BB", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" 
+ }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////sUTuxrerxJPdouB4woDTTk5Z5dLzKk+gyo4VXXqKncIA4WzbRlChkJrPDap2EMnSjr1x9Yh7T3HgixAoauuA7K+97JBRT5N6jgJSCKcGdXA3COJ1gXLkfYNfYfSWRQuuE08KOYaC2//ETgFGi6OOqNJ//td/G+y8z0pX0M9vVQ2F6+H5hrzKUtyTOkDoZt5piERExCtN0IVMt2Ji7fQOmZaHii+EsFonMy/w1b2gJPWgv0ErB8t35oxsPbK9hcl9dUYCK/hDJFuUzyX9Op+xs22uBwIHK8kLYpSOebGs18LkbFjFOadRuOOh3RZmlEUeZpCPwunx6HS4JdYcSqRaPtkJSME6tMhTcE5Kn6BDnjOElIInCkOaviE5Adzstz1Eby6vx+SX9LJNhIPMwWFbg1ifGJge4wDqwv09ATRsjM5hJC1jJhTGlW9CM/BkfCwYwVOmx3nRbDT0nbq+D8bVClondr5tKGDnBXSSzQ3BhNj7QY19mDvO8IOAuBx4rtqptNbZQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBLNmSpLXWuaht7iRPh41a+MHZTX2pBPUvK5HYJVWpaSfxSL05G/H9FdhtTB7yQJoIry4CKeIRrZHA/gtFAo8AM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_6 b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_6 new file mode 100644 index 0000000000..0023234447 --- /dev/null +++ b/ironfish/src/account/fixtures/Accounts_Undoes_spends_created_by_another_node_when_rolling_back_a_fork_6 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "4", + "previousBlockHash": "D93707E44F2651A40952BDA66301F2A37596274CBDB9C711A290C86DAC4DD4BB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:qu302BHVt0ie0UslF2xI0P0eVf915VoqEbt0W2j3mFY=" + }, + "size": 6 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "randomness": 0, + "timestamp": 1617404438406, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "647FE7725405BFA348FFA88ACB88146DD8D0F0D8ECDF4CCCBA0FD843FD373A62", + "graffiti": 
"0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////pbySzDJ5rd2JadTNmW2HK79c4uDuNeMdRsxedsBV7ApeE9fcWpgr5yt5uchndtixijUXE1mHPVoLp1MYydbFrFPF+PvAV9YNNVoWAHATsuLEMPXIVd33SwwBldPEqkJfADl0n61TYHd5GRg6AEkGzdSr8oMNG7lBybK0QvGgFGYL2HQ0SlYNRgtsP6qDZrJmiNw7GYwuHNDlTpLCAS06nO9JRXOZdgVyDOqEIrPNsfz7EjiXAczKV9I8ih68xDlQc90/OC6yMIk5VAWXBigZTz+VfEPKWig6IMAHenofb2oiijz2CLOb9a/wdYVWue2wP0vY+c2tSIFVka43iMPNCDiyMvyM72oSmZ1fYRS7qa0wcPoS+zPtBZIa4kdU0X+s3iLNrJOiblt+bShSk/wOlvE6TNOWxvmPdBGoZNaBlq+tkGNMx2Jsjmpkkposq2lc8VwkiuwVgG7yAD5qwqJn0gIroi6sz+yETmwm2z2L8A8ZnQR5Cfiyrx7XjGuYg2P2ZYm5QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAsXTg/2WXgNNtQpxigArJMGnnQu6yUeEGnlK+MPtw4V8Min3Ie2iOepUdS04yee7iuAameYwPuuYZG8nU0L0kN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/account/index.ts b/ironfish/src/account/index.ts new file mode 100644 index 0000000000..71616abc0a --- /dev/null +++ b/ironfish/src/account/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './accounts' +export * from './accountsdb' diff --git a/ironfish/src/assert.ts b/ironfish/src/assert.ts new file mode 100644 index 0000000000..687006ea0a --- /dev/null +++ b/ironfish/src/assert.ts @@ -0,0 +1,25 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export class Assert { + static isUnreachable(x: never): never { + throw new Error(x) + } + + static isNotUndefined(x: undefined | T, message?: string): asserts x is T { + if (x === undefined) throw new Error(message || `Expected value not to be undefined`) + } + + static isNotNull(x: null | T, message?: string): asserts x is T { + if (x === null) throw new Error(message || `Expected value not to be null`) + } + + static isNull(x: unknown, message?: string): asserts x is null { + if (x !== null) throw new Error(message || `Expected value to be null`) + } + + static isTrue(x: boolean, message?: string): asserts x is true { + if (x === false) throw new Error(message || `Expected value to be true`) + } +} diff --git a/ironfish/src/buffer-map.d.ts b/ironfish/src/buffer-map.d.ts new file mode 100644 index 0000000000..9c6bbfb1ba --- /dev/null +++ b/ironfish/src/buffer-map.d.ts @@ -0,0 +1,53 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +declare module 'buffer-map' { + export class BufferMap implements Iterable<[Buffer, T]> { + constructor(iterable: Iterable | null | undefined = null) + + readonly size: number + + get(key: Buffer): T | undefined + has(key: Buffer): boolean + set(key: Buffer, value: T): BufferMap + delete(key: Buffer): boolean + clear(): void + + [Symbol.iterator](): Iterator<[Buffer, T]> + + *entries(): Generator<[Buffer, T]> + *keys(): Generator + *values(): Generator + + toKeys(): Buffer[] + toValues(): T[] + toArray(): T[] + } + + export class BufferSet implements Iterable { + constructor(iterable: Iterable | null | undefined = null) + + readonly size: number + + has(key: Buffer): boolean + set(key: Buffer): BufferMap + delete(key: Buffer): boolean + clear(): void + + [Symbol.iterator](): Iterator<[Buffer, T]> + + *entries(): Generator<[Buffer, Bufferd]> + keys(): Iterator + values(): Iterator + + forEach( + func: (value: Buffer, key: Buffer, set: BufferSet) => void, + self: BuffSet, + ): void + + toKeys(): Buffer[] + toValues(): Buffer[] + toArray(): Buffer[] + } +} diff --git a/ironfish/src/captain/Consensus.ts b/ironfish/src/captain/Consensus.ts new file mode 100644 index 0000000000..daa3e8897c --- /dev/null +++ b/ironfish/src/captain/Consensus.ts @@ -0,0 +1,3 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ diff --git a/ironfish/src/captain/Verifier.test.ts b/ironfish/src/captain/Verifier.test.ts new file mode 100644 index 0000000000..ab4ec0a2a3 --- /dev/null +++ b/ironfish/src/captain/Verifier.test.ts @@ -0,0 +1,238 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +jest.mock('ws') +jest.mock('../network') + +import { RangeHasher } from './anchorChain/merkleTree' +import { + TestStrategy, + makeCaptain, + TestCaptain, + makeFakeBlock, + blockHash, + TestBlockHeader, + fakeMaxTarget, +} from './testUtilities' + +import Target from './anchorChain/blockchain/Target' +import { Validity, VerificationResultReason } from './anchorChain/blockchain/VerificationResult' +import { BlockHeader } from './anchorChain/blockchain' + +describe('Verifier', () => { + describe('Transactions', () => { + const strategy = new TestStrategy(new RangeHasher()) + let captain: TestCaptain + + beforeEach(async () => { + captain = await makeCaptain(strategy) + }) + + it('constructs a verifier', () => { + expect(captain.chain.verifier).toBeDefined() + }) + + it('extracts a valid transaction', async () => { + const newTransactionPayload = { + transaction: { elements: ['a'], spends: [], totalFees: 5, isValid: true }, + } + + const result = await captain.chain.verifier.verifyNewTransaction(newTransactionPayload) + + const { transaction, serializedTransaction } = result + + expect(transaction).toMatchObject({ + _spends: [], + elements: ['a'], + isValid: true, + totalFees: BigInt(5), + }) + + expect(serializedTransaction).toMatchObject({ + spends: [], + elements: ['a'], + isValid: true, + totalFees: 5, + }) + }) + + it('rejects if payload is not a serialized transaction', async () => { + await expect( + captain.chain.verifier.verifyNewTransaction({ notA: 'Transaction' }), + ).rejects.toEqual('Payload is not a serialized transaction') + }) + + it('rejects if the transaction cannot be deserialized', async () => { + await expect( + captain.chain.verifier.verifyNewTransaction({ transaction: { not: 'valid' } }), + ).rejects.toEqual('Could not deserialize transaction') + }) + + it('rejects if the transaction is not valid', async () => { + const newTransactionPayload = { + transaction: { elements: ['a'], spends: [], totalFees: 5, isValid: false }, + } + await 
expect( + captain.chain.verifier.verifyNewTransaction(newTransactionPayload), + ).rejects.toEqual('Transaction is invalid') + }) + }) + + describe('Block', () => { + const strategy = new TestStrategy(new RangeHasher()) + let captain: TestCaptain + let targetSpy: jest.SpyInstance + + beforeEach(async () => { + targetSpy = jest.spyOn(Target, 'minDifficulty').mockImplementation(() => BigInt(1)) + captain = await makeCaptain(strategy) + }) + + afterAll(() => { + targetSpy.mockClear() + }) + + it('extracts a valid block', async () => { + const block = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 5, 6) + const serializedBlock = captain.chain.verifier.blockSerde.serialize(block) + + const { + block: newBlock, + serializedBlock: newSerializedBlock, + } = await captain.chain.verifier.verifyNewBlock({ block: serializedBlock }) + + expect(newBlock.header.hash.equals(block.header.hash)).toBe(true) + expect(newSerializedBlock.header.previousBlockHash).toEqual( + serializedBlock.header.previousBlockHash, + ) + }) + + it('rejects if payload is not a serialized block', async () => { + await expect(captain.chain.verifier.verifyNewBlock({ notA: 'Block' })).rejects.toEqual( + 'Payload is not a serialized block', + ) + }) + + it('rejects if the block cannot be deserialized', async () => { + await expect( + captain.chain.verifier.verifyNewBlock({ block: { not: 'valid' } }), + ).rejects.toEqual('Could not deserialize block') + }) + + it('rejects if the block is not valid', async () => { + const block = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 5, 6) + block.transactions[0].isValid = false + const serializedBlock = captain.chain.verifier.blockSerde.serialize(block) + const newBlockPayload = { block: serializedBlock } + + await expect(captain.chain.verifier.verifyNewBlock(newBlockPayload)).rejects.toEqual( + 'Block is invalid', + ) + }) + + it('validates a valid block', () => { + const block = makeFakeBlock(strategy, blockHash(4), blockHash(5), 5, 5, 9) + 
expect(captain.chain.verifier.verifyBlock(block).valid).toBe(Validity.Yes) + }) + + it("doesn't validate a block with an invalid header", () => { + const block = makeFakeBlock(strategy, blockHash(4), blockHash(5), 5, 5, 9) + block.header.target = new Target(0) + + expect(captain.chain.verifier.verifyBlock(block)).toMatchObject({ + reason: VerificationResultReason.HASH_NOT_MEET_TARGET, + valid: 0, + }) + }) + + it("doesn't validate a block with an invalid transaction", () => { + const block = makeFakeBlock(strategy, blockHash(4), blockHash(5), 5, 5, 9) + block.transactions[1].isValid = false + + expect(captain.chain.verifier.verifyBlock(block)).toMatchObject({ + reason: VerificationResultReason.INVALID_TRANSACTION_PROOF, + valid: 0, + }) + }) + + it("doesn't validate a block with incorrect transaction fee", () => { + const block = makeFakeBlock(strategy, blockHash(4), blockHash(5), 5, 5, 9) + block.header.minersFee = BigInt(-1) + + expect(captain.chain.verifier.verifyBlock(block)).toMatchObject({ + reason: VerificationResultReason.INVALID_MINERS_FEE, + valid: 0, + }) + }) + }) + + describe('BlockHeader', () => { + const strategy = new TestStrategy(new RangeHasher()) + let dateSpy: jest.SpyInstance + let captain: TestCaptain + let header: TestBlockHeader + + beforeAll(() => { + dateSpy = jest.spyOn(global.Date, 'now').mockImplementation(() => 1598467858637) + }) + + beforeEach(async () => { + dateSpy.mockClear() + captain = await makeCaptain(strategy) + + header = new BlockHeader( + strategy, + BigInt(5), + Buffer.alloc(32), + { commitment: 'header', size: 8 }, + { commitment: Buffer.alloc(32), size: 3 }, + fakeMaxTarget(), + 25, + new Date(1598467858637), + BigInt(0), + Buffer.alloc(32), + ) + }) + + it('validates a valid transaction', () => { + expect(captain.chain.verifier.verifyBlockHeader(header).valid).toBe(Validity.Yes) + }) + + it('fails validation when target is invalid', () => { + header.target = new Target(BigInt(0)) + + 
expect(captain.chain.verifier.verifyBlockHeader(header)).toMatchObject({ + reason: VerificationResultReason.HASH_NOT_MEET_TARGET, + valid: 0, + }) + }) + + it('fails validation when timestamp is in future', () => { + header.timestamp = new Date(1598467898637) + + expect(captain.chain.verifier.verifyBlockHeader(header)).toMatchObject({ + reason: VerificationResultReason.TOO_FAR_IN_FUTURE, + valid: 0, + }) + }) + + it('fails validation if graffiti field is not equal to 32 bytes', () => { + header.graffiti = Buffer.alloc(31) + header.graffiti.write('test') + + expect(captain.chain.verifier.verifyBlockHeader(header)).toMatchObject({ + reason: VerificationResultReason.GRAFFITI, + valid: 0, + }) + + header.graffiti = Buffer.alloc(33) + header.graffiti.write('test2') + + expect(captain.chain.verifier.verifyBlockHeader(header)).toMatchObject({ + reason: VerificationResultReason.GRAFFITI, + valid: 0, + }) + }) + }) +}) diff --git a/ironfish/src/captain/Verifier.ts b/ironfish/src/captain/Verifier.ts new file mode 100644 index 0000000000..8a4a4bb70f --- /dev/null +++ b/ironfish/src/captain/Verifier.ts @@ -0,0 +1,428 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { default as Block, BlockSerde, SerializedBlock } from './anchorChain/blockchain/Block' +import Strategy from './anchorChain/strategies' +import Transaction from './anchorChain/strategies/Transaction' +import { isNewBlockPayload, isNewTransactionPayload } from './messages' +import BlockHeader from './anchorChain/blockchain/BlockHeader' +import { Spend } from './anchorChain/strategies/Transaction' +import Blockchain, { Target } from './anchorChain/blockchain' +import { PayloadType } from '../network' +import Serde, { BufferSerde, JsonSerializable } from '../serde' +import { + Validity, + VerificationResult, + VerificationResultReason, +} from './anchorChain/blockchain/VerificationResult' +import { IDatabaseTransaction } from '../storage' + +export const ALLOWED_BLOCK_FUTURE_SECONDS = 15 +/** + * Verifier transctions and blocks + * + * @typeParam E IronfishNoteEncrypted + * Note element stored in transactions and the notes Merkle Tree + * @typeParam H WasmNoteEncryptedHash + * the hash of an `E`. Used for the internal nodes and root hash + * of the notes Merkle Tree + * @typeParam T Transaction + * Type of a transaction stored on Captain's chain. + * @typeParam SE SerializedWasmNoteEncrypted + * @typeParam SH SerializedWasmNoteEncryptedHash + * @typeParam ST SerializedTransaction + * The serialized format of a `T`. Conversion between the two happens + * via the `strategy`. 
+ */ +export default class Verifier< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> { + strategy: Strategy + chain: Blockchain + blockSerde: Serde, SerializedBlock> + hashSerde: BufferSerde + + /** + * Used to disable verifying the target on the Verifier for testing purposes + */ + protected enableVerifyTarget = true + + constructor(chain: Blockchain) { + this.strategy = chain.strategy + this.chain = chain + this.blockSerde = new BlockSerde(chain.strategy) + this.hashSerde = chain.blockHashSerde + } + + /** + * Verify that a new block received over the network has a valid header and + * list of transactions and extract the deserialized transaction. + * + * @param payload an unknown message payload that peerNetwork has received from the network. + * + * @returns the deserialized block to be processed by the main handler. Rejects + * the promise if the block is not valid so the gossip router knows not to + * forward it to other peers. 
+ */ + verifyNewBlock( + payload: PayloadType, + ): Promise<{ block: Block; serializedBlock: SerializedBlock }> { + if (!isNewBlockPayload(payload)) { + return Promise.reject('Payload is not a serialized block') + } + let block + try { + block = this.blockSerde.deserialize(payload.block) + } catch { + return Promise.reject('Could not deserialize block') + } + + const validationResult = this.verifyBlock(block) + if (!validationResult.valid) { + return Promise.reject('Block is invalid') + } + return Promise.resolve({ block, serializedBlock: payload.block }) + } + + /** + * Verify that the block is internally consistent: + * * All transaction proofs are valid + * * Header is valid + * * Miner's fee is transaction list fees + miner's reward + */ + verifyBlock( + block: Block, + options: { verifyTarget?: boolean } = { verifyTarget: true }, + ): VerificationResult { + const blockHeaderValid = this.verifyBlockHeader(block.header, options) + if (!blockHeaderValid.valid) { + return blockHeaderValid + } + + let totalTransactionFees = BigInt(0) + let minersFee = BigInt(0) + + for (const transaction of block.transactions) { + const transactionValid = transaction.withReference(() => { + const transactionValid = transaction.verify() + if (!transactionValid.valid) { + return transactionValid + } + + const transactionFee = transaction.transactionFee() + if (transactionFee > 0) totalTransactionFees += transactionFee + if (transactionFee < 0) minersFee += transactionFee + }) + + if (transactionValid && !transactionValid.valid) return transactionValid + } + + // minersFee should match the block header + // minersFee should be (negative) miningReward + totalTransactionFees + if (BigInt(block.header.minersFee) !== minersFee) { + return { valid: Validity.No, reason: VerificationResultReason.INVALID_MINERS_FEE } + } + + const miningReward = block.header.strategy.miningReward(block.header.sequence) + if (minersFee !== BigInt(-1) * (BigInt(miningReward) + totalTransactionFees)) { + return 
{ valid: Validity.No, reason: VerificationResultReason.INVALID_MINERS_FEE } + } + + return { valid: Validity.Yes } + } + + /** + * Verify that this block header is internally consistent. Does not verify + * the trees or its relationship to other blocks on the chain, and does not + * verify the transactions in the block. + * + * Specifically, it verifies that: + * * miners fee contains only one output note and no spends + * * miners fee is a valid transaction + * * the block hash meets the target hash on the block + * * the timestamp is not in future by our local clock time + */ + verifyBlockHeader( + blockHeader: BlockHeader, + options: { verifyTarget?: boolean } = { verifyTarget: true }, + ): VerificationResult { + if (blockHeader.graffiti.byteLength != 32) { + return { valid: Validity.No, reason: VerificationResultReason.GRAFFITI } + } + + if (this.enableVerifyTarget && options.verifyTarget && !blockHeader.verifyTarget()) { + return { valid: Validity.No, reason: VerificationResultReason.HASH_NOT_MEET_TARGET } + } + + if (blockHeader.timestamp.getTime() > Date.now() + ALLOWED_BLOCK_FUTURE_SECONDS * 1000) { + return { valid: Validity.No, reason: VerificationResultReason.TOO_FAR_IN_FUTURE } + } + + return { valid: Validity.Yes } + } + + /** + * Verify that a new transaction received over the network has valid proofs + * before forwarding it to the network. + * + * @params payload an unknown message payload that peerNetwork has received from the network. + * + * @returns deserialized transaction to be processed by the main handler. 
+ */ + verifyNewTransaction( + payload: PayloadType, + ): Promise<{ transaction: T; serializedTransaction: ST }> { + if (!isNewTransactionPayload(payload)) { + return Promise.reject('Payload is not a serialized transaction') + } + const serde = this.strategy.transactionSerde() + let transaction + try { + transaction = serde.deserialize(payload.transaction) + } catch { + return Promise.reject('Could not deserialize transaction') + } + if (!transaction.verify().valid) { + return Promise.reject('Transaction is invalid') + } + return Promise.resolve({ transaction, serializedTransaction: payload.transaction }) + } + + /** + * Verify that the header of this block is consistent with the one before it. + * + * Specifically, it checks: + * - The number of notes added is equal to the difference between + * commitment sizes + * - The number of nullifiers added is equal to the difference between + * commitment sizes + * - The timestamp of the block is within a threshold of not being before + * the previous block + * - The block sequence has incremented by one + */ + isValidAgainstPrevious( + current: Block, + previousHeader: BlockHeader, + ): VerificationResult { + const { notes, nullifiers } = current.counts() + + if (current.header.noteCommitment.size !== previousHeader.noteCommitment.size + notes) { + return { valid: Validity.No, reason: VerificationResultReason.NOTE_COMMITMENT_SIZE } + } + + if ( + current.header.nullifierCommitment.size !== + previousHeader.nullifierCommitment.size + nullifiers + ) { + return { valid: Validity.No, reason: VerificationResultReason.NULLIFIER_COMMITMENT_SIZE } + } + + if ( + current.header.timestamp.getTime() < + previousHeader.timestamp.getTime() - ALLOWED_BLOCK_FUTURE_SECONDS * 1000 + ) { + return { valid: Validity.No, reason: VerificationResultReason.BLOCK_TOO_OLD } + } + + if (current.header.sequence !== previousHeader.sequence + BigInt(1)) { + return { valid: Validity.No, reason: VerificationResultReason.SEQUENCE_OUT_OF_ORDER } + } + + 
if (!this.isValidTarget(current.header, previousHeader)) { + return { valid: Validity.No, reason: VerificationResultReason.INVALID_TARGET } + } + + return { valid: Validity.Yes } + } + + /** + * Verify that the target of this block is correct aginst the block before it. + */ + protected isValidTarget( + header: BlockHeader, + previous: BlockHeader, + ): boolean { + if (!this.enableVerifyTarget) return true + + const expectedTarget = Target.calculateTarget( + header.timestamp, + previous.timestamp, + previous.target, + ) + return header.target.targetValue === expectedTarget.targetValue + } + + /** + * Loop over all spends in the block and check that: + * - The nullifier has not previously been spent + * - the note being spent really existed in the tree at the time it was spent + */ + async hasValidSpends( + block: Block, + tx?: IDatabaseTransaction, + ): Promise { + return this.chain.db.withTransaction( + tx, + [ + this.chain.notes.counter, + this.chain.notes.leaves, + this.chain.notes.nodes, + this.chain.nullifiers.counter, + this.chain.nullifiers.leaves, + this.chain.nullifiers.nodes, + ], + 'read', + async (tx) => { + const spendsInThisBlock = Array.from(block.spends()) + const previousSpendCount = + block.header.nullifierCommitment.size - spendsInThisBlock.length + for (const [index, spend] of spendsInThisBlock.entries()) { + if (!(await this.verifySpend(spend, previousSpendCount + index, tx))) { + return { valid: Validity.No, reason: VerificationResultReason.INVALID_SPEND } + } + } + + return { valid: Validity.Yes } + }, + ) + } + + async isAddBlockValid( + previousHeader: BlockHeader | null, + block: Block | null, + addingGenesis: boolean, + addingToGenesis: boolean, + tx: IDatabaseTransaction, + ): Promise { + if (addingGenesis) return { valid: Validity.Yes } + + if (!block) { + return { valid: Validity.No } + } + + // if there's no previous block, we can't know if it's valid + if (!previousHeader) return { valid: Validity.Unknown } + + // if there is a 
previous block, but it's not connected to genesis, + // then we also can't know if it's valid + if (!addingToGenesis) return { valid: Validity.Unknown } + + return block.withTransactionReferences(async () => { + // now we know we have a previous, previous is connected to genesis + // (and therefore valid), so now we can check our current block + let verification + + verification = this.isValidAgainstPrevious(block, previousHeader) + if (verification.valid == Validity.No) { + return verification + } + + verification = this.verifyBlock(block) + if (verification.valid == Validity.No) { + return verification + } + + verification = await this.hasValidSpends(block, tx) + + return verification + }) + } + + /** + * Verify that the given spend was not in the nullifiers tree when it was the given size, + * and that the root of the notes tree is the one that is actually associated with the + * spend's spend root. + * + * @param spend the spend to be verified + * @param size the size of the nullifiers tree at which the spend must not exist + * @param tx optional transaction context within which to check the spends. + * TODO as its expensive, this would be a good place for a cache/map of verified Spends + */ + async verifySpend( + spend: Spend, + size: number, + tx?: IDatabaseTransaction, + ): Promise { + if (await this.chain.nullifiers.contained(spend.nullifier, size, tx)) { + return false + } + try { + const realSpendRoot = await this.chain.notes.pastRoot(spend.size, tx) + if (!this.strategy.noteHasher().hashSerde().equals(spend.commitment, realSpendRoot)) { + return false + } + } catch { + return false + } + + return true + // TODO (Elena) need to check trees when genesis - heaviest established + } + + /** + * Determine whether our trees match the commitment in the provided block. + * + * Matching means that the root hash of the tree when the tree is the size + * specified in the commitment is the same as the commitment, + * for both notes and nullifiers trees. 
+ */ + async blockMatchesTrees( + header: BlockHeader, + tx?: IDatabaseTransaction, + ): Promise { + return this.chain.db.withTransaction( + tx, + [ + this.chain.notes.counter, + this.chain.notes.leaves, + this.chain.notes.nodes, + this.chain.nullifiers.counter, + this.chain.nullifiers.leaves, + this.chain.nullifiers.nodes, + ], + 'read', + async (tx) => { + const noteSize = header.noteCommitment.size + const nullifierSize = header.nullifierCommitment.size + const actualNoteSize = await this.chain.notes.size(tx) + const actualNullifierSize = await this.chain.nullifiers.size(tx) + + if (actualNoteSize < noteSize) { + return false + } + if (actualNullifierSize < nullifierSize) { + return false + } + try { + const pastNoteRoot = await this.chain.notes.pastRoot(noteSize, tx) + if ( + !this.strategy + .noteHasher() + .hashSerde() + .equals(pastNoteRoot, header.noteCommitment.commitment) + ) { + return false + } + const pastNullifierRoot = await this.chain.nullifiers.pastRoot(nullifierSize, tx) + if ( + !this.strategy + .nullifierHasher() + .hashSerde() + .equals(pastNullifierRoot, header.nullifierCommitment.commitment) + ) { + return false + } + } catch { + return false + } + return true + }, + ) + } +} diff --git a/ironfish/src/captain/anchorChain/AnchorChainValidation.test.ts b/ironfish/src/captain/anchorChain/AnchorChainValidation.test.ts new file mode 100644 index 0000000000..fe024f4eb4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/AnchorChainValidation.test.ts @@ -0,0 +1,185 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import Target from './blockchain/Target' +import Block from './blockchain/Block' +import { RangeHasher } from './merkleTree' +import { + blockHash, + makeFakeBlock, + makeChainInitial, + TestBlockchain, + TestStrategy, + TestTransaction, + SerializedTestTransaction, +} from '../testUtilities' +import { Validity } from '..' + +describe('Calculates valid spends', () => { + const strategy = new TestStrategy(new RangeHasher()) + let blockchain: TestBlockchain + + beforeEach(async () => { + blockchain = await makeChainInitial(strategy) + await blockchain.notes.add('1') + await blockchain.nullifiers.add(Buffer.alloc(32)) + }) + + it('says a block with no spends is valid', async () => { + const block = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 3, 5) + await blockchain.addBlock(block) + expect((await blockchain.verifier.hasValidSpends(block)).valid).toBe(Validity.Yes) + }) + + it('says a block with valid spends is valid', async () => { + const block1 = makeFakeBlock(strategy, blockHash(0), blockHash(1), 1, 3, 5) + const block2 = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 6, 9) + const nullifier = Buffer.alloc(32) + block2.transactions[1]._spends.push({ nullifier, commitment: '1-1', size: 1 }) + await blockchain.addBlock(block1) + await blockchain.addBlock(block2) + expect((await blockchain.verifier.hasValidSpends(block2)).valid).toBe(Validity.Yes) + }) + it('says a block with double spend in that block is invalid', async () => { + const block1 = makeFakeBlock(strategy, blockHash(0), blockHash(1), 1, 3, 5) + const block2 = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 6, 9) + const nullifier = Buffer.alloc(32) + await blockchain.nullifiers.add(nullifier) + await blockchain.nullifiers.add(nullifier) + block2.header.nullifierCommitment.commitment = await blockchain.nullifiers.rootHash() + block2.header.nullifierCommitment.size = 3 + block2.transactions[1]._spends.push({ nullifier, commitment: '1-1', size: 1 }) + 
block2.transactions[2]._spends.push({ nullifier, commitment: '1-1', size: 1 }) + await blockchain.addBlock(block1) + await blockchain.addBlock(block2) + expect((await blockchain.verifier.hasValidSpends(block2)).valid).toBe(Validity.No) + }) + it('says a block that spends a note spent in a previous block is invalid', async () => { + const block1 = makeFakeBlock(strategy, blockHash(0), blockHash(1), 1, 3, 5) + const block2 = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 6, 9) + const nullifier = Buffer.alloc(32) + await blockchain.nullifiers.add(nullifier) + block1.header.nullifierCommitment.commitment = await blockchain.nullifiers.rootHash() + block1.header.nullifierCommitment.size = 2 + await blockchain.nullifiers.add(nullifier) + block2.header.nullifierCommitment.commitment = await blockchain.nullifiers.rootHash() + block2.header.nullifierCommitment.size = 3 + block2.transactions[1]._spends.push({ nullifier, commitment: '1-1', size: 1 }) + block2.transactions[2]._spends.push({ nullifier, commitment: '1-1', size: 1 }) + await blockchain.addBlock(block1) + await blockchain.addBlock(block2) + expect((await blockchain.verifier.hasValidSpends(block2)).valid).toBe(Validity.No) + }) + it('says a block that spends a note that was never in the tree is invalid', async () => { + const block1 = makeFakeBlock(strategy, blockHash(0), blockHash(1), 1, 3, 5) + const block2 = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 6, 9) + const nullifier = Buffer.alloc(32) + block2.transactions[1]._spends.push({ nullifier, commitment: 'noooo', size: 1 }) + await blockchain.addBlock(block1) + await blockchain.addBlock(block2) + expect((await blockchain.verifier.hasValidSpends(block2)).valid).toBe(Validity.No) + }) +}) + +describe('Header consistency is valid against previous', () => { + let strategy: TestStrategy + let blockchain: TestBlockchain + let block2: Block + let block3: Block + + beforeEach(async () => { + strategy = new TestStrategy(new RangeHasher()) + blockchain 
= await makeChainInitial(strategy) + + block2 = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 6, 9) + block3 = makeFakeBlock(strategy, blockHash(2), blockHash(3), 3, 10, 13) + }) + + it("is invalid when the note commitments aren't the same size", async () => { + block2.header.noteCommitment.size = 99 + await blockchain.addBlock(block2) + await blockchain.addBlock(block3) + + expect(blockchain.verifier.isValidAgainstPrevious(block3, block3.header)) + .toMatchInlineSnapshot(` + Object { + "reason": "Note commitment sizes do not match", + "valid": 0, + } + `) + }) + + it("is invalid when the nullifier commitments aren't the same size", async () => { + block2.header.nullifierCommitment.size = 99 + await blockchain.addBlock(block2) + await blockchain.addBlock(block3) + + expect(blockchain.verifier.isValidAgainstPrevious(block3, block3.header)) + .toMatchInlineSnapshot(` + Object { + "reason": "Note commitment sizes do not match", + "valid": 0, + } + `) + }) + + it('Is invalid when the timestamp is in past', async () => { + await blockchain.addBlock(block2) + await blockchain.addBlock(block3) + block3.header.timestamp = new Date(100) + + expect(blockchain.verifier.isValidAgainstPrevious(block3, block3.header)) + .toMatchInlineSnapshot(` + Object { + "reason": "Note commitment sizes do not match", + "valid": 0, + } + `) + }) + + it('Is invalid when the sequence is wrong', async () => { + await blockchain.addBlock(block2) + await blockchain.addBlock(block3) + block3.header.sequence = BigInt(99) + + expect(blockchain.verifier.isValidAgainstPrevious(block3, block3.header)) + .toMatchInlineSnapshot(` + Object { + "reason": "Note commitment sizes do not match", + "valid": 0, + } + `) + }) + + it('is valid when it is valid', async () => { + await blockchain.addBlock(block2) + await blockchain.addBlock(block3) + + expect(blockchain.verifier.isValidAgainstPrevious(block3, block3.header)) + .toMatchInlineSnapshot(` + Object { + "reason": "Note commitment sizes do not 
match", + "valid": 0, + } + `) + }) +}) + +describe('block verification', () => { + let strategy: TestStrategy + let blockchain: TestBlockchain + let targetSpy: jest.SpyInstance + + beforeEach(async () => { + targetSpy = jest.spyOn(Target, 'minDifficulty').mockImplementation(() => BigInt(1)) + strategy = new TestStrategy(new RangeHasher()) + blockchain = await makeChainInitial(strategy) + await blockchain.notes.add('1') + await blockchain.nullifiers.add(Buffer.alloc(32)) + }) + + afterAll(() => { + targetSpy.mockClear() + }) +}) diff --git a/ironfish/src/captain/anchorChain/Blocks.test.ts b/ironfish/src/captain/anchorChain/Blocks.test.ts new file mode 100644 index 0000000000..a825069424 --- /dev/null +++ b/ironfish/src/captain/anchorChain/Blocks.test.ts @@ -0,0 +1,178 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import Target from './blockchain/Target' +import { RangeHasher } from './merkleTree' +import { + addNotes, + blockHash, + makeFakeBlock, + makeChainInitial, + syncCommitments, + TestStrategy, + TestBlockchain, + TestTransaction, + makeNullifier, + makeNextBlock, +} from '../testUtilities' + +describe('Block matches', () => { + const strategy = new TestStrategy(new RangeHasher()) + + it('is true for block that passes all checks', async () => { + const blockchain = await makeChainInitial(strategy) + const header = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 3, 5).header + await addNotes(blockchain, [1, 2, 3, 4, 5]) + await blockchain.nullifiers.add(Buffer.alloc(32)) + header.nullifierCommitment.commitment = await blockchain.nullifiers.rootHash() + expect(await blockchain.verifier.blockMatchesTrees(header)).toBe(true) + }) + it("is false if there aren't enough notes in the tree", async () => { + const anchor = await makeChainInitial(strategy) + const header = makeFakeBlock(strategy, 
blockHash(1), blockHash(2), 2, 3, 5).header + await addNotes(anchor, [1, 2, 3, 4]) + await anchor.nullifiers.add(Buffer.alloc(32)) + header.nullifierCommitment.commitment = await anchor.nullifiers.rootHash() + expect(await anchor.verifier.blockMatchesTrees(header)).toBe(false) + }) + it("is false if there aren't enough nullifiers in the tree", async () => { + const anchor = await makeChainInitial(strategy) + const header = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 3, 5).header + await addNotes(anchor, [1, 2, 3, 4, 5]) + await anchor.nullifiers.add(Buffer.alloc(32)) + const secondNullifier = Buffer.alloc(32) + secondNullifier[0] = 1 + await anchor.nullifiers.add(secondNullifier) + header.nullifierCommitment.commitment = await anchor.nullifiers.rootHash() + header.nullifierCommitment.size = 8 + expect(await anchor.verifier.blockMatchesTrees(header)).toBe(false) + }) + it('is false if the note hash is incorrect', async () => { + const anchor = await makeChainInitial(strategy) + const header = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 3, 5).header + + await addNotes(anchor, [1, 2, 3, 4, 5]) + await anchor.nullifiers.add(Buffer.alloc(32)) + header.nullifierCommitment.commitment = await anchor.nullifiers.rootHash() + header.noteCommitment.commitment = 'NOOO' + expect(await anchor.verifier.blockMatchesTrees(header)).toBe(false) + }) + it('is false for block that has incorrect nullifier hash', async () => { + const anchor = await makeChainInitial(strategy) + const header = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 3, 5).header + await addNotes(anchor, [1, 2, 3, 4, 5]) + await anchor.nullifiers.add(Buffer.alloc(32)) + expect(await anchor.verifier.blockMatchesTrees(header)).toBe(false) + }) +}) + +describe('Anchorchain adding', () => { + const strategy = new TestStrategy(new RangeHasher()) + let blockchain: TestBlockchain + let listener: jest.Mock + let targetSpy: jest.SpyInstance + + beforeEach(async () => { + targetSpy = 
jest.spyOn(Target, 'minDifficulty').mockImplementation(() => BigInt(1)) + blockchain = await makeChainInitial(strategy) + listener = jest.fn() + blockchain.onChainHeadChange.on(listener) + }) + + afterAll(() => { + targetSpy.mockClear() + }) + + it('constructs an empty chain', async () => { + expect(await blockchain.notes.size()).toBe(0) + expect(await blockchain.nullifiers.size()).toBe(0) + expect(await blockchain.isEmpty()).toBe(true) + expect(listener).not.toBeCalled() + }) + + it('adds a genesis block', async () => { + expect(await blockchain.hasGenesisBlock()).toBe(false) + expect(await blockchain.isEmpty()).toBe(true) + + const block = makeFakeBlock(strategy, blockHash(0), blockHash(1), 1, 1, 5) + block.transactions[0]._spends.push({ + nullifier: Buffer.alloc(32), + commitment: 'something', + size: 1, + }) + await addNotes(blockchain, [1, 2, 3, 4, 5]) + await blockchain.nullifiers.add(Buffer.alloc(32)) + await syncCommitments(block.header, blockchain) + const addedBlockResult = await blockchain.addBlock(block) + expect(addedBlockResult.isAdded).toBe(true) + expect(await blockchain.notes.size()).toBe(5) + expect(await blockchain.nullifiers.size()).toBe(1) + expect((await blockchain.getHeaviestHead())?.hash).toEqualHash(blockHash(1)) + + expect(await blockchain.isEmpty()).toBe(false) + expect(await blockchain.hasGenesisBlock()).toBe(true) + }) +}) + +describe('New block', () => { + const strategy = new TestStrategy(new RangeHasher()) + let blockchain: TestBlockchain + let listener: jest.Mock + let targetSpy: jest.SpyInstance + let targetMeetsSpy: jest.SpyInstance + + beforeEach(async () => { + targetSpy = jest.spyOn(Target, 'minDifficulty').mockImplementation(() => BigInt(1)) + targetMeetsSpy = jest.spyOn(Target, 'meets').mockImplementation(() => true) + blockchain = await makeChainInitial(strategy) + listener = jest.fn() + blockchain.onChainHeadChange.on(listener) + }) + + afterAll(() => { + jest.useRealTimers() + targetSpy.mockClear() + 
targetMeetsSpy.mockClear() + }) + + it('creates a new block on an empty chain without failing', async () => { + const chain = await makeChainInitial(strategy) + await chain.notes.add('0') + await chain.nullifiers.add(makeNullifier(0)) + const genesis = await makeNextBlock(chain, true) + await chain.addBlock(genesis) + + const block = await makeNextBlock(chain) + await chain.addBlock(block) + + expect(await blockchain.notes.size()).toBe(0) + expect(await blockchain.nullifiers.size()).toBe(0) + expect(await blockchain.getHeaviestHead()).toBe(null) + expect(listener).not.toBeCalled() + }) + + it('throws an error if the provided transactions are invalid', async () => { + await blockchain.nullifiers.add(Buffer.alloc(32)) + const block1 = makeFakeBlock(strategy, blockHash(0), blockHash(1), 1, 1, 2) + block1.transactions[0]._spends.push({ + nullifier: Buffer.alloc(32), + commitment: '1-1', + size: 1, + }) + block1.header.nullifierCommitment.commitment = await blockchain.nullifiers.rootHash() + const block2 = makeFakeBlock(strategy, blockHash(1), blockHash(2), 2, 3, 5) + await blockchain.addBlock(block1) + await blockchain.addBlock(block2) + const fakeBlock = makeFakeBlock(strategy, blockHash(0), blockHash(0), 1, 9, 14) + fakeBlock.transactions[0]._spends.push({ + nullifier: Buffer.alloc(32), + commitment: '1-1', + size: 2, + }) + const minersFee = new TestTransaction(true, ['1'], 1) + await expect( + blockchain.newBlock(fakeBlock.transactions, minersFee), + ).rejects.toMatchInlineSnapshot(`[Error: Miner's fee is incorrect]`) + }) +}) diff --git a/ironfish/src/captain/anchorChain/NotesAndNullifiers.test.ts b/ironfish/src/captain/anchorChain/NotesAndNullifiers.test.ts new file mode 100644 index 0000000000..535383ce8e --- /dev/null +++ b/ironfish/src/captain/anchorChain/NotesAndNullifiers.test.ts @@ -0,0 +1,193 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { RangeHasher } from './merkleTree' +import { makeChainInitial, makeNullifier, TestStrategy, TestBlockchain } from '../testUtilities' + +import Target from './blockchain/Target' + +describe('Note adding', () => { + const strategy = new TestStrategy(new RangeHasher()) + let blockchain: TestBlockchain + let listener: jest.Mock + let targetSpy: jest.SpyInstance + beforeEach(async () => { + targetSpy = jest.spyOn(Target, 'minDifficulty').mockReturnValue(BigInt(1)) + blockchain = await makeChainInitial(strategy) + listener = jest.fn() + blockchain.onChainHeadChange.on(listener) + }) + + afterAll(() => [targetSpy.mockClear()]) + + it('immediately adds in order notes to the tree', async () => { + await blockchain.addNote(0, 'zero') + await blockchain.addNote(1, 'one') + expect(blockchain.looseNotes[0]).toBe('zero') + expect(blockchain.looseNotes[1]).toBe('one') + expect(await blockchain.notes.size()).toBe(2) + expect(await blockchain.notes.get(0)).toBe('zero') + expect(await blockchain.notes.get(1)).toBe('one') + expect(await blockchain.nullifiers.size()).toBe(0) + expect(await blockchain.getHeaviestHead()).toBeNull() + expect(listener).not.toBeCalled() + }) + it('adds an out of order note only to the loose notes', async () => { + await blockchain.addNote(10, 'ten') + await blockchain.addNote(11, 'eleven') + await blockchain.addNote(12, 'twelve') + expect(blockchain.looseNotes[10]).toBe('ten') + expect(blockchain.looseNotes[11]).toBe('eleven') + expect(blockchain.looseNotes[12]).toBe('twelve') + expect(await blockchain.notes.size()).toBe(0) + expect(await blockchain.nullifiers.size()).toBe(0) + expect(await blockchain.getHeaviestHead()).toBeNull() + expect(listener).not.toBeCalled() + }) + it('syncs loose notes to the tree when the gap fills in', async () => { + await blockchain.addNote(2, 'two') + await blockchain.addNote(1, 'one') + await 
blockchain.addNote(0, 'zero') + expect(blockchain.looseNotes[0]).toBe('zero') + expect(blockchain.looseNotes[1]).toBe('one') + expect(blockchain.looseNotes[2]).toBe('two') + expect(await blockchain.notes.size()).toBe(3) + expect(await blockchain.notes.get(0)).toBe('zero') + expect(await blockchain.notes.get(1)).toBe('one') + expect(await blockchain.notes.get(2)).toBe('two') + expect(await blockchain.nullifiers.size()).toBe(0) + expect(await blockchain.getHeaviestHead()).toBeNull() + expect(listener).not.toBeCalled() + }) + it("warns if the note doesn't match the previously inserted note that position", async () => { + const warnFn = jest.fn() + blockchain['logger'].mockTypes((type) => { + return type === 'warn' ? warnFn : () => {} + }) + await blockchain.addNote(0, 'zero') + await blockchain.addNote(1, 'one') + await blockchain.addNote(2, 'two') + await blockchain.addNote(2, 'not two') + expect(warnFn).toHaveBeenCalled() + }) +}) + +describe('Nullifier adding', () => { + const strategy = new TestStrategy(new RangeHasher()) + let blockchain: TestBlockchain + let listener: jest.Mock + beforeEach(async () => { + blockchain = await makeChainInitial(strategy) + listener = jest.fn() + blockchain.onChainHeadChange.on(listener) + }) + + it('immediately adds in order nullifiers to the tree', async () => { + const nullifier1 = Buffer.alloc(32) + const nullifier2 = makeNullifier(1) + await blockchain.addNullifier(0, nullifier1) + await blockchain.addNullifier(1, nullifier2) + expect(blockchain.looseNullifiers[0]).toEqualNullifier(nullifier1) + expect(blockchain.looseNullifiers[1]).toEqualNullifier(nullifier2) + expect(await blockchain.nullifiers.size()).toBe(2) + expect(await blockchain.nullifiers.get(0)).toEqualNullifier(nullifier1) + expect(await blockchain.nullifiers.get(1)).toEqualNullifier(nullifier2) + expect(await blockchain.notes.size()).toBe(0) + expect(await blockchain.getHeaviestHead()).toBeNull() + expect(listener).not.toBeCalled() + }) + it('adds an out of order 
nullifier only to the loose nullifiers', async () => { + const nullifier1 = makeNullifier(10) + const nullifier2 = makeNullifier(11) + const nullifier3 = makeNullifier(12) + await blockchain.addNullifier(10, nullifier1) + await blockchain.addNullifier(11, nullifier2) + await blockchain.addNullifier(12, nullifier3) + expect(blockchain.looseNullifiers[10]).toEqualNullifier(nullifier1) + expect(blockchain.looseNullifiers[11]).toEqualNullifier(nullifier2) + expect(blockchain.looseNullifiers[12]).toEqualNullifier(nullifier3) + expect(await blockchain.notes.size()).toBe(0) + expect(await blockchain.nullifiers.size()).toBe(0) + expect(await blockchain.getHeaviestHead()).toBeNull() + expect(listener).not.toBeCalled() + }) + it('syncs loose nullifiers to the tree when the gap fills in', async () => { + const nullifier0 = Buffer.alloc(32) + const nullifier1 = makeNullifier(1) + const nullifier2 = makeNullifier(2) + await blockchain.addNullifier(2, nullifier2) + await blockchain.addNullifier(1, nullifier1) + await blockchain.addNullifier(0, nullifier0) + expect(blockchain.looseNullifiers[0]).toEqualNullifier(nullifier0) + expect(blockchain.looseNullifiers[1]).toEqualNullifier(nullifier1) + expect(blockchain.looseNullifiers[2]).toEqualNullifier(nullifier2) + expect(await blockchain.nullifiers.size()).toBe(3) + expect(await blockchain.nullifiers.get(0)).toEqualNullifier(nullifier0) + expect(await blockchain.nullifiers.get(1)).toEqualNullifier(nullifier1) + expect(await blockchain.nullifiers.get(2)).toEqualNullifier(nullifier2) + expect(await blockchain.notes.size()).toBe(0) + expect(await blockchain.getHeaviestHead()).toBeNull() + expect(listener).not.toBeCalled() + }) + it("warns if the note doesn't match the previously inserted note that position", async () => { + const warnFn = jest.fn() + blockchain['logger'].mockTypes((type) => { + return type === 'warn' ? 
warnFn : () => {} + }) + const nullifier0 = Buffer.alloc(32) + const nullifier1 = makeNullifier(1) + const nullifier2 = makeNullifier(2) + await blockchain.addNullifier(0, nullifier0) + await blockchain.addNullifier(1, nullifier1) + await blockchain.addNullifier(2, nullifier2) + await blockchain.addNullifier(2, nullifier0) + expect(warnFn).toHaveBeenCalled() + }) + + it('sixNullifierRoot matches expected rootHash', async () => { + await blockchain.addNullifier(0, makeNullifier(0)) + await blockchain.addNullifier(1, makeNullifier(1)) + await blockchain.addNullifier(2, makeNullifier(2)) + await blockchain.addNullifier(3, makeNullifier(3)) + await blockchain.addNullifier(4, makeNullifier(4)) + await blockchain.addNullifier(5, makeNullifier(5)) + const rootHash = await blockchain.nullifiers.rootHash() + expect(rootHash.equals(sixNullifierRoot)).toBeTruthy() + }) +}) + +const sixNullifierRoot = Buffer.from([ + 225, + 164, + 205, + 91, + 37, + 68, + 206, + 32, + 128, + 69, + 41, + 50, + 240, + 78, + 211, + 128, + 227, + 49, + 167, + 139, + 132, + 31, + 71, + 88, + 44, + 71, + 19, + 28, + 204, + 126, + 14, + 152, +]) diff --git a/ironfish/src/captain/anchorChain/__snapshots__/nullifiers.test.ts.snap b/ironfish/src/captain/anchorChain/__snapshots__/nullifiers.test.ts.snap new file mode 100644 index 0000000000..da891d0cc8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/__snapshots__/nullifiers.test.ts.snap @@ -0,0 +1,578 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`NullifierHasher calculates some hashes 1`] = ` +Object { + "data": Array [ + 67, + 115, + 120, + 254, + 209, + 140, + 189, + 171, + 175, + 57, + 73, + 186, + 56, + 33, + 7, + 236, + 74, + 220, + 100, + 24, + 245, + 102, + 148, + 82, + 6, + 248, + 98, + 57, + 23, + 49, + 189, + 20, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher calculates some hashes 2`] = ` +Object { + "data": Array [ + 195, + 114, + 61, + 203, + 20, + 56, + 192, + 94, + 45, + 189, + 160, + 148, + 66, + 29, + 20, + 
86, + 67, + 142, + 160, + 142, + 233, + 149, + 231, + 31, + 242, + 152, + 240, + 205, + 35, + 138, + 205, + 174, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher calculates some hashes 3`] = ` +Object { + "data": Array [ + 97, + 57, + 177, + 159, + 237, + 46, + 198, + 53, + 160, + 65, + 202, + 136, + 103, + 85, + 165, + 180, + 118, + 208, + 241, + 21, + 245, + 77, + 56, + 152, + 201, + 193, + 49, + 127, + 24, + 55, + 166, + 161, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher calculates some hashes 4`] = ` +Object { + "data": Array [ + 236, + 235, + 40, + 193, + 243, + 199, + 129, + 29, + 151, + 84, + 2, + 130, + 45, + 21, + 217, + 77, + 135, + 225, + 117, + 84, + 145, + 99, + 94, + 72, + 84, + 56, + 66, + 247, + 175, + 201, + 201, + 86, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher calculates some hashes 5`] = ` +Object { + "data": Array [ + 169, + 144, + 47, + 150, + 16, + 16, + 187, + 64, + 98, + 50, + 142, + 61, + 53, + 60, + 133, + 192, + 190, + 224, + 49, + 183, + 113, + 207, + 82, + 201, + 137, + 197, + 169, + 15, + 80, + 82, + 119, + 236, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher calculates some hashes 6`] = ` +Object { + "data": Array [ + 8, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 125, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 2, + 0, + 0, + 255, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher combines hashes 1`] = ` +Object { + "data": Array [ + 42, + 17, + 91, + 233, + 98, + 215, + 92, + 97, + 155, + 68, + 237, + 150, + 128, + 135, + 221, + 175, + 189, + 218, + 100, + 173, + 86, + 15, + 197, + 101, + 176, + 132, + 68, + 57, + 163, + 128, + 230, + 57, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher combines hashes 2`] = ` +Object { + "data": Array [ + 7, + 247, + 77, + 25, + 22, + 30, + 119, + 81, + 46, + 173, + 220, + 9, + 39, + 54, + 126, + 3, + 55, + 255, + 127, + 178, + 60, + 216, + 28, + 141, + 139, + 202, + 29, + 240, + 
6, + 198, + 183, + 149, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher combines hashes 3`] = ` +Object { + "data": Array [ + 82, + 149, + 204, + 114, + 47, + 108, + 139, + 23, + 139, + 230, + 97, + 218, + 41, + 245, + 241, + 237, + 253, + 174, + 83, + 9, + 73, + 121, + 193, + 150, + 68, + 47, + 120, + 60, + 123, + 29, + 120, + 52, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher combines hashes 4`] = ` +Object { + "data": Array [ + 48, + 205, + 187, + 142, + 215, + 230, + 15, + 250, + 230, + 38, + 180, + 253, + 89, + 170, + 80, + 17, + 25, + 235, + 91, + 99, + 92, + 35, + 59, + 201, + 45, + 212, + 142, + 5, + 111, + 85, + 111, + 219, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher combines hashes 5`] = ` +Object { + "data": Array [ + 32, + 82, + 114, + 141, + 182, + 53, + 217, + 9, + 99, + 240, + 237, + 63, + 196, + 68, + 10, + 147, + 133, + 5, + 102, + 21, + 32, + 97, + 220, + 16, + 16, + 209, + 89, + 253, + 185, + 221, + 200, + 106, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher combines hashes 6`] = ` +Object { + "data": Array [ + 80, + 232, + 132, + 50, + 176, + 210, + 46, + 48, + 117, + 83, + 46, + 150, + 152, + 153, + 91, + 160, + 244, + 61, + 16, + 129, + 83, + 111, + 210, + 103, + 234, + 81, + 205, + 187, + 91, + 128, + 56, + 24, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher combines hashes 7`] = ` +Object { + "data": Array [ + 8, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 2, + 0, + 0, + 0, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher combines hashes 8`] = ` +Object { + "data": Array [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 125, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 255, + ], + "type": "Buffer", +} +`; + +exports[`NullifierHasher constructs a nullifier hasher 1`] = ` +NullifierHasher { + "_elementSerde": 
BufferSerde { + "serde": Uint8ArraySerde { + "size": 32, + }, + "size": 32, + }, + "_hashSerde": BufferSerde { + "serde": Uint8ArraySerde { + "size": 32, + }, + "size": 32, + }, +} +`; diff --git a/ironfish/src/captain/anchorChain/blockchain/Block.test.ts b/ironfish/src/captain/anchorChain/blockchain/Block.test.ts new file mode 100644 index 0000000000..bd623c6e1b --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/Block.test.ts @@ -0,0 +1,90 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { + TestTransaction, + TestStrategy, + SerializedTestTransaction, + makeFakeBlock, + blockHash, +} from '../../testUtilities' +import Block, { BlockSerde, SerializedBlock } from './Block' + +describe('Block', () => { + const strategy = new TestStrategy() + let block: Block + + beforeEach(() => { + block = makeFakeBlock(strategy, blockHash(4), blockHash(5), 5, 5, 9) + }) + + it('correctly counts notes and nullifiers', () => { + block.transactions[1]._spends.push({ + nullifier: Buffer.alloc(32), + commitment: 'Spent', + size: 1, + }) + expect(block.counts()).toMatchInlineSnapshot(` + Object { + "notes": 5, + "nullifiers": 1, + } + `) + }) + + it('serializes and deserializes a block', () => { + const serde = new BlockSerde(strategy) + const serialized = serde.serialize(block) + expect(serialized).toMatchSnapshot({ header: { timestamp: expect.any(Number) } }) + const deserialized = serde.deserialize(serialized) + expect(serde.equals(deserialized, block)).toBe(true) + }) + + it('throws when deserializing invalid data', () => { + const serde = new BlockSerde(strategy) + expect(() => + serde.deserialize(({ bad: 'data' } as unknown) as SerializedBlock< + string, + SerializedTestTransaction + >), + ).toThrowErrorMatchingInlineSnapshot(`"Unable to deserialize"`) + }) + + it('does not compare unequal blocks 
as equal', () => { + const serde = new BlockSerde(strategy) + const block2 = makeFakeBlock(strategy, blockHash(4), blockHash(5), 5, 5, 9) + block2.header.timestamp = block.header.timestamp + expect(serde.equals(block, block2)).toBe(true) + + block2.header.randomness = 400 + expect(serde.equals(block, block2)).toBe(false) + + const block3 = makeFakeBlock(strategy, blockHash(4), blockHash(5), 5, 5, 8) + block3.header.timestamp = block.header.timestamp + expect(serde.equals(block, block3)).toBe(false) + + const block4 = makeFakeBlock(strategy, blockHash(4), blockHash(5), 5, 5, 9) + block4.header.timestamp = block.header.timestamp + block4.transactions[0].totalFees = BigInt(999) + expect(serde.equals(block, block4)).toBe(false) + }) + + it('iterates over spends', () => { + block.transactions = [ + new TestTransaction(true, ['one', 'two'], 5, [ + { nullifier: Buffer.alloc(32), commitment: 'One', size: 1 }, + ]), + new TestTransaction(true, ['three', 'four'], 5, [ + { nullifier: Buffer.alloc(32), commitment: 'Two', size: 1 }, + ]), + ] + const spends = Array.from(block.spends()) + expect(spends).toHaveLength(2) + }) + + it('iterates over notes', () => { + const notes = Array.from(block.allNotes()) + expect(notes).toHaveLength(5) + }) +}) diff --git a/ironfish/src/captain/anchorChain/blockchain/Block.ts b/ironfish/src/captain/anchorChain/blockchain/Block.ts new file mode 100644 index 0000000000..8597819063 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/Block.ts @@ -0,0 +1,151 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { zip } from 'lodash' +import BlockHeader, { BlockHeaderSerde, SerializedBlockHeader } from './BlockHeader' +import Transaction from '../strategies/Transaction' +import Serde, { JsonSerializable } from '../../../serde' +import Strategy from '../strategies' +import { Nullifier } from '../nullifiers' + +/** + * Represent a single block in the chain. Essentially just a block header + * and the list of transactions that were added to the tree between the + * previous block and the ones committed to in this header. + */ +export default class Block< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> { + header: BlockHeader + transactions: T[] + + constructor(header: BlockHeader, transactions: T[]) { + this.header = header + this.transactions = transactions + } + + /** + * Get the number of notes and nullifiers stored on this block. + */ + counts(): SerializedCounts { + let notes = 0 + let nullifiers = 0 + for (const transaction of this.transactions) { + transaction.withReference(() => { + notes += transaction.notesLength() + nullifiers += transaction.spendsLength() + }) + } + return { notes, nullifiers } + } + + withTransactionReferences(callback: () => R | Promise): R | Promise { + for (const t of this.transactions) { + t.takeReference() + } + + try { + return callback() + } finally { + for (const t of this.transactions) { + t.returnReference() + } + } + } + + /** + * Get a list of all spends on transactions in this block. + * + * Note: there is no spend on a miner's fee transaction in the header + */ + *spends(): Generator<{ nullifier: Nullifier; commitment: H; size: number }> { + for (const transaction of this.transactions) { + for (const spend of transaction.spends()) { + yield spend + } + } + } + + /** + * Get a list of all notes created in this block including the miner's fee + * note on the header. 
+ */ + *allNotes(): Generator { + for (const transaction of this.transactions) { + for (const note of transaction.notes()) { + yield note + } + } + } +} + +export type SerializedBlock = { + header: SerializedBlockHeader + transactions: ST[] +} + +export type SerializedCounts = { notes: number; nullifiers: number } + +export class BlockSerde< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> implements Serde, SerializedBlock> { + blockHeaderSerde: BlockHeaderSerde + + constructor(readonly strategy: Strategy) { + this.blockHeaderSerde = new BlockHeaderSerde(strategy) + } + + equals(block1: Block, block2: Block): boolean { + if (!this.blockHeaderSerde.equals(block1.header, block2.header)) { + return false + } + if (block1.transactions.length !== block2.transactions.length) { + return false + } + for (const [transaction1, transaction2] of zip(block1.transactions, block2.transactions)) { + if ( + transaction1 == null || + transaction2 == null || + !this.strategy.transactionSerde().equals(transaction1, transaction2) + ) { + return false + } + } + return true + } + serialize(block: Block): SerializedBlock { + return { + header: this.blockHeaderSerde.serialize(block.header), + transactions: block.transactions.map((t) => + this.strategy.transactionSerde().serialize(t), + ), + } + } + deserialize(data: SerializedBlock): Block { + if ( + typeof data === 'object' && + data != null && + 'header' in data && + 'transactions' in data && + Array.isArray(data.transactions) + ) { + const header = this.blockHeaderSerde.deserialize(data.header) + const transactions = data.transactions.map((t) => + this.strategy.transactionSerde().deserialize(t), + ) + return new Block(header, transactions) + } + throw new Error('Unable to deserialize') + } +} diff --git a/ironfish/src/captain/anchorChain/blockchain/BlockHeader.test.ts b/ironfish/src/captain/anchorChain/blockchain/BlockHeader.test.ts new file mode 100644 index 0000000000..d843c7e391 
--- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/BlockHeader.test.ts @@ -0,0 +1,120 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import BlockHeader, { BlockHeaderSerde } from './BlockHeader' +import { TestStrategy } from '../../testUtilities' +import Target from './Target' + +describe('Block Header Serde', () => { + it('Compares two equivalent block headers as equal', () => { + const strategy = new TestStrategy() + const header1 = new BlockHeader( + strategy, + BigInt(5), + Buffer.alloc(32), + { commitment: 'header', size: 8 }, + { commitment: Buffer.alloc(32), size: 3 }, + new Target(17), + 25, + new Date(1598467858637), + BigInt(0), + Buffer.alloc(32), + ) + const header2 = new BlockHeader( + new TestStrategy(), + BigInt(5), + Buffer.alloc(32), + { commitment: 'header', size: 8 }, + { commitment: Buffer.alloc(32), size: 3 }, + new Target(17), + 25, + new Date(1598467858637), + BigInt(0), + Buffer.alloc(32), + ) + expect(new BlockHeaderSerde(strategy).equals(header1, header2)).toBe(true) + }) + + it('does not compare different blocks as equal', () => { + const strategy = new TestStrategy() + const serde = new BlockHeaderSerde(strategy) + const header1 = new BlockHeader( + strategy, + BigInt(5), + Buffer.alloc(32), + { commitment: 'header', size: 8 }, + { commitment: Buffer.alloc(32), size: 3 }, + new Target(17), + 25, + new Date(1598467858637), + BigInt(0), + Buffer.alloc(32), + ) + const header2 = new BlockHeader( + new TestStrategy(), + BigInt(5), + Buffer.alloc(32), + { commitment: 'header', size: 8 }, + { commitment: Buffer.alloc(32), size: 3 }, + new Target(17), + 25, + new Date(1598467858637), + BigInt(0), + Buffer.alloc(32), + ) + header2.sequence = BigInt(6) + expect(serde.equals(header1, header2)).toBe(false) + header2.sequence = BigInt(5) + 
header2.noteCommitment.commitment = 'Not header' + expect(serde.equals(header1, header2)).toBe(false) + header2.noteCommitment.commitment = 'header' + header2.noteCommitment.size = 7 + expect(serde.equals(header1, header2)).toBe(false) + header2.noteCommitment.size = 8 + header2.nullifierCommitment.commitment[0] = 8 + expect(serde.equals(header1, header2)).toBe(false) + header2.nullifierCommitment.commitment[0] = 0 + header2.nullifierCommitment.size = 4 + expect(serde.equals(header1, header2)).toBe(false) + header2.nullifierCommitment.size = 3 + header2.target = new Target(18) + expect(serde.equals(header1, header2)).toBe(false) + header2.target = new Target(17) + header2.randomness = 24 + expect(serde.equals(header1, header2)).toBe(false) + header2.randomness = 25 + header2.timestamp = new Date() + expect(serde.equals(header1, header2)).toBe(false) + header2.timestamp = new Date(1598467858637) + expect(serde.equals(header1, header2)).toBe(true) + header2.graffiti = Buffer.alloc(32, 'a') + expect(serde.equals(header1, header2)).toBe(false) + header2.graffiti = Buffer.alloc(32) + }) + + it('serializes and deserializes a block header', () => { + const strategy = new TestStrategy() + const serde = new BlockHeaderSerde(strategy) + + const graffiti = Buffer.alloc(32) + graffiti.write('test') + + const header1 = new BlockHeader( + strategy, + BigInt(5), + Buffer.alloc(32), + { commitment: 'header', size: 8 }, + { commitment: Buffer.alloc(32), size: 3 }, + new Target(17), + 25, + new Date(1598467858637), + BigInt(-1), + graffiti, + ) + const serialized = serde.serialize(header1) + expect(serialized).toMatchSnapshot() + const deserialized = serde.deserialize(serialized) + expect(serde.equals(header1, deserialized)).toBe(true) + }) +}) diff --git a/ironfish/src/captain/anchorChain/blockchain/BlockHeader.ts b/ironfish/src/captain/anchorChain/blockchain/BlockHeader.ts new file mode 100644 index 0000000000..ef9f62dfce --- /dev/null +++ 
b/ironfish/src/captain/anchorChain/blockchain/BlockHeader.ts @@ -0,0 +1,349 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import Strategy from '../strategies' +import Transaction from '../strategies/Transaction' +import { NullifierHash } from '../nullifiers' +import Target, { TargetSerde } from './Target' +import Serde, { BufferSerde, JsonSerializable, IJSON } from '../../../serde' +import { GRAPH_ID_NULL } from '.' + +export type BlockHash = Buffer +export type Sequence = bigint +export const ALLOWED_BLOCK_FUTURE_SECONDS = 30 + +export default class BlockHeader< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> { + // Strategy for hashing block and tree nodes and calculating targets + public strategy: Strategy + + /** + * The sequence number of the block. Blocks in a chain increase in ascending + * order of sequence. More than one block may have the same sequence, + * indicating a fork in the chain, but only one fork is selected at a time. + */ + public sequence: Sequence + + /** + * The hash of the previous block in the chain + */ + public previousBlockHash: BlockHash + + /** + * Commitment to the note tree after all new notes from transactions in this + * block have been added to it. Stored as the hash and the size of the tree + * at the time the hash was calculated. + */ + public noteCommitment: { commitment: H; size: number } + + /** + * Commitment to the nullifier set after all the spends in this block have + * been added to it. Stored as the nullifier hash and the size of the set + * at the time the hash was calculated. + */ + public nullifierCommitment: { commitment: NullifierHash; size: number } + + /** + * The hash of the block must be lower than this target value in order for + * the blocks to be accepted on the chain. 
Essentially a numerical comparison + * of a very big integer. + */ + public target: Target + + /** + * A value added to the block to try to make it hash to something that is below + * the target number. + */ + public randomness: number + + /** + * Unix timestamp according to the miner who mined the block. This value + * must be taken with a grain of salt, but miners must verify that it is an + * appropriate distance to the previous blocks timestamp. + */ + public timestamp: Date + + /** + * A single transaction representing the miner's fee, awarded to the successful + * miner for mining the block plus the transaction fees offered by spending users. + * This is the only way inflation happens on the chain. + * + * Note that the transaction fee on a minersFee is negative. By "spending a negative value" + * the miner is awarding itself a positive receipt. + */ + public minersFee: BigInt + + /** + * A 32 byte field that may be assigned at will by the miner who mined the block. + */ + public graffiti: Buffer + + /** + * (For internal uses — excluded when sent over the network) + * Is this block valid. 
If valid, all blocks before it are also valid + */ + public isValid: boolean + + /** + * (For internal uses — excluded when sent over the network) + * Cumulative work from genesis to this block + */ + public work: BigInt + + /** + * (For internal uses — excluded when sent over the network) + * graphId of GRAPH_ID_NULL (0) indicates no graph has been assigned to a block + * (should never happen after block has been added) + */ + public graphId: number + + /** + * (For internal uses — excluded when sent over the network) + * the number of blocks pointing to this one + * (number of next blocks) + */ + public count: number + + public hash: Buffer + + private bufferSerde: BufferSerde + private targetSerde: TargetSerde + + constructor( + strategy: Strategy, + sequence: bigint, + previousBlockHash: BlockHash, + noteCommitment: { commitment: H; size: number }, + nullifierCommitment: { commitment: NullifierHash; size: number }, + target: Target, + randomness = 0, + timestamp: Date | undefined = undefined, + minersFee: BigInt, + graffiti: Buffer, + isValid = false, + work: BigInt = BigInt(0), + graphId: number = GRAPH_ID_NULL, + count = 0, + hash?: Buffer, + ) { + this.targetSerde = new TargetSerde() + this.bufferSerde = new BufferSerde(32) + + this.strategy = strategy + this.sequence = sequence + this.previousBlockHash = previousBlockHash + this.noteCommitment = noteCommitment + this.nullifierCommitment = nullifierCommitment + this.target = target + this.randomness = randomness + this.timestamp = timestamp || new Date() + this.minersFee = minersFee + this.isValid = isValid + this.work = work + this.graphId = graphId + this.count = count + this.graffiti = graffiti + this.hash = hash || this.recomputeHash() + } + + /** + * Construct a partial block header without the randomness and convert + * it to buffer. 
+ * + * This is used for calculating the hash in miners and for verifying it.[] + */ + serializePartial(): Buffer { + const serialized = { + sequence: this.sequence.toString(), + previousBlockHash: this.bufferSerde.serialize(this.previousBlockHash), + noteCommitment: { + commitment: this.strategy + .noteHasher() + .hashSerde() + .serialize(this.noteCommitment.commitment), + size: this.noteCommitment.size, + }, + nullifierCommitment: { + commitment: this.strategy + .nullifierHasher() + .hashSerde() + .serialize(this.nullifierCommitment.commitment), + size: this.nullifierCommitment.size, + }, + target: this.targetSerde.serialize(this.target), + timestamp: this.timestamp.getTime(), + minersFee: this.minersFee.toString(), + graffiti: this.bufferSerde.serialize(this.graffiti), + } + + return Buffer.from(IJSON.stringify(serialized)) + } + + /** + * Hash all the values in the block header to get a commitment to the entire + * header and the global trees it models. + */ + recomputeHash(): BlockHash { + const randomnessBytes = new ArrayBuffer(8) + new DataView(randomnessBytes).setFloat64(0, this.randomness, false) + const headerBytes = Buffer.concat([Buffer.from(randomnessBytes), this.serializePartial()]) + const hash = this.strategy.hashBlockHeader(headerBytes) + this.hash = hash + return hash + } + /** + * Check whether the hash of this block is less than the target stored + * within the block header. This is the primary proof of work function. + * + * Hashes cannot be predicted, and the only way to find one that is lower + * than the target that is inside it is to tweak the randomness number + * repeatedly. 
+ */ + verifyTarget(): boolean { + return Target.meets(new Target(this.recomputeHash()).asBigInt(), this.target) + } +} + +export type SerializedBlockHeader = { + sequence: string + previousBlockHash: string + noteCommitment: { + commitment: SH + size: number + } + nullifierCommitment: { + commitment: string + size: number + } + target: string + randomness: number + timestamp: number + minersFee: string + + isValid: boolean + work: string + graphId: number + count: number + hash: string + graffiti: string +} + +export class BlockHeaderSerde< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> implements Serde, SerializedBlockHeader> { + private bufferSerde: BufferSerde + private targetSerde: TargetSerde + constructor(readonly strategy: Strategy) { + this.strategy = strategy + this.targetSerde = new TargetSerde() + this.bufferSerde = new BufferSerde(32) + } + + equals( + element1: BlockHeader, + element2: BlockHeader, + ): boolean { + return ( + element1.sequence === element2.sequence && + this.strategy + .noteHasher() + .hashSerde() + .equals(element1.noteCommitment.commitment, element2.noteCommitment.commitment) && + element1.noteCommitment.size === element2.noteCommitment.size && + this.strategy + .nullifierHasher() + .hashSerde() + .equals( + element1.nullifierCommitment.commitment, + element2.nullifierCommitment.commitment, + ) && + element1.nullifierCommitment.size === element2.nullifierCommitment.size && + this.targetSerde.equals(element1.target, element2.target) && + element1.randomness === element2.randomness && + element1.timestamp.getTime() === element2.timestamp.getTime() && + element1.minersFee === element2.minersFee && + element1.graffiti.equals(element2.graffiti) + ) + } + + serialize(header: BlockHeader): SerializedBlockHeader { + const serialized = { + sequence: header.sequence.toString(), + previousBlockHash: this.bufferSerde.serialize(header.previousBlockHash), + noteCommitment: { + commitment: 
this.strategy + .noteHasher() + .hashSerde() + .serialize(header.noteCommitment.commitment), + size: header.noteCommitment.size, + }, + nullifierCommitment: { + commitment: this.strategy + .nullifierHasher() + .hashSerde() + .serialize(header.nullifierCommitment.commitment), + size: header.nullifierCommitment.size, + }, + target: this.targetSerde.serialize(header.target), + randomness: header.randomness, + timestamp: header.timestamp.getTime(), + minersFee: header.minersFee.toString(), + isValid: header.isValid, + work: header.work.toString(), + graphId: header.graphId, + count: header.count, + hash: this.bufferSerde.serialize(header.hash), + graffiti: this.bufferSerde.serialize(header.graffiti), + } + + return serialized + } + + deserialize(data: SerializedBlockHeader): BlockHeader { + // TODO: this needs to make assertions on the data format + // as it can be from untrusted sources + const header = new BlockHeader( + this.strategy, + BigInt(data.sequence), + Buffer.from(this.bufferSerde.deserialize(data.previousBlockHash)), + { + commitment: this.strategy + .noteHasher() + .hashSerde() + .deserialize(data.noteCommitment.commitment), + size: data.noteCommitment.size, + }, + { + commitment: this.strategy + .nullifierHasher() + .hashSerde() + .deserialize(data.nullifierCommitment.commitment), + size: data.nullifierCommitment.size, + }, + this.targetSerde.deserialize(data.target), + data.randomness, + new Date(data.timestamp), + BigInt(data.minersFee), + Buffer.from(this.bufferSerde.deserialize(data.graffiti)), + data.isValid, + data.work ? 
BigInt(data.work) : BigInt(0), + data.graphId, + data.count, + Buffer.from(this.bufferSerde.deserialize(data.hash)), + ) + + return header + } +} diff --git a/ironfish/src/captain/anchorChain/blockchain/Graph.ts b/ironfish/src/captain/anchorChain/blockchain/Graph.ts new file mode 100644 index 0000000000..7299f181d4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/Graph.ts @@ -0,0 +1,12 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { BlockHash } from './' + +export type Graph = { + id: number + mergeId: number | null + tailHash: BlockHash + heaviestHash: BlockHash | null + latestHash: BlockHash +} diff --git a/ironfish/src/captain/anchorChain/blockchain/Schema.ts b/ironfish/src/captain/anchorChain/blockchain/Schema.ts new file mode 100644 index 0000000000..430e133e79 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/Schema.ts @@ -0,0 +1,47 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { BlockHash, SerializedBlockHeader } from './BlockHeader' +import { DatabaseSchema } from '../../../storage' +import { SerializedCounts } from './Block' +import { Graph } from './Graph' + +export const SCHEMA_VERSION = 1 + +export interface HeadersSchema extends DatabaseSchema { + key: BlockHash + value: SerializedBlockHeader +} + +export interface TransactionsSchema extends DatabaseSchema { + key: BlockHash + value: ST[] // Whatever the strategy chooses +} + +export interface ChainTailsSchema extends DatabaseSchema { + key: BlockHash // the block hash that you want to find the tail for + value: BlockHash // The tail of the chain that starts at the key's head +} + +export interface CountsSchema extends DatabaseSchema { + key: BlockHash + value: SerializedCounts +} + +// Essentially an index, but one sequence can have multiple hashes +export interface SequenceToHashSchema extends DatabaseSchema { + key: string + value: BlockHash[] +} + +// Essentially an index, but one sequence can have multiple hashes +export interface HashToNextSchema extends DatabaseSchema { + key: BlockHash + value: BlockHash[] +} + +export interface GraphSchema extends DatabaseSchema { + key: string + value: Graph +} diff --git a/ironfish/src/captain/anchorChain/blockchain/Target.test.ts b/ironfish/src/captain/anchorChain/blockchain/Target.test.ts new file mode 100644 index 0000000000..e3abfdd118 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/Target.test.ts @@ -0,0 +1,195 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import Target, { TargetSerde, bigIntToBytes, bytesToBigInt } from './Target' + +describe('Target', () => { + it('converts bigints to bytes and back', () => { + const bigints = [ + BigInt(0), + BigInt( + '9999999999999999999999999999999999999999999999999999999999999999999999999999999999', + ), + BigInt(255), + BigInt(256), + BigInt(1024), + BigInt(1025), + ] + for (const candidate of bigints) { + const bytes = bigIntToBytes(candidate) + const back = bytesToBigInt(bytes) + expect(back).toEqual(candidate) + } + }) + + it('converts empty array to 0', () => { + expect(bytesToBigInt(Buffer.from([]))).toEqual(BigInt(0)) + }) + + it('constructs targets', () => { + expect(new Target().asBigInt()).toEqual(BigInt(0)) + expect(new Target(BigInt(9999999999999)).asBigInt()).toEqual(BigInt(9999999999999)) + expect(new Target(Buffer.from([4, 8])).asBigInt()).toEqual(BigInt('1032')) + expect(new Target(Buffer.from([0, 0, 0, 0, 0, 0, 0, 4, 8])).asBigInt()).toEqual( + BigInt('1032'), + ) + expect(new Target(Buffer.alloc(32)).asBigInt()).toEqual(BigInt('0')) + }) + + it('makes the correct bytes', () => { + const bigints = [ + BigInt(0), + BigInt('99999999999999999999999999999999999999999999999999999999999999999999999'), + BigInt(255), + BigInt(256), + BigInt(1024), + BigInt(1025), + ] + for (const candidate of bigints) { + expect(new Target(candidate).asBytes()).toMatchSnapshot() + } + }) + + it('throws when constructed with too big an array', () => { + const bytes = Buffer.alloc(33) + bytes[0] = 1 + expect(() => new Target(bytes)).toThrowErrorMatchingInlineSnapshot( + `"Target value exceeds max target"`, + ) + }) + + it('has the correct max value', () => { + // The minimum difficulty is 131072, which means the maximum target is 2**256 / 131072 + const maxTarget = BigInt(2) ** BigInt(256) / BigInt(Target.minDifficulty()) + expect(Target.maxTarget().asBigInt()).toBe(maxTarget) + }) + + it('adjusts target up', () => { + expect(new 
Target('55').adjust(BigInt('5')).targetValue).toEqual(BigInt('60')) + }) + + it('adjusts target down', () => { + expect(new Target('55').adjust(BigInt('-5')).targetValue).toEqual(BigInt('50')) + }) + + it("doesn't adjust negative", () => { + expect(new Target('55').adjust(BigInt('-60')).targetValue).toEqual(BigInt('55')) + }) + + it("doesn't adjust past max", () => { + expect(Target.maxTarget().adjust(BigInt('-5')).adjust(BigInt('10')).targetValue).toEqual( + Target.maxTarget().adjust(BigInt('-5')).targetValue, + ) + }) + + it('meets other target values', () => { + const target = new Target('43') + expect(Target.meets(BigInt(42), target)).toBe(true) + expect(Target.meets(BigInt(43), target)).toBe(true) + expect(Target.meets(BigInt(44), target)).toBe(false) + }) +}) + +describe('TargetSerde', () => { + const serde = new TargetSerde() + it('checks target equality', () => { + expect(serde.equals(new Target('588888'), new Target('588888'))).toBe(true) + }) + it('serializes and deserializes bytes', () => { + const target = new Target(500) + const serialized = serde.serialize(target) + expect(serialized).toMatchInlineSnapshot(`"500"`) + const deserialized = serde.deserialize(serialized) + expect(serde.equals(deserialized, target)).toBe(true) + }) + it('throws when deserializing incorrect value', () => { + expect(() => serde.deserialize('not a number')).toThrowErrorMatchingInlineSnapshot( + `"Cannot convert not a number to a BigInt"`, + ) + // @ts-expect-error Argument of type '{ not: string; }' is not assignable to parameter of type 'string'.ts(2345) + expect(() => serde.deserialize({ not: 'a string' })).toThrowErrorMatchingInlineSnapshot( + `"Can only deserialize Target from string"`, + ) + }) +}) + +describe('Calculate target', () => { + it('can increase target (which decreases difficulty) if its taking too long to mine a block (20+ seconds since last block)', () => { + const now = new Date() + // for any time 20-29 seconds after the last block, difficulty should 
decrease by previous block's difficulty / BigInt(2048) + for (let i = 1; i < 10; i++) { + const time = new Date(now.getTime() + 20000 + i * 1000) + + const difficulty = BigInt(231072) + const target = Target.fromDifficulty(difficulty) + + const diffInDifficulty = difficulty / BigInt(2048) + + const newDifficulty = Target.calculateDifficulty(time, now, difficulty) + const newTarget = Target.calculateTarget(time, now, target) + + expect(newDifficulty).toBeLessThan(difficulty) + expect(BigInt(newDifficulty) + diffInDifficulty).toEqual(difficulty) + + expect(newTarget.asBigInt()).toBeGreaterThan(target.asBigInt()) + } + + // for any time 30-39 seconds after the last block, difficulty should decrease by previous block's difficulty / BigInt(2048) * 2 + for (let i = 1; i < 10; i++) { + const time = new Date(now.getTime() + 30000 + i * 1000) + + const difficulty = BigInt(231072) + const target = Target.fromDifficulty(difficulty) + + const diffInDifficulty = (difficulty / BigInt(2048)) * BigInt(2) + + const newDifficulty = Target.calculateDifficulty(time, now, difficulty) + const newTarget = Target.calculateTarget(time, now, target) + + expect(newDifficulty).toBeLessThan(difficulty) + expect(BigInt(newDifficulty) + diffInDifficulty).toEqual(difficulty) + + expect(newTarget.asBigInt()).toBeGreaterThan(target.asBigInt()) + } + }) + + it('can decrease target (which increases difficulty) if a block is trying to come in too early (1-10 seconds)', () => { + const now = new Date() + for (let i = 1; i < 10; i++) { + const time = new Date(now.getTime() + i * 1000) + + const difficulty = BigInt(231072) + const target = Target.fromDifficulty(difficulty) + + const diffInDifficulty = difficulty / BigInt(2048) + + const newDifficulty = Target.calculateDifficulty(time, now, difficulty) + const newTarget = Target.calculateTarget(time, now, target) + + expect(newDifficulty).toBeGreaterThan(difficulty) + expect(BigInt(difficulty) + diffInDifficulty).toEqual(newDifficulty) + + 
expect(newTarget.targetValue).toBeLessThan(target.targetValue) + } + }) + + it('keeps difficulty/target of parent block header if time differnece is between 10 and 20 seconds', () => { + const now = new Date() + for (let i = 10; i < 20; i++) { + const time = new Date(now.getTime() + i * 1000) + + const difficulty = BigInt(231072) + const target = Target.fromDifficulty(difficulty) + + const newDifficulty = Target.calculateDifficulty(time, now, difficulty) + const newTarget = Target.calculateTarget(time, now, target) + + const diffInDifficulty = BigInt(newDifficulty) - difficulty + + expect(diffInDifficulty).toEqual(BigInt(0)) + expect(newDifficulty).toEqual(difficulty) + expect(newTarget.targetValue).toEqual(target.targetValue) + } + }) +}) diff --git a/ironfish/src/captain/anchorChain/blockchain/Target.ts b/ironfish/src/captain/anchorChain/blockchain/Target.ts new file mode 100644 index 0000000000..39f3bed169 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/Target.ts @@ -0,0 +1,295 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import type Serde from '../../../serde' + +function max(a: BigInt, b: BigInt): BigInt { + if (a > b) { + return a + } else { + return b + } +} + +/** + * Courtesy of https://coolaj86.com/articles/convert-js-bigints-to-typedarrays/ + * + * Convert a Buffer to a big integer number, in big endian format. + * + * I'm concerned about efficiency here. Converting a string and back and... WTF? + * Every block hash attempt has to be converted to a Target, so this is a function + * that should be optimized. We may want to compile this to wasm if there isn't + * a less janky way to do it. + * + * I'm pushing it out like this for now so I can focus on bigger architecture concerns. + * + * Sorry. 
+ */ +export function bytesToBigInt(bytes: Buffer): bigint { + const hex: string[] = [] + if (bytes.length === 0) { + return BigInt(0) + } + bytes.forEach(function (i) { + let h = i.toString(16) + if (h.length % 2) { + h = '0' + h + } + hex.push(h) + }) + + return BigInt('0x' + hex.join('')) +} + +export function bigIntToBytes(bigint: bigint): Buffer { + let hex = bigint.toString(16) + if (hex.length % 2) { + hex = '0' + hex + } + + const len = hex.length / 2 + const u8 = Buffer.alloc(len) + + let i = 0 + let j = 0 + while (i < len) { + u8[i] = parseInt(hex.slice(j, j + 2), 16) + i += 1 + j += 2 + } + + return u8 +} + +/** + * The bound divisor of the difficulty, used to update difficulty (and subsequently target). + * We are taking in large part Ethereum's dynamic difficulty calculation, + * with the exeption of 'uncles' and 'difficulty bomb' as a concept + * https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2.md + * original algorithm: + * diff = (parent_diff + + * (parent_diff / 2048 * max(1 - (current_block_timestamp - parent_timestamp) // 10, -99)) + * ) + 2**((current_block_number // 100000) — 2) + * Note we are not including the difficulty bomb (which is this part: 2**((current_block_number // 100000) — 2)) + * So the algorithm for target is: + * diff = parent_diff + parent_diff / 2048 * max(1 - (current_block_timestamp - parent_timestamp) / 10, -99) + * note that timestamps above are in seconds, and JS timestamps are in ms + * The bound divisor of the difficulty is the '2048' part of that equation + */ +const DIFFICULTY_ADJUSTMENT_DENOMINATOR = 2048 + +/** + * Minimum difficulty, which is equivalent to maximum target + */ +const MIN_DIFFICULTY = BigInt(131072) + +/** + * Maximum target, which is equivalent of minimum difficulty of 131072 + * target == 2**256 / difficulty + */ +const MAX_TARGET = BigInt( + '883423532389192164791648750371459257913741948437809479060803100646309888', +) + +/** + * Maximum number to represent a 256 bit number, which is 
2**256 - 1 + */ +const MAX_256_BIT_NUM = BigInt( + '115792089237316195423570985008687907853269984665640564039457584007913129639935', +) + +export default class Target { + targetValue: bigint + constructor(targetValue: bigint | BigInt | Buffer | string | number | undefined = undefined) { + if (targetValue === undefined) { + this.targetValue = BigInt(0) + } else { + let candidate + if (targetValue instanceof Buffer) { + candidate = bytesToBigInt(targetValue) + } else { + candidate = BigInt(targetValue) + } + if (candidate > MAX_256_BIT_NUM) { + throw new Error('Target value exceeds max target') + } else { + this.targetValue = candidate + } + } + } + + /** + * Maximum target (in terms of difficulty), which is equivalent of + * minimum difficulty of 131072 + * maximum target == minimum difficulty + * target == 2**256 / difficulty + */ + static maxTarget(): Target { + return new Target(MAX_TARGET) + } + + /** + * The initial target on the genesis block. + * + * This will need to be hand-tuned. + */ + static initialTarget(): Target { + return this.maxTarget() + } + + /** + * Calculate the target for the current block given the timestamp in that + * block's header, the pervious block's timestamp and previous block's target. 
+ * + * To verify whether a target on a block is correct, pass in the timestamp in its header, + * its previous block's timestamp, and its previous block's target + * and compare the resulting target to what is specified on the current block header + * + * @param time the block's timestamp for which the target is calculated for + * @param previousBlockTimestamp the block's previous block header's timestamp + * @param previousBlockTarget the block's previous block header's target + */ + static calculateTarget( + time: Date, + previousBlockTimestamp: Date, + previousBlockTarget: Target, + ): Target { + const parentDifficulty = previousBlockTarget.toDifficulty() + const difficulty = Target.calculateDifficulty( + time, + previousBlockTimestamp, + parentDifficulty, + ) + + return Target.fromDifficulty(difficulty) + } + + /** + * + * Calculate the difficulty for the current block given the timestamp in that + * block's header, the pervious block's timestamp and previous block's target. + * + * Note that difficulty == 2**256 / target and target == 2**256 / difficulty + * + * Returns the difficulty for a block given it timestamp for that block and its parent. 
+ * @param time the block's timestamp for which the target is calcualted for + * @param previousBlockTimestamp the block's previous block header's timestamp + * @param previousBlockTarget the block's previous block header's target + */ + static calculateDifficulty( + time: Date, + previousBlockTimestamp: Date, + previousBlockDifficulty: BigInt, + ): BigInt { + // We are taking in large part Ethereum's dynamic difficulty calculation, + // with the exeption of 'uncles' and 'difficulty bomb' as a concept + // https://github.com/ethereum/EIPs/blob/master/EIPS/eip-2.md + // original algorithm: + // diff = (parent_diff + + // (parent_diff / 2048 * max(1 - (current_block_timestamp - parent_timestamp) // 10, -99)) + // ) + 2**((current_block_number // 100000) — 2) + // Note we are not including the difficulty bomb (which is this part: 2**((current_block_number // 100000) — 2)) + // So the algorithm we're taking is: + // diff = parent_diff + parent_diff / 2048 * max(1 - (current_block_timestamp - parent_timestamp) / 10, -99) + // note that timestamps above are in seconds, and JS timestamps are in ms + + // max(1 - (current_block_timestamp - parent_timestamp) / 10, -99) + const diffInSeconds = (time.getTime() - previousBlockTimestamp.getTime()) / 1000 + const sign = BigInt(Math.max(1 - Math.floor(diffInSeconds / 10), -99)) + const offset = BigInt(previousBlockDifficulty) / BigInt(DIFFICULTY_ADJUSTMENT_DENOMINATOR) + + // diff = parent_diff + parent_diff / 2048 * max(1 - (current_block_timestamp - parent_timestamp) / 10, -99) + const difficulty = max( + BigInt(previousBlockDifficulty) + offset * sign, + Target.minDifficulty(), + ) + + return difficulty + } + + /** + * Returns the minimum difficulty that can be used for Iron Fish blocks + * To be used in calculateTarget for easier mocking + */ + static minDifficulty(): BigInt { + return MIN_DIFFICULTY + } + + /** + * Converts difficulty to Target + */ + static fromDifficulty(difficulty: BigInt): Target { + if (difficulty == 
BigInt(1)) { + return new Target(MAX_256_BIT_NUM) + } + return new Target(BigInt(2) ** BigInt(256) / BigInt(difficulty)) + } + + /** + * Return the difficulty representation as a big integer + */ + toDifficulty(): bigint { + return BigInt(2) ** BigInt(256) / this.targetValue + } + + /** + * Add the given amout to the target's value. A negative amount makes the target + * harder to achieve, a positive one makes it easier. + * + * If adjustment would make target negative or higher than max allowed value, + * the current target is returned unchanged. + */ + adjust(amount: bigint): Target { + let adjusted = this.targetValue + amount + if (adjusted > MAX_TARGET || adjusted < 0) { + adjusted = this.targetValue + } + return new Target(adjusted) + } + + /** + * Return whether or not this target meets the requirements of the given target, + * which is to say, this has a lower numeric value then the provided one. + */ + static meets(hashValue: BigInt, target: Target): boolean { + return hashValue <= target.targetValue + } + + /** + * Return the target number as a big integer + */ + asBigInt(): bigint { + return this.targetValue + } + + /** + * Big endian, and since even after 20 years in the industry, I can't remember + * what that means, the most significant byte is in the 0th index of the array. 
+ * + * The resulting byte array is always left padded with 0s to be 32 bytes long + */ + asBytes(): Buffer { + const bytes = bigIntToBytes(this.targetValue) + const result = Buffer.alloc(32) + result.set(bytes, 32 - bytes.length) + return result + } +} + +export class TargetSerde implements Serde { + equals(target1: Target, target2: Target): boolean { + return target1.targetValue === target2.targetValue + } + + serialize(target: Target): string { + return target.targetValue.toString() + } + + deserialize(data: string): Target { + if (typeof data !== 'string') { + throw new Error('Can only deserialize Target from string') + } + return new Target(data) + } +} diff --git a/ironfish/src/captain/anchorChain/blockchain/VerificationResult.ts b/ironfish/src/captain/anchorChain/blockchain/VerificationResult.ts new file mode 100644 index 0000000000..9a732780c8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/VerificationResult.ts @@ -0,0 +1,40 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { BlockHash } from './BlockHeader' + +/** + * Indicator of whether or not an entity is valid. 
Note that No maps to zero, + * so a truthy test will work, but beware of Unknown responses + */ +export enum Validity { + No, + Yes, + Unknown, +} + +export enum VerificationResultReason { + BLOCK_TOO_OLD = 'Block timestamp is in past', + ERROR = 'error', + HASH_NOT_MEET_TARGET = 'hash does not meet target', + INVALID_MINERS_FEE = "Miner's fee is incorrect", + INVALID_TARGET = 'Invalid target', + INVALID_TRANSACTION_PROOF = 'invalid transaction proof', + NOTE_COMMITMENT_SIZE = 'Note commitment sizes do not match', + NULLIFIER_COMMITMENT_SIZE = 'Nullifier commitment sizes do not match', + SEQUENCE_OUT_OF_ORDER = 'Block sequence is out of order', + TOO_FAR_IN_FUTURE = 'timestamp is in future', + GRAFFITI = 'Graffiti field is not 32 bytes in length', + INVALID_SPEND = 'Invalid spend', +} + +/** + * Indicate whether some entity is valid, and if not, provide a reason and + * hash. + */ +export interface VerificationResult { + valid: Validity + reason?: VerificationResultReason + hash?: BlockHash +} diff --git a/ironfish/src/captain/anchorChain/blockchain/__snapshots__/Block.test.ts.snap b/ironfish/src/captain/anchorChain/blockchain/__snapshots__/Block.test.ts.snap new file mode 100644 index 0000000000..1e5a8732d0 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/__snapshots__/Block.test.ts.snap @@ -0,0 +1,70 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Block serializes and deserializes a block 1`] = ` +Object { + "header": Object { + "count": 0, + "graffiti": "66616B6520626C6F636B00000000000000000000000000000000000000000000", + "graphId": 0, + "hash": "0500000000000000000000000000000000000000000000000000000000000000", + "isValid": false, + "minersFee": "-14", + "noteCommitment": Object { + "commitment": "1-9", + "size": 9, + }, + "nullifierCommitment": Object { + "commitment": "0000000000000000000000000000000000000000000000000000000000000000", + "size": 1, + }, + "previousBlockHash": 
"0400000000000000000000000000000000000000000000000000000000000000", + "randomness": 0, + "sequence": "5", + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "timestamp": Any, + "work": "0", + }, + "transactions": Array [ + Object { + "elements": Array [ + "5", + ], + "isValid": true, + "spends": Array [], + "totalFees": "1", + }, + Object { + "elements": Array [ + "6", + ], + "isValid": true, + "spends": Array [], + "totalFees": "1", + }, + Object { + "elements": Array [ + "7", + ], + "isValid": true, + "spends": Array [], + "totalFees": "1", + }, + Object { + "elements": Array [ + "8", + ], + "isValid": true, + "spends": Array [], + "totalFees": "1", + }, + Object { + "elements": Array [ + "9", + ], + "isValid": true, + "spends": Array [], + "totalFees": "-14", + }, + ], +} +`; diff --git a/ironfish/src/captain/anchorChain/blockchain/__snapshots__/BlockHeader.test.ts.snap b/ironfish/src/captain/anchorChain/blockchain/__snapshots__/BlockHeader.test.ts.snap new file mode 100644 index 0000000000..07605c4749 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/__snapshots__/BlockHeader.test.ts.snap @@ -0,0 +1,26 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Block Header Serde serializes and deserializes a block header 1`] = ` +Object { + "count": 0, + "graffiti": "7465737400000000000000000000000000000000000000000000000000000000", + "graphId": 0, + "hash": "0500000000000000000000000000000000000000000000000000000000000000", + "isValid": false, + "minersFee": "-1", + "noteCommitment": Object { + "commitment": "header", + "size": 8, + }, + "nullifierCommitment": Object { + "commitment": "0000000000000000000000000000000000000000000000000000000000000000", + "size": 3, + }, + "previousBlockHash": "0000000000000000000000000000000000000000000000000000000000000000", + "randomness": 25, + "sequence": "5", + "target": "17", + "timestamp": 1598467858637, + "work": "0", +} +`; diff --git 
a/ironfish/src/captain/anchorChain/blockchain/__snapshots__/Target.test.ts.snap b/ironfish/src/captain/anchorChain/blockchain/__snapshots__/Target.test.ts.snap new file mode 100644 index 0000000000..d57576a6fe --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/__snapshots__/Target.test.ts.snap @@ -0,0 +1,241 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Target makes the correct bytes 1`] = ` +Object { + "data": Array [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ], + "type": "Buffer", +} +`; + +exports[`Target makes the correct bytes 2`] = ` +Object { + "data": Array [ + 0, + 0, + 14, + 125, + 52, + 198, + 74, + 156, + 133, + 212, + 70, + 13, + 187, + 202, + 135, + 25, + 107, + 97, + 97, + 138, + 75, + 210, + 22, + 127, + 255, + 255, + 255, + 255, + 255, + 255, + 255, + 255, + ], + "type": "Buffer", +} +`; + +exports[`Target makes the correct bytes 3`] = ` +Object { + "data": Array [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 255, + ], + "type": "Buffer", +} +`; + +exports[`Target makes the correct bytes 4`] = ` +Object { + "data": Array [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 1, + 0, + ], + "type": "Buffer", +} +`; + +exports[`Target makes the correct bytes 5`] = ` +Object { + "data": Array [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 4, + 0, + ], + "type": "Buffer", +} +`; + +exports[`Target makes the correct bytes 6`] = ` +Object { + "data": Array [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 
0, + 0, + 4, + 1, + ], + "type": "Buffer", +} +`; diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_1 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_1 new file mode 100644 index 0000000000..a89cff5982 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_1 @@ -0,0 +1,8 @@ +{ + "name": "testB", + "spendingKey": "e09a699b6b54df1f704efd19706646cbdfffa2d58cb809fe1714c800d6e17951", + "incomingViewKey": "d86e80a19cf9967a86730d0e56f1b9d6f734422462ccf343a3ab3905b72ab906", + "outgoingViewKey": "678dfc58dbc1e6d899641de49e54643e7cc5651189b11ad68a271fb9ecec1719", + "publicAddress": "53bce2970eed5c276af8905492364fb882859c9aabe0b7618bfa48bf645f67127662ac1933b25c09fe4cf1", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_10 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_10 new file mode 100644 index 0000000000..3e017c0cf5 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_10 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "5", + "previousBlockHash": "70F8084A83D6DA9C496C1584E0F35961458F04ADDED67A30F85CEE922D8326F8", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:AFgGJ5HwMLWHeaugua9XCnm1CEZJ4ktr8qeDv6Wdu1U=" + }, + "size": 7 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062257080, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "2EB837CC42C970CC7BD9587A0847F9304434D0F1332FFC6B2336E3E64CC5889D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rVeOJSTdgmBWIBzc8Bl4SngZUw02r5khWPvmM3u3yidbmDotW9IfedLK73Z3EcY5quSofZGsEvP2Va7lSxG2laIyhDrVciZ/B1Llctrg63SNvCa5IihPsi/h5m6/lgyRGMxdiBu3YqMafDAkHN1chbdkAGMgDlW2nYjhvslKetuZrV+gr1FAkrO9eynDC8HEpGyRxHh0DH3AaeVC4ooLAPGGdoM5jswUHxYtopbX8ja0P/E3USW34F9CwhOyBN4HdY5tepfz5xPHYnt8ZVTbyoFC54SIVrZJc2w/HOdqwlvgEe5RE6cT37picblT4B75UkMftm3ndt/QarKzGFAuIlBaH4hsZJZJ9fKMd4OEBpKfbcb925WGea1XuPZMVAEs5IFu7C1pIl5LJ2lMhgw+J+QjSGxUe2+vBQbPQn6+jJiNzgaR3GondRWTgV3i+o3EYIsDi13KxAZVALohgIsKUNtOefMY0qawfh41Q4WKvmO0veo1Ddl8CaZCwKwzBat0RdCgQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBht/lXBy/UtAdBhvy4IEIucG89b6ZwMRZYTWmVezj/WNKMhBBZrjjgdWdo0bWj37CXQ7jeJTWeyvACgtPkm8AA" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_100 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_100 new file mode 100644 index 0000000000..070e43aa3f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_100 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "50", + "previousBlockHash": "DAC0DCA5DC258F064E161B27C298C2FFEC74715F67EF432C01606D76C128E1B5", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:GP+GOtreG1DwoiIk5C217NC8C1o4H1GLKFy5W0RN828=" + }, + "size": 52 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063417527, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "5F63F1E83E6B9EA60B6FCC4B10455BA5E618844C285DC0A92D707B27862AA77E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qQkjyAkUye9p/hVLm271WC37NrDRXOn9TapSrwbmmTmSKk2y6faZodBsTwfMhqwEigqSIvYxfEg+8Ww1zkZGpH+bwRT0Xyhb9tmvcg+WpVJUieTK96FTa7zMH3fSUehGCJr+hAMQMfBW/2MU3omX71x3Ev+nfNa+MKNFnmjnkT883OwNqZfKXjk0t8kjtgGqgrAO55vwbW4KeC4XSEj5dA/rnfF/Ysi4VUjF+NVj+lLny+ZzPPK6AcfIFEZXyCspVcei3sAE9ytfz/AIJkS+BF1Rpj6gY/TxAO16iXDs6imRovhjucuAbFaav1NqhI6zZ0LWK8F0n+Atb/ccR6zWT117+plkCI7oE6CBRbkC2OUyg3czYsZpssjmq6/Q1s5BX1XWIfsd294hFJ6EM7yo+N+Q6Qdw7Pq6gTloa+YWYsiNzWJn0qrBMDc9PANNekVdr9IWsrcUyOy8t8Jje0GNrbm40MWGxWD/FlLiZmi4JMsVvQ91M8jHXQD3kwLt+KWrLyzoQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBR6TeOrv336Dz3NIFg3ouYYsztDZBE0Nw1OoNdkyR1x58bCTipO2AZJPnZGBwOQCJ4dTiPMe09wCY6XLqOUroM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_101 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_101 new file mode 100644 index 0000000000..b31a9a5938 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_101 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "51", + "previousBlockHash": "70E1ADEF509F11D1EA8B23E81C3BE8EA1053F58989F012FC30EAAA49D1DFD74C", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:h5MaMU8FpXMVBNegxoZlRNglWGFgQ3CyZ1mcs/vRaSU=" + }, + "size": 53 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063422663, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "61CB2452BE021763FBE5EEEB104F7C8EFD34CD57682EC3C287F6C1A0CFCEFF8E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////j+OF0KBLzhDZvP013yaLdoU3g8wjjW859v5HpTgikLIZtw3CrAFg/bKcotyhs7gxjq/LBbIyASnO7PZyOpOwq2tATFPdUixR+dUJ93VDIkUC3SfsL+3vCZS73d+eHwvZFdjDblrtmGeAtUCldcx+iIK1wNM1wACDhCVU1sEfrq5xdVVrvyBcYC+/XQODFaYFhwmyCFliK+6H2izta82POLoGWCGcnfrIZoHXiKvlivB6YJ4XGPREfU+vvDCQzIi2FClWIrRBQLk5ichA+QTZnqaVZRYzR3nOU9y3PnnmBMIzX/9FQJ6XFr5XZSvHe5TVImrjsykIaSU7cIAbhbaeQdNSIKcGQiYwVb0N3OI6u9eX7qiGzPxMH4SM33kecaqxednnofUGFjncmtDajNgY+2nNRSZK8AbOYxY5tWxBE7FbLON/3OqiQ1gcCh6UFaAwZ3kBk5qaWFrU15jAUCIDH4G5QcmQW3NHrJLUDNyUBCUaGnAetmkYPfBZ6eHUwMZpy+zDQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDT4IIIraQ2Ffz8gFiGMJBzpAG5DEBT3SwkMRR+14o7TFoqhOIWkMlSY7XzqlwI0JcEP+zHywPRx0T+K5ma33wI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_102 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_102 new file mode 100644 index 0000000000..a31ee6d45f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_102 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "51", + "previousBlockHash": "5F63F1E83E6B9EA60B6FCC4B10455BA5E618844C285DC0A92D707B27862AA77E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:jjUykNmH9DmIugCwByFXDI+eVpVjPgfpfgS+Ygf4bgg=" + }, + "size": 53 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063427704, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C2263C4B96E9956091A731897C7EDB0867AAD1B8E50162124A10890BD3310524" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qPAcWP+uJo8tZZ7uqwLCcIVbZ+3ZmABX6bjKapDGCxF1CsEjU7l7V3tWdEp5Y6fFqZtvgskQ6ZhVEP9TgHUVWglvxoIRRspM2PMFlA3dRafKhqhVzDNo6Ivt4JhKhEfvArkdNFesY8yeX29MYHtTVqlZ1cv4EaySRuKo8mtJEt2uCXPP4FgDQf4mrodi0ZyyoNpfhnsZqCP9/CghhDfgLjFk6yxwfUF/XIFma4tmRWjmhMIVilF/DjJzUTiO6tyVdVfETK6G/2qwlow84zbOmKeiJVd/ZzyLECXYe8/Mfs/3jmsBe2Jy9mgOW1VyXhVJyM1jdvwvGpZFMeiir5exVW7zqm7k6ovNDKm9rg8sW9XttcKIMomla1NVqboeukTGM7siEzSWCgdpuyUj+am9YxSI+eH/cKUM6SjlY9ajCbFhybJ4W2rY+ew6xRDsgTcbcA1q22n7eSDiD9MxdyJldcOizWKpihXihPIaJ7ZHeSQbOs3mWT8tFS8ZQl737zrNnYSzQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCMBfslv6LCeqpxCmjMvReJdLrbJcRFHWJo8VtiNDOl5tpNtFdADEubBIkUn4l1C6RWtWejgKR6X+wHfy5bJgcE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_103 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_103 new file mode 100644 index 0000000000..5c72f9d7d8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_103 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "52", + "previousBlockHash": "61CB2452BE021763FBE5EEEB104F7C8EFD34CD57682EC3C287F6C1A0CFCEFF8E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:14lMUlwoQOCLitORvj8BbZppLwrKEebQiFrx1ZF3/wc=" + }, + "size": 54 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063433008, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "4CF303268D4D3BC9B033043EFF6418E119F29DF6B2B9DF02F9AF702E01AD3203" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kBd6eha4W3C+RuuSlcLofBjnzBmH/y5GIAtGIeOa64X0kZKVmMMwfGGYRb8Sd9r7kKrbs3Bo3vZPm/YKYIvM53+tLfev8hsr4GNS05YmLMbjNJ6+Xd/UWPhHBP0xnJfmCqmd2yPNUcGjBR6pNMGgkh1j3r0TdiBhE0NRWwegg0vPz6y2GWIJ5C5Pavlb+kXKr1ik2BD30rjv2uOVblHDpTtfjA06nwUktSHnwMa9suymmxC6Q+FdtbYKl905OmXs9iYFqCvPF6x6SbL5vTzzU2BFbA0po302EOHf33dWHh/nvub4HfjWnVsMfhAVcr7miVK2en1jImt635oIt6isAC7NTtknOrnvVz2+1/W431sfP2JQiE+yU/HWuHMa8MDRLIsZSWNpbNf7SqxBEbM8125TAY7Kt1OHztUfIVgryYEmczz1/q7Gxp+CX7ppe31Wz4AE35lw9VCoIPkuhEAYgC6RrDW7s32KW95DKxaWUUwH+3Fk81hn/+G7q0QXtdCmokIIQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD/YZZtO6ctXyrJdB/BVRfgUUn3nBmfuq1ZYAtGzoPBcOYmlwf8ZB6soZoxTYHTw5lr0PY38Kmct7mP0ojER6cM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_104 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_104 new file mode 100644 index 0000000000..3e19f8295c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_104 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "52", + "previousBlockHash": "C2263C4B96E9956091A731897C7EDB0867AAD1B8E50162124A10890BD3310524", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:xyIBQ/0QtXGLsKuQVtCSp+0p9XoqElHmDeLyX56NKDY=" + }, + "size": 54 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063438362, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "9BEB0BF40940A21925FA8A07D5E0FAE91A9E8828C37929217E78B54044DA5340" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////tPh2WgDdu/IzGwRKI76Wsxd5of+8EZIIihYRgWCE9HWSJ8I3ENWyEPS9sf5qulqVuQOAiBj9D54nwC5jMEf+aeSQcV2H8ncYxiKZUeovsm55IeYwPaR73lPDPmi54D9+AvyBYTbUWxLenq7NkRy0sCT8OCskKbNyCXgexFJUG0aoIzz9EtTz+wLcuOqzwiQyjzfkRPVXuiup8JD/HvfQF4cOPbp1+GPo9mg1naoOVMVV4O2x7UvQwKevoK5uTofke801bxFRTIALULDOwbVIQD7ML9/ht4QbiyRKcYWlVud/mRSZVZPGizE1Hzjg4d17pZZgP4I/AReMYSHm4VffLWqyq9rpw2xNXG2a/bYuXx2TMPiv5aE8ndaESyA0wPbp+v+TQpV6maxc10o8xHFAIhGncF74IxvLYn/Kf8HCSIS51+hYHX358+sFVqzYV0BDWhg6iVCS3t11a4p+ePpYj95WOAcrExQ6UXsjP2qIuBUAMi2S2WkabgWAfJNH8+EF5ONgQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCt+PGrU8NYIb8fbToOxdPvpFQ+sYw8zXOngaJDDGonC9+rhs0V99QahumgHTbfk/enkejjIUyujj0oCYbf4oUG" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_105 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_105 new file mode 100644 index 0000000000..fad774e1c0 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_105 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "53", + "previousBlockHash": "4CF303268D4D3BC9B033043EFF6418E119F29DF6B2B9DF02F9AF702E01AD3203", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:LFhToupwCOroRsUr5wkAG9THD74EsRMxtdZpcD1Cwhg=" + }, + "size": 55 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063443599, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "F50C39FDA15826C1398F4495F8C3CC2583D44D6987FA104ACEDDA34D54764DE3" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////uBcul6UBQ4XZ/TBZ3Yqn61FToNOPZnGaWERItwx/1FkFtyPy/1rnAo4yY9/RWV3TjRFO7oidXIjiGk3AWtl7jlwMIxcAiThzBPpODFmcWeOVejFv+1FBZDUzvKUEaQdcFeeGbO24C9GFlusyrcBq+dqm3Q9OEv/YxAvD40kQfS5KtxY+OpnoWO7RJa2P74SCg611ZbNjXkQaQioe36pyhjGd4Zxn+iFoqa92xKX8aX+sDLW2bOBBevmqJ9OfxSTMb8NyNORG08F9lNVA0jb9x13WwE1AGtkhzzelJmytqANENtE+FzqY1CJ0xD0jA6AcnNWINkTONAwuv8bJ7RuaPfwvFr5jBwRoLtM4Dsnqzzuq8L9zAHBkatDO/4M181vRgyEsqQaAj9xo7kvhzzEs10Ha0c9oYNva6GyDcPfd7o8EF8EWeN/OgEodv9ME4SdzZQlsLiT5pbM1QJsHVfyGeZEqVpICEOjShZoCFVxsCAun/PLq5gRD/GSpOAl1ypLsfml/QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB7dSLMsazMH8RgRSA7z6Z1wfI2IWcGoi5kXA434k0VtAN0hWD+autIsfkbRnPnIi6mluZcRMVwmhM0oMF3wH0M" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_106 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_106 new file mode 100644 index 0000000000..1248949f41 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_106 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "53", + "previousBlockHash": "9BEB0BF40940A21925FA8A07D5E0FAE91A9E8828C37929217E78B54044DA5340", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:XACS/nVHF+yAWhIyoJjuFEeGu/0q66Zjc2qVw4qefgo=" + }, + "size": 55 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063448608, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "0D979DFFF2665CC4889067D74D86895C2AEA244F287616A217A5D0727FAC306A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////lT79paYwCnTERz/+1DekL4KOk18LRLyenM4XvcGdD2bawu2rbsRDMqFGTdHo/v56mH7e+XB6SUwG+/hUX61rZZXOdQ3ngp8jyHHk4HdqtF+boJDvdWXYawV449uhU558FWtNQI+FXjkmw6DR2+xmizwZE+xhR9ppi32nofKKYFApBszmLOAzrS7tiXqLGCwHsp9bGvPIxl+19AspkbURwEx9ozzJUr+pL7sNKYg34EXBgkMb/o7Jz4iDRA6vKUftbbKUc5dHXhWo3GBhMpUgbDDK/5cSswAApjgll4/QwqTgnHrAURDPf4UCM+/NZOpc0PhvvY/wkBzY26BDFxRAPuyKv+xovcaAhgaVBu7nsSAE1Yxcrwz9TPQvx3Vw806gzaNZECpFnVcF0sjBe4/tmdJVc5E6dKtZbODQBWyOodqpYS8XXGACMCBzQ+IIdOhn3EXsVKHt0+/zPiiYObrgQt9mqduoLOYkHiskQnCuo5jnQM4yX+3Z/xxfhTL/ps/ImuaTQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA8ATkt+tA/WiY7JbBOpaRY3ydQWfEvk6QBWFKzmzO4VoZvYifOK0XMx2VLVXq4DFSapJRYFcZo3wiPa/Rhoa4M" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_107 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_107 new file mode 100644 index 0000000000..9ae193671f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_107 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "54", + "previousBlockHash": "F50C39FDA15826C1398F4495F8C3CC2583D44D6987FA104ACEDDA34D54764DE3", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:n4PlTVhqmR4UQl7mA6lqSiIa+Zr8EO7qN9gTbgRy+CA=" + }, + "size": 56 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063453739, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "0089A58BD625D282F3C9E7F96A9DDA763C2178EB4527C571C9BA8664123EABE3" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////sgS85shGvKwOCsCpaHxjQhdXA4x56d4yY6qQ8w3QhOPAcnhslh97vI6T09pWjHTAmSibHnmteM1MF4oItEB+Zo9Ftf/g7TisUnvbPtDlhLR2dduXPEn3u3JT/iWVVMssFPHuvg0ax4gzzoS1l5UR0M1T5BU+yiQHEECuOJYr5RUdIsmlTIUUhSSFfECzjzO4lcqgw3MJl7QoaHdIcLYjqHmjTc9LaFDhYsr5s0o6myz1Bc3PGzpkJAZbGmTz76o6rSm4NjgIG9/DAFpCk9N8GxUqo3GTylXuN7liISOTSNup2fkwIH1fPsV2sZUDl7aShVyDavitoZZB95I6WI3GCaGqOatxHUxNaPx9H7yCZ0XA01cjIDzKuOQWCb6Fp8bmme8EXhOxAqBwQBseFKjd/pxF+VLFGcfy3doyJRUaDzGzIQoj1R73FVuwDFFLnX5Yaw3MYkmdN+xmK1HQyRgU7ddLQK8aSx/21mKJPs88RozS+q0VHItET0juzNFcG0CT7WlVQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBW+7qBUb1xPxEIJ86Fi6FoNdw/LEbHgyf+P6tnnmEtqk5crwcxI3WJFbIHhCvG+RsRXfZXd+h5sTOZXunl+AME" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_108 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_108 new file mode 100644 index 0000000000..951f9f82da --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_108 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "54", + "previousBlockHash": "0D979DFFF2665CC4889067D74D86895C2AEA244F287616A217A5D0727FAC306A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:QxTo3R7T+rU2lQcap011CMNUKdotQNRYsTjkxpDdBSs=" + }, + "size": 56 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063458616, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "16C139A80E5BDC3E7AD1FC6E315E4D983246BE7E8E470BE4308DE934F784AA6D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pp0UJXBmfjSDc5aVajO1lzNmCr+I5eGwVCgvglSohDvRKO1kV4grKheOEds8YH+joBF3h3rg8eIHLO4a/zZSq2aTcfxdIgXb+HmTWHeqKVPM1BaIVRI/e3uN9Cj17AcuGUPPPAfTLn76zqjgcLzyMxC5gACdp6j52a6vYU09iFLbBwxFsJVnaYTAUSlids6FtR5Q8LRwJnmFH0Bodon6z9njqPGg+CbIwvadUiOkaVbQDWxLyUnZAT2cVPYe4wbi55dyFViLD6QrQJaKESlJ4y7fUDii9nQZY8XH7I/j5U5Le/9t6IQ+pS+sCAQxnq+ejhyNefHUTEe+VurtBiauNU4iiA0K5RDBFSurVi/VioDbCRuCYz9PrYny2vCjDGmktE4Y+e/XYCnN7TaDAvQsdCowIORV7SqK3ShAToN6pY/+wd53MJWvJLlMqinMn2HCGwKA7VYLNCBd2cwJep06lO4MM5qR7TjobGg+2qYhzR8NffeWP7womM982G5KOVFVRUebQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDjdNh2cA6EeSe5PJN4awoeEuB8RhIFKXoV345WksLW22QXjcwospJYEgWcg8hgs1E7yH+kqz60gaURReCflBYH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_109 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_109 new file mode 100644 index 0000000000..c24a7866e4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_109 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "55", + "previousBlockHash": "0089A58BD625D282F3C9E7F96A9DDA763C2178EB4527C571C9BA8664123EABE3", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:4Z/EyDUjMN/jfgQO/aGDsaJ/X8m6JkogAAkLBxb0VyE=" + }, + "size": 57 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063463947, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C3FBD30D5BE204B6A0EF8372B2277104043A3DB24110B2623707C3653873859B" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rpBlXre4klKnZc+8oRqPYB5AdOzcMCsgqKGWcg38PQH2DHKES0ooTRQOvwV4bETUrUhTJBW2ZfHdN1BUyPFqqfunrcD5cfuMrwinmpK87QwuUHQ/gdaXQ7E1D3zhYU5gDbuMD9Weg0R9tcUUT3L7grII13qsoDVvo/HjXn5yWO0LlNGKDgC2zCMqIz6BDR2JiY+CmOc2I8dSMleYEDF7w1nimfjN4ku876U0DslAv/fljJ9gWNUxU7CFioBiinj2E6pPS6oxc6HCfr5dFVzl7bxpVuGn+GHeQlwxOKPgdU0RAYstsaQTkuEA8GHYXRFFKNRYlhgzwJap+FlKlkT1Li1RmVQ/ceadMAU2H9mO5wWJDOGHaOqbdZxwlRGHVee9uiy2lDPk/QESsPcQgsrzlOF/5d0jbddvo13P/Qkx677PaJ0XrvErCIVFeDVKWyaifxKyCihPAxxiSqSHvC0a5Wa9ghcvMziafxQtPjbu8IyJktdjCWWUp4LzI8TT8Dr/6izqQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCLiVi+rUj118+1zaiv1gxwcZNGpNfDTlkPvM1aSq/EID6ieNjcOTaMOV0H94UpK5CeNrAm4qIHyaJfsQo2mcEF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_11 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_11 new file mode 100644 index 0000000000..589eec94e1 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_11 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "6", + "previousBlockHash": "3D2C0BCE1E669D763BA72284E92D977D6C4486CE3BF43FD0AEEE93A0C4A89B49", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:HKEr842lbuDQz9ndkOAIwvAsv6RkIp+HOSpuoItrb2A=" + }, + "size": 8 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062262418, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "36FCAC83F194085FC63C0534DB0F713960DE0C7EB0D2AA22EB0534C4887D74DF" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////uahI29WlFDhiTCucyb+tnxCcUJC2mUrvM2hb0ZEHRM/1lhHLCDdqxEXGE+bTZ7iJoV7p022wdAxqegOf1VhXyj5Bpt18nt+hyY9rveGK1ATzd8focRp0HWj1P5YGNQYgA7lBSBXEhiSFl54YOtbgzWeD6eaPaT6EaLMtKloo3PwElph4xPUhjtxQ/+nBLIw8rYyyIKKnCOtdXNhmdRCI72HhUhvsrv3fy49raqtfJiftmNp6SaU4G6vSTG1pu888CLwmm5RFyowc3QzWJN9wL8D+yQnVQjhEI6ab4bzoKAKuyJEXAvYI2kBBlskFwPq4jH5/FX0yDTBJ5bq9N1ORb6Hv+hgsrFBuVPj+fIVuk1ffue0Dqim5kp4mzwZ7ClBBW0n614u54L0m36I8UAF2cJuXr+0dBpeoz7PntOOY5W55wQY6RuvF8LOqDIfepmnCJNu8A56Z1PeXdSqytAms8rjUuWNWAv9FUktdxNqvLduY0TxvS5c7fWep0bwstpwi8Lv1QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCRbomb0Tb6wU2TReQjxank1+zh+EnnCeva5d7Zl4WM0/mBiDNmNS6pnbYh11xM93bekBF8738ZC4GpsGMIg+UG" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_110 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_110 new file mode 100644 index 0000000000..2779e6024c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_110 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "55", + "previousBlockHash": "16C139A80E5BDC3E7AD1FC6E315E4D983246BE7E8E470BE4308DE934F784AA6D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:I4PHTy5oF16EYQs27K/GWnz0TtJt7bK9bkd/Cggazkw=" + }, + "size": 57 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063469149, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "FF846273BB9C5F02E4EA15C5628D19F208426B743AF3DFA4FD040AD502F52FB2" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gABcDxWJ70bLx2gj+E+mGTpM+EsZmZmARuaXafgysLuOve4RtJ0Mx5QoQaVenKRNmCIRa7U+Fkk7gPMU5f1LenMMc5FFDp2p/IlnjN9XweSx2zAeEVRBcPVUve5xgGO1DidtzvoihhoQLvqA0fmOQfJsziKcotBBBbmkUgXavbK4UNt8h+ho0LUziGtqKoShhRX0tw5ahLKlSS9LoUCxpUSHJQYtDZKRYa7tt36fLFnA+3pbzsj5uctFgcJ6pFkWp25h7gOjXdvV7i2VHZ+adIEy/jkyB5f0kkW2sTLTaUOfl4fkeFqfR0Gx+N8brn1vED8WH8pOQHAYYFgT1IZQUM6saZNZl1Dij0p0/XImLW1HjxTBmVIDl4aLkndwZkdtwkMBzCxpAyAbn90WzGOPo4LkoO56VaACrTyiPMGg3mMMxTnGxiNXpa/77twUH8qunedpIjE2lCBEIxshUMIusy7xyRDbsKXe5pi1k3FTK3gW37tQFG/s1gP5XqAYnm5Hq2HyQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDC7yeFJxOLczfLpxmuqvhYPojDqZdz2C1bbCtQvAow7cBQ3xhlkKizOi0yO8cmPXmhnPYSFC6Von7C2g+tB/lEG" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_111 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_111 new file mode 100644 index 0000000000..bfe9babc1e --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_111 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "56", + "previousBlockHash": "C3FBD30D5BE204B6A0EF8372B2277104043A3DB24110B2623707C3653873859B", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:+RBWLiL+rRioYC4RIoz9m8ihu7qGEQSpuGvgVnp+R2g=" + }, + "size": 58 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063474233, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "3B4109DC1AF5ACAFC1B96AF33EFD26A56E2DE4236F9602CE79BA6254BD11861F" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gKJIjH6d3n8uV7j3t3qpbqIo/CCFZHe0/0lfJ+vagKkPsVVG6e2RD/hMzl/I2UpXj472QnPGkzL3iaR8HLPHc5y9YpAOwna6k4iWSgIT5gbMvad2S4670Mfn0HylePg6FBmp+OpUaDBszycVmrBGAHLivVuKKSdaabkdCwIWCTmRwq2IjGgMwnbxXEf8/poDrIB3PtlPBtRIp0AX79STfWSujzgfhKEl0SnGMsyyy11XXbLRLB4QucAXrTPu3t3COI5X9yxkODc/sJjaGgcWCtsndfvoK1UkUMYH0kL35iibfADHnIH7Xt1e9rR8vx5Yh+zkf7lRMadeqFWrzPwNZfzDQ94V4JO1/OUZ0G9NQRQHj9QZ7e2WsghLBcxKokQDOpqlMUJp6lHHNUxFZeT54zmmyKyHgjtHeks+oQjgYlev8/JE+VUG8vc3c71Q4udW0i6CgeeyTfmbFsu8DlZAohscQXuExbkmF1H3jeMAyEtMynWU/sP1bawUMDZuT7XH4wX3QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAZwYMRq84F6AzrHr0Rio/vByxjw9aWY8VFTkhoJ3QfCHOODnNj1oljMojI5aBpXiT/+5uy8bhHas5ATOAV9YgJ" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_112 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_112 new file mode 100644 index 0000000000..fc796721be --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_112 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "56", + "previousBlockHash": "FF846273BB9C5F02E4EA15C5628D19F208426B743AF3DFA4FD040AD502F52FB2", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:F2EDbwExmjHwQpDQKzom7DirX0NLBl7x8iz3GYfqww8=" + }, + "size": 58 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063479201, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "5C130C532CCB6C41E50D7800BBDAF3CAD74D9BE40B9E4A4A5F38E9E47182AA3E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////huKlPA7lqGSMVYaPI/8AKRqBhzre7jbCNmm0aYVYwphgNtA465Wg5xOIzUN8Nl50jlzHDDoipXS3MKSS2DIlL5bYetEaIR0stdSQAgRVVGORGjtqHAkMd+sPM3k5z1j+CVvnlWPG7x7Ifs2Dcao3afaBUOKqN+zL2Dx79owonDSbW21PjLAz1Hb0fwfp0LNPiuyWcUABo7YBa8ciNLdFNrgpo7C0uXRhU2V/EEhTph6jVBfqLvi1DPpeJzUB/kdBgYJd5IALp9uoi/8XIngvv0oQl0xwL8uSoCuDFApKXoNgl3mdmZqgecnwt/vhO80jCaXoqgGZlsIqQMXgQGuSUcdSyXRO9m/WsftvkPxdEXrPoNL9gpTDsTGR3+z85odUqcXbaYZiE0OZJ8HC8NretAiWoycIH91K7fz8RQLt4lcuhRNIjTz9b9Kp+iVPprPLYxSpwgA32rypClqwYX2sZTT/qrv/5nrnJEdY2DCvx6qlbYR1PvS8darSxRsSNoyFztHGQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCk6Lv+xRT7JUmPXPv2MlRlfTcw7HNdc1H30xnfyIYgCz601BGhqY4/YWakkgSsUyreI0wvUSoIylT8rHf64C0G" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_113 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_113 new file mode 100644 index 0000000000..09591a3371 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_113 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "57", + "previousBlockHash": "3B4109DC1AF5ACAFC1B96AF33EFD26A56E2DE4236F9602CE79BA6254BD11861F", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Jn7aLOtOq6RKK62Uk4wrNlcQ06CNrb83cWJAjFrLZwQ=" + }, + "size": 59 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882561655772227099265022751590609053759679761171040884447085243962752512", + "randomness": 0, + "timestamp": 1617063484164, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "62CC4C1C207B23D7FFACE326D85FD9E56CC829D742B105696FAAFE0EAF71BE6D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////uVs8F8Q6V0umS16KqBInuHD1QJuewZY3JB4c1QYWuMtqN2FCvO8TI6U5eQ/7PoEHtlc6cU6P020iqsje5oSO6hbjqVbGmln3yQhLzEtVo52BoJAed8ypFAzML4Id2ev2EW+LaKbyF85cqQ+SQFKjCMaxfHFOiP0dA8QlUDYCzJKmPuLBdcs/Y/4QvGi4lSoBt1EG9ITZ7Plm67iGd+6x1BdSFG4fBtfz/fBCakOdO5UeuFwXxvRWJQfCPCLXKSU5Ju8X4Q0PI2G7hK5Ek2oBICCHa9parkduzI+rdNcweW9uedA6NnFRwhyz9CEVbiPcbuEJRsetghwsUt7tCGPyTri82SJv5HYP5k8kVzjidesmUdOfcca0sxchLHn2o9Xx567yhB3zljAVsWCxHbJIoXclKZ5bnVCxQXStEUJUeu0qgmEyiIsxwouvBSv7PPvRXqvTDpozRyE/3IKgIzsJRIb4E5WHh2qgr8GjXU8nGaypXX8A/RGG1wnqU3/cLKvQ6MIxQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB6E5Skm4KY0QkxUtzQV4+UoSLyQ9wagd6na4bKdoVigfr+66TkN0HI70z+ptQPZbHCdbf7vbNchhVejFIKtrkI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_114 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_114 new file mode 100644 index 0000000000..6e4bdfbbfb --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_114 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "57", + "previousBlockHash": "5C130C532CCB6C41E50D7800BBDAF3CAD74D9BE40B9E4A4A5F38E9E47182AA3E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:PE7lEyBl6ZAnwtR4De2zsO1LC9xju6I22g/K2pCDWBQ=" + }, + "size": 59 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063489217, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "CC7DB1B52532B28EE951DC8135851FB5AF7FD404769BDEDD2728BC336E4450CC" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////oT+mjAfl00S/G60g1WvnEsxgEfogNj7jrwTbfX4L6ARqN2C9oWKZsyJdbEAHjqZgmK1qhWx2ZpJemK8h//xHUHp9xp1cgK/CEGCf75GJAgPuiUoFMsRz2M2nFOxPcJpkCTQQXMzzy8+ri8QN62pRyC5QPpZqMCK51mMvNO+bgf+930sUNIU/m0OgtgiL/muardIbvaiLsGc1HNOR8q1FGVsWTrUOMNGf1VAQxMzyuROg/T6PlyxXyqUOZpLKfB0l3P9NtlmYssueRJlV8x0+9A0BoG5v8rQAEshNgv6MXqeIyIdbCHkQ6k1IpMYRVhLw9E/q3CifcXST5xHEJ8CaIqgIbjQfBSbH+tel2upOPZEaXMS6oPXYq6kj/SwSE5c9P9frqUS5TkuVFKx0Jh52vBkZqUNtQ5mZQAdSi11RWpw7rpp8NPcdKESmfcjBBFfXf5dJ4cRvXwRrh/e4ZsgFv0MCnfN4eIizTN+ymia2cigoFhWREHg63ksVS50UyK/iT3ByQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD6E0bzkSO6j3SlPfhCx9a7CqHw2lJbl4T2BvrxePbPYp9z5mdRsOlf0Ng2JHLNJAcdNfwDBZm2Q6ObujfhV2IN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_115 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_115 new file mode 100644 index 0000000000..2eff219119 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_115 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "58", + "previousBlockHash": "62CC4C1C207B23D7FFACE326D85FD9E56CC829D742B105696FAAFE0EAF71BE6D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:QbjMBSwJX9YRJps+t/CDNRz4iDeHABK6vOnFfRHgASM=" + }, + "size": 60 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882561655772227099265022751590609053759679761171040884447085243962752512", + "randomness": 0, + "timestamp": 1617063494584, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "53D6EC63574C42D117AAB7543E80B3F0D3AEA73BD9963C2F6CC5ED2DEBECFBF2" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qpIiRPo1CWjhklkUcjAK5QUA2ZOtI84H78gpPHg7snnKoZ2nnFOIP8owCvvHUogbsct/9pprgHDJBDFfz6hk7VPO8ZfDgJRdq09nkIuTMRe3PESD4Nnnyd4begJegBMECvDtDYHTTqX8aIDdNNAssnngtpOkphYCCOIKoC6BQDDh4UzkYNvWYyEtb+DI3aHYpPw36q53MEElMJ3NNbqvjRl8r3OdVWByHoOAzhZ81AMlbXML9HzBdfz2Uz2Or4IXIJbOO4kQfuMWFovqjWHBLQDhfdqLOixcuGSG9xMLeY0stCGiCgDKS8OQ8j7ie3CRnOXBi+EYLXpx/bi8x2RdIBcNyggLJM1cihjiR1/ruYe9LCn9z9bEDTY9+r9nz7Q695pSf0W1CfxTaoKN0NZ4M7c98khhIaWV1fDl0S/0SFDPafNac1OJs9daDhimClq1mIrx3C0tQ8xbZax1V31EN0qjSZ6G2HOJpfQTxQgEuukFno/7Jygp2RhUpbkJy5Ky8HuuQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCIeOtc/84PSpmryMLxmIwG7Yshfo/Srobro4L6e/k1q0Ci7J/Iq4mol75DiWPSCCcMojwN6zxvmiXqtLzOekYL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_116 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_116 new file mode 100644 index 0000000000..d370c6537b --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_116 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "58", + "previousBlockHash": "CC7DB1B52532B28EE951DC8135851FB5AF7FD404769BDEDD2728BC336E4450CC", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:VldRQ1ThqV+z+ftuhSEDI1VHNDLgnBk6peBjM7/kx0Y=" + }, + "size": 60 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063499551, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "50BDEB3B37FB344CC0CD15886A861166910C353BB1C2A95F75BE83CFA1221A83" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////lEE6/d/ZOWxFweHCcoPd67iQyPMsG0pZDN/9Qj0Xxv6snXMOYMbn7m/y4fJHtPrXitBnHCjGpqt4cbJwTUiNfy/oiQXt785aVoL51kBvGGR21oqfsLfcAYauIFUHZ1ZIFjceRGerMIHFYvshOEdMI6oFN6F+T8+VyuSN4W4F2aXIPrNFOJ/mLcAyGpVusaGplsYpw/88CeNVSIwMRHoXZmqYbLp/O/X3eGROJfak4BOVh0oDKpjDo0I1bEOK3UB5pnRJ7fOuZItGeuJQ9ZYOC0gxMFLnIWQaqrWfKiah2e2qzm3+Ve1BJ9sEGTAc2I8okINIaWf4b9el5sRckBVZAOostCColUaMWV86+/EWPpSbJTep1130MGijIlF9lDsCyIbjO3X3CswqnE1OaI46LLKmH87Txw71lAqrHoFBfAbCRwaZH6a1uqUUj5kr6spov9oI/6mRgdnEY86h90xgVc2UCA6raT1hkuaBDVLcOiEKs471zjDQGqxEjOpmlONdrxR6QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCvXImQFbxFWLu00x5bu64qyjRyyE80p395tywfEf0Qhi+1p8pm35H3t9RCXULn7wAoUMGfHBx7hO/8zvnLCEwD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_117 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_117 new file mode 100644 index 0000000000..2161c798cc --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_117 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "59", + "previousBlockHash": "53D6EC63574C42D117AAB7543E80B3F0D3AEA73BD9963C2F6CC5ED2DEBECFBF2", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:gezXTDto+Vs6NX0gQkjlLwuPc3ApLu5czZB/tJq6sRo=" + }, + "size": 61 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882561655772227099265022751590609053759679761171040884447085243962752512", + "randomness": 0, + "timestamp": 1617063504672, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "7FF826F2F9C0B5626AAEF51C5F432BEA75D984100FAA78F29EF30FD00C687E9F" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qXdY33zEILSL76uQNhcvVRGljggcFYUNG2qajqMVcWvavm/ygsRFK5xywGe//BCPi9PMuQiJaRgVGL2UvFcf1lOvaIFwRYq9wlR73hrTzyJ4HWLgJvoGSpOeneFm+t4oDldaX82oa6knbe6NkGKW4KUMaSCm19FPp68Id8wN3gtsKMDCP2NCaCDtHoKmbuOhrIo0FiOrbk4MBgLCZ5mGfohhxRof/1Zl9n5OXQFWMESZ4bRHWkz687DNBxFZKrEOm8Rd+IuDAQHtUP0u5Woq1Kx50Zo3u8cs+Jk6ZhhN/Eonblx/T7Eml1oa2ZhpdgLsHJSWb4R5MLLoDmdppP1wOA0ySKGpN20wKu+3Tt2BV95jwEWMYinostQVz39S0WWazFHMvbWpzurtpG1Z1n4KxDbMyQx+ya7GwX0azrOIV7qaTsoUiSyxwEtpDMCYXYb9xHFTGT7w8p1MMfXYHdZhwu6iOWJp9jNMb6x1GXml2PZ+Uyn+Z4iNugNL+rhusP1jt1W3QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDcJCx/CyhnhkQ6dynQhGs/ZoSeHNrb9ejyPxtcevC5YsPheY/Pbl/LnKWfDERHdEQb9ZXut7Ui6I9KlZuxAPUM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_118 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_118 new file mode 100644 index 0000000000..c4307141a7 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_118 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "59", + "previousBlockHash": "50BDEB3B37FB344CC0CD15886A861166910C353BB1C2A95F75BE83CFA1221A83", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:sv/vDG6edZJrMVfLXbn/tfYaJW3qUG/xkrBib9jEhFI=" + }, + "size": 61 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063509707, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "977FDB36482D480A81DEBF9BDBE4681815042323F7AC7D348810932AC66718A9" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////txQzqu9Z9B6vlbWYHvaG3KDzDkMHZM6uol0NXAUBhCpln6AVMJRHxpywdrE9AXX8ke5pPN/3eYREnmwtMCgF9c5KHPDta0abPPpr0KBjKaCdAA8O/WF7bHPm/o8i8B9DEbl8R3Eo0z4Xc2Ba7v3hm91JXtL6USiZyxwObgPTWyDafeXlokG6RuPUVtvdkGX+jEqkwNzqW9esXRol2lSQA+qKNl985vFY6G+Pbbpi7FBlqXa8YZpyR8VBvzKlf0btgKNLKtvijNfJC4u9r92b+EE+c+7Lr6yf3C29QVy3diPeA8yLxV12sLIt33AyD0DwSHDN51uzHYdpFZXaCHruHRps0GUNgoEYq9knPstLdmuWJpwCBV5NhMko2/nyq2pd8DCSCqCtpJkvGuxP8o2e8GweJkTglnU9GDnJc7SXz9C99HS/EvdL6l83FwHmbrFzUOWTtHRGnxJWPzTa2n8zCQ10iJd/xGLPcI61zrWtn0gh7XVxTq+C6ihbRbcQAiXiyLU6QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDWk+w4ppzO9Sc3FQ5wF6bwO6iDoB33PmelwnLxKa/FgpIQXgDqLSAjcs0vSVxcpU59yvyeZNOal3udRkKO/dgL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_119 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_119 new file mode 100644 index 0000000000..c8555eaf38 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_119 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "60", + "previousBlockHash": "7FF826F2F9C0B5626AAEF51C5F432BEA75D984100FAA78F29EF30FD00C687E9F", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:n3z5YQgoA3KGAWHEtgznM3wJ7Tbr6rIorB2PpUZoHjo=" + }, + "size": 62 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882561655772227099265022751590609053759679761171040884447085243962752512", + "randomness": 0, + "timestamp": 1617063515218, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "85A86E3739B0B833EFBAA5061FC7066B8103C86C03BA605E15E0FF5326C1F6A1" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gKxLRaoIdIwqrxxbe+0XE+2eZjZm1/prM9Xa58nhk40AIbI3qO7SG6v2jxrij5PTjhRsPsB3ud5jFcBi3uwcjZxSCBRKxka1tOTnaXPFPrquKEfjgjBfwU055WwHxLGXEmr1k10fkQq0PF1wdQaTcto2zQWfuKDoNj1tUrZ3EWO2hSbIUY7KaMT7KoNoGym9i8NNtI59dPW4R3oTBdRSrS0tlFfFEEBTvJdZHwNRMlXxKWWJWD8Q4iKUA4jFkmvg7mRDVi2f0EE7d68QFRHmo1Mzr7ccm2VbrWKzQ9BrYd0Vf04G8ctGACJnhWuFAUr51TyGbbeFh2TyOpTBc1HqckV1qAwl6Q0B/+Bbmf0aAZq8I979sMI9CnAo6UEE6GCQVPBbJNT3E7gO3myOxsfq+uG8E59D07mJjeaXs6itFhmYywxjHhchTCcbytNZSfwVs2RECLnokgVSgZcdQcNyx097PoDbUXfdGYgBgzrbd96oxBSy7ZshW5qQ2YLo36f/th4kQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDFMkUbQjMAJ/3ZDyHfHXQiXfNWpKP99Dp+6GvwmPuKzC5Pl5+fazoAs73DyZ+VZOGBP916OJaZLwleGZq07NwK" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_12 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_12 new file mode 100644 index 0000000000..ff11ccf125 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_12 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "6", + "previousBlockHash": "2EB837CC42C970CC7BD9587A0847F9304434D0F1332FFC6B2336E3E64CC5889D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:5d3g72RS/eBHNDGJFNArQqfs6ioYBLJ/AybgRr+GkFw=" + }, + "size": 8 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062280105, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "593A95C049EB2A4DCE49277DF5C8430373A00B9756DCCFCFAADB48400B35D6CD" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////hhDIYAFxfwMuIyhZUah09rV+aOLVBHz0ateEBZ/O/5RiI8R78GR4UuXNfcWr2j6+sKQrAUuJWbja2Qcf+Y5Xm/FMA8dCEMQchU/HLvsIyLVYqhoquuHeEx9NCGBWKhGhC0mtGcwtQvlF53xzzubQ2mcs1/THbCzipEQO8gisdMB/RDI2O9E03s1xf5MALeMgkzw1vczhCEcLl7oRQ1TCn92e3pvubyuopb/s8A3o4cEvuaV0S+UpyZTXiYXin6kku6m6GtqXsqnKFB3lK1OpciTE8ZrPjDGAwp6IZvupUpeoeNsGIz1YzPo0yDquPAndkyr+zDxM7dk8DAnJPuDIPx+L0vSR/iAztLASDQK070Vx/r6J+3knRFhpbI7+rF9gS9KKXwm9dNSHoJAnbM53zE+KNZk5LqwNLGLtc9jAVr229JGRr0vattEsfQlkuZKL8pLyjqYOPxccGyzMi6czPuWUK1QqJTBMjr2o34bP+BigEBiiYk3PyKRbxo3b1nB5HwftQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBgCXUTMjzWIBSWkjGwxZbtmW7CQvG4d/V45fY9oQYYU4KhWcGKL4BCUucX8KUf8Q8U0loTope+/O8lVYePYpAF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_120 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_120 new file mode 100644 index 0000000000..adbda88de0 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_120 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "60", + "previousBlockHash": "977FDB36482D480A81DEBF9BDBE4681815042323F7AC7D348810932AC66718A9", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:OMSTaZOoLBGgr9DZc1HUSL6m4iolsfqT7n9mhYBDVgQ=" + }, + "size": 62 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063520229, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "CD0D7D6E197A262D149B40C0D7712D1200121DE0C69C16CFA0D65CA16DAC06B8" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////hWt827Kgc4Iy/3S7xyIWtpc0lOat+a7ZwzFz1cOvQNy36no0xZdw2lacyVPU3EicubrBx15MQiyp24WyKTo+GXF0ZLu/l+8gK9JCpcgirTpTj+B05bCFJ1XDzXmaEdXnAsarS2WxDO8nKXrHUkokj4hBl3N6OqZL0SXgoZOC3Bv0VVFGAjM9gP7GGtDHZAoqqi7R6+9LokIEqdazmAROKn731q1gVVjy5+tSDBWHw9PN03l5no4D13rkoWnKcHjPdXivzY1aXPLl7HADPSHnPuLtbfGyQzJusOrIFSuSeEKoPWinUrouI3UiSLsuqMBDeG6c0XmnofHTIEmiV5RJCPGADgmHnq8zWz92rdXxSZ6PY4jQVFbvP70B4sjXFuzMs/OR3byufOrsMkBDBlmQN4PZiP59u/HO2NJfilGVma2ZmNV7arEb1/prd4VM3UjasX3KBGnGThUQuOr4kgSblZuK8YFj2GoCBMslWsdzNPLGxip+vPYoxAd0V1NQtc1Fa2oNQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBaQJmCioHhclDcjs4PGpQQc5CNrqp/aM8LE162G0hpJoqduvuuy8zj6SDXodhBLUrPEeVovCIVs40lZYPuOK8J" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_121 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_121 new file mode 100644 index 0000000000..6eec688a7c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_121 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "61", + "previousBlockHash": "85A86E3739B0B833EFBAA5061FC7066B8103C86C03BA605E15E0FF5326C1F6A1", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:iPjx0gMfDixYUjYs+pfWMZiUAVuT3n1pn0QjZDJURVY=" + }, + "size": 63 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882561655772227099265022751590609053759679761171040884447085243962752512", + "randomness": 0, + "timestamp": 1617063525357, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "E99F464317E040EE38114CA48CBF49EBAEA96EA9BC4DD9C6D222ADAD9FDD15AD" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rte+U1SnJ2IRkC05Oxxs4Tt5bnBqprboGYvi6X9WiiKOeJhGetS3PCQY8fH6Qr6CtLoieCGrDAvAqDowGO/PnooKX/ITG7zopy9wMuHCwyh/mJTxzdwiycnU0Yjc6nb+GOu69Y8W2mDimO+/06emoWp/Mv+KNF/r7VkOb3mklr4iIa34ADpRBy4bez1OOFmjs+eveflOJrofLkqS2j7qJlqZ82BViEHJfiD7qqqmRePg5OLHFbMNHhoYeyqQtJiW76aLt8iwKuPvQseTmyZYfxtRkKSzKtnN7ryhYHZmvrK4o5jDCS6DENdSey2QREJdJSsi2dpV+2YQFkIQtdoSKc+Xmuw7jZ8TQI1IWXuMZ7S2vdB2dQrEYHC+sgbHtHOfkTGeR9/HCBEGCAE9KeuLNSzEm5ztUyeqn3xp3aL7pbTMjgkWMBSzlda6szlXgsJcytLr/Di0USjzX3az+o3WLsW43jbRbsFbZ3WSaNbtT18AHk3TgX6ZlZ+EAAso28PhcVrHQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCiLEXwjofbTS31gVI8FYRDa/hStrY+RiPiUFtAOFQC7Gp4czwFfpNVTbtrBnhqWXj+67TwkadQhmCdJ7KA6ZkE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_122 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_122 new file mode 100644 index 0000000000..791a36d6ff --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_122 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "61", + "previousBlockHash": "CD0D7D6E197A262D149B40C0D7712D1200121DE0C69C16CFA0D65CA16DAC06B8", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Cobnha4IgjjNpX58a+U5aYmAhWgzB9SKNmLiLn67/Vo=" + }, + "size": 63 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063530267, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "F0209248536BD46AE93C58FCC1164E00F42AC74B7ECDE0CDA9D477D48AB0202E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////te2TbGpSMgy25lFUzV+Swsea6CMpFEd3lvlnhfQLHvneAhMUvALna6V+fdIjNiqCr7L2ZwrOzIvDezpllZzMAub+mSwuTJj7sR7p7WHmDEGQzIiyNt5ct2imlG8khhbaBwVvPJbUYiKuS+kT5ySXyPNlnhTzUv2kaNNvRUHfWvJoJRR75DrZXuol9CcYxe2pqtz5z8TuKLGVgGXTta+DGPbAdy0q4NzSdA1OQDLJdGivZn6rNx5Y4vKz65/nKbaL/t00QSwk/e2g5rRh9zLNPNQRiZJAPSMw2S0rY/K4lUUIv6/qaRECUmatZLzhwJAoa//8d52rted/6x5BMLKCMzPicJZ1XrEr3/5R7gQADAEBTVgMelsU2C5u8un5WTgBtsxzU7fwQnxp2mxp9ag5sspR5LSUK4A3c8zLHTiZiNrHGWViVsSfR97jh1woIh+RajoXKWI1o9MKohgFXZ17TmxxlY33OxTXtOIEbiQrJlIxYlXyZgWIf2PuDQJJzhYOjtA2QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD5HQsla6pYc173rUELdrxidiQiVwSemtOSJp1kQ4M2BSNkJ1Ei+42bn5wFgOn2DZl/E/y1q1rq/rz4T9NkFOEB" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_123 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_123 new file mode 100644 index 0000000000..89f0ae772c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_123 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "62", + "previousBlockHash": "E99F464317E040EE38114CA48CBF49EBAEA96EA9BC4DD9C6D222ADAD9FDD15AD", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:zqT4M4OO5xbVqVGcQhxekHXFqgDgT9gdtDN7aHY1FFw=" + }, + "size": 64 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882131347797691639928472277308994909901191375134389962514151511518109532", + "randomness": 0, + "timestamp": 1617063535300, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "788C1CD999105E5E4B6F43FDD7369F6B33CAC10EEFF2B16D9714426D08C2B33E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////mR7O5pDjdgULnAdeP7lIttvOdG/uwShgtxoLj7o/KEXcPikAZFZ+Q9iSMhfBmB5Ekwb9htPF/xSI5/kaRyffPCtWP43BKK1DnoVQbWdddgccvW9XyLklEEub/eH6dgaQDQHyv1/Kl7X/mIvgDFWCU5JFz4IEg8NnYNvQO2i3z3V9I/S1CgkwkcJoMBbG/BjYmULX/CH1q3BEAv5MUJlV/M89peJuZE9LlY0MfAE6cUjEigk8HYHdiFz41EwD6kNRLvnFdbyUm0T5WvABTfyusnHJI7BuWus7gwwd1GfJXc5wRprrj0nn+Mbhp3xsHKinqK/7nQcHOZM2xKC9W/r/R4CcAy7kNjKcpqJLnnt3O4nOVUD886xDJXnsunBorU/Ld7jjLeMTWtcwe30xAgnCVvmJFyI0SvalXjaYgYSlAqVpOfPtpHSew0lCrKeZyXvpg/CJ9QqRpbHEQuqy8hitHDXF2a7ElEp0hHRAfHO5x0trt3OHAjpk20WWWOBZYfntxVV/QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDBL/DEUDUBfiynlPbfeHJI1KfoH1eXhUCXjiPXdKh7tk4Ol+sycD1coFSFCd4Ihs3SJMMCmNlNJGndE7ucxukM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_124 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_124 new file mode 100644 index 0000000000..8ba6376772 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_124 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "62", + "previousBlockHash": "F0209248536BD46AE93C58FCC1164E00F42AC74B7ECDE0CDA9D477D48AB0202E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ytwpL9HHqITVi/aZZ0iAd5y5hWIXkTygj8jHKnqKhwk=" + }, + "size": 64 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063540780, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "9D66D91B9EBA7F166AB204865FF6EA137982F88A90C29684B6890E1A66303E30" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qlmP2GhX2ofzW6oWJ273QRg+a/fycAYJSmBluGZ8lTUxKwYTHCPUlO5MGidjpMbWrISN1B15C2PbmMM0WsE3yZnkbBe617ENX1a3fU/nLC4sfP8VqPfeiNH6lMuLsXaxA1ul989P/83s6WOJURyD4ty9EKHpZPrBgM0cFBeXukORweLHeVl8rStMzIdKJ5RRjc9mq6LYMJonujJKCEYnei7e/lvO5E15EoQboiGPGqgAncPpDRWbCtWVfxE9vb6xGIfVvujx/q6rnA/GuGsqhFgR9nTX1F1gpYT0vguIET4QSBlXjNBeeeKOvPnmE0j41eggnB1MOQc6orXPKJ4iHo3LnnS6F06wQNjzpYA+WFfIEyChM6undPbgM4aQM8DOnqXlle6fW6jiIxmB+3NEM+kwf378ztHWSv+Jtd/Tq9jDsMz5vQEOgWCnwyAk61Nwc/4Rzvtm3pGJi1p1vIhJL+7YfiQzDbTnPIhuyJSxrDhTMhqdgGQndN03YAT+BuEqjYrzQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCHUpbZ7gaMU5y6RI2L8LgfCYLlfEPjNkzkIo/S4paQSvGq5fHrgvyRq/higzbnk+CTKj/U037rbm0J7hkhbjoB" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_125 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_125 new file mode 100644 index 0000000000..9363ebbc4f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_125 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "63", + "previousBlockHash": "788C1CD999105E5E4B6F43FDD7369F6B33CAC10EEFF2B16D9714426D08C2B33E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ZcPP5h9FMf+7uc0+xF83p+kDacoUB4J+DRJDr0g44iw=" + }, + "size": 65 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882131347797691639928472277308994909901191375134389962514151511518109532", + "randomness": 0, + "timestamp": 1617063545813, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "835EC8EB20B37CD21C0B07C10D7F0E0BDA3C3AB916439BB1344AA763B0189013" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////snI40oX9loB2javJYat+ptAXVHa+AoIbpc5k/LlJqeDoh0ufJgHb0avcu1Z5YWrYgztay2Hkw2RXN7GgxYIPa1rlFToPnpGiBmFbQsYCPj95ZpJCGY/oNAPGZ+Vg2wU8DNvUz3a5hparAIWIiCnTnI8aF212AjO7QrQa6fQLFQTqTzuLCiIGQVKEMCu85Z4HmOzlqzPFjRvq/04gFcZmS/4j6CZfxMJT3kT8YpxYAX0/TjJm+9b72o3muxG96iSRDqaWP1aYDIkbkyqtW+wT9Cl6WD+/XWvgg2s9nvwRHhPV184N2S5jbXzwOCRhTuCMLHtrmClPFQtW54z5Uks0D9t4PAC9UbAs1ELTGdJcVTEAFaAUlVRCA0/acXa+c9nUNyNyQWTikkXvfZucGA/JjbSBVJ1k9pQqkVlfBUcvNPrm2kNWOb0oiwf6BOAU7SPjPow2hZTmGvIcnlUSNp4sXAo4KDKe9q+De6OpIjfDu6t2kVcAYqhFTSSj7hh0v8jqfRNfQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBqDCBUfoB9RXgxpPk8U+a5X3lN9M3DaQX10yRKHheOV/A/bP7uAm+847JhFlORwg2D3pI+QCQmlVEc9dhEyWkD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_126 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_126 new file mode 100644 index 0000000000..3346b47bc1 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_126 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "63", + "previousBlockHash": "9D66D91B9EBA7F166AB204865FF6EA137982F88A90C29684B6890E1A66303E30", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:W1L/cr3DvmZo0eQEJz9qW4gHbrqbhafLB2M5euXsUWM=" + }, + "size": 65 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063550885, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "9E92690DB8DCE104B7AF6F4D5C11416C3A4357C293BC6D418C1F6A4D76055C95" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pRhkNmCoi0SyvsQE/yYMCFsuKlphp0EajdF2q061UeJyDfi140aFpmXWhGTsVSWwlI/Pxf8bqvdO1feQbYzZlljlKHBP+BKojUtJ20mQ1DTJIhZcRj3i2zGz1C5avf2HEAemCvuf+m4tqVF+b+QBf1jVhy8um8KeX8Z8UCCEUdUhV+tw0P2gsChZ2V5veoL4pMD8sGPpHZuG3qIf8FKsPUSJaFIa91EijPqkowc6b1pZj7loKDrZ/DP67lREakRvgOYd8LkYeMtg7rKqe5fl5p8xFBMe5LPTQRKk0YwP1NSqX72cGpD9+5YW9AQ9IdMximvMEXvi5ykgz+dwae7GVchXESWoj89pOUtBa57oiEV2rtwMRqJQOJQMoGlENCqOr0O/GMQPD1aWpK6K/HyCFe8xhPwa67Vb3Sb5fOgWYGy7yiMgDC9EDWakaHMlP39QiPhODAMac7+6Q7ad6aOkFBHzTwBOHIBbBp+fue1TDl5TE8QarYipXie6mz1kRhehF0ciQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBsR8WlVZNKLbZJRHtCxVKPHj5pIc6Yu2pQg8MXvzL3Vbd1+bOY+DWZRbJiJ9Lz986Pu3qXErfPtUA8sXdpUbsM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_127 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_127 new file mode 100644 index 0000000000..9c4f8b4ba9 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_127 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "64", + "previousBlockHash": "835EC8EB20B37CD21C0B07C10D7F0E0BDA3C3AB916439BB1344AA763B0189013", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Qy+AE/s7ldcKU/8QHYf3oPfk6cOXsUiMro17p3vAgGc=" + }, + "size": 66 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882131347797691639928472277308994909901191375134389962514151511518109532", + "randomness": 0, + "timestamp": 1617063555965, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "05160BC936EB95C2BB999DAE44FC45EA9ED0BCCC777166ED0098A258F4B3CC14" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qE7K3GoMtJCOgHyLAGzUKtnP7DEdtZ9SEHmpBHknr7FjnjT/RfbJtIQ26ZsEg3eFj3/sigCLn/8kYxhWZUcbQu+GGUsqmhnLVYkxJ9gADbmpvqie5AvpKrpy80/W98UcBmPqFQjFfIGPk1tYwk8NGkjLpqqAagUz96Rm60TENb6/T0PvQSeZ6DQNzhLjDwfDhj39UIIBG3Um1xwDY37obE+lLlrOV7Y+SnLle/HupVHYP9McBXP09hp3jXelSK651WOtqFG7jJ+oUyaHvTRgdKR7dSFx+yVj0TS45/zaE27aXNGvp4lxh7UgvtJ6zHVDYGAZ35+vMwcmpyompPHSXwMzfhzmmp6JYc8iLfsTH1o2GZDI1sw12L7ir7DsHCNY1pG+eQaJMKZ3juSQD7tARvmUZByxU7D5yPjzB9lLgWD/inhW2l2rKC2koL//T0+B1/b+trLH9nv0YpN2wjhzMCxUjrZJvV71WQYMxHPUTVMoiSuoc+sgYTeB/UgQAekL5bVsQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAPGz+mkPcT4aR1/nZn9gN13d53JjAlbw/5eechfK2K3URKS0VkPNqM1QWNso51cAr+o1xu1xWVebogDYf19jwG" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_128 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_128 new file mode 100644 index 0000000000..ba7dc41295 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_128 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "64", + "previousBlockHash": "9E92690DB8DCE104B7AF6F4D5C11416C3A4357C293BC6D418C1F6A4D76055C95", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:sTB9k7J4lQCgtrlnAXWijHsKCUhsMBLyI7WBSNAaLS0=" + }, + "size": 66 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063560880, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "B876CD96CC6FDA5F737CBB052A5FDB140179FF3C8BFB75D55FD03F469160C4CC" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////heWka07OjtV9DVrKcGp05R14jOQzrqKZrMw1yijLA9975dgwENv2oLNQIHsy4CySijOgJ7Yq3gd31DNRH+Nh5qyJEv//qV7aTJN2Ihg4vM4UgkMfk4AyI0qwcQ3AVNBFBxeIoocA/KaDVJdGZOrf6a/3rLIWsIpcNy2imuTgLe9+XnhhiZoKGIb+hshifzhGs61OjQXA7xyQilNPmAeNwUJ7AbGqzp01HTahSTWj+VDz2eAruYXVKluKatRa6T1C8hHcgbKF5fKRiVgyY2neWoBTvniivZ0vOnutZ1QMQTEl+ZyrVXmsO0BXGDefgUp/jjbP0C2XcmIB9AnjrXAJW9XLcLNq6b/BFMt63IvOK9lunhXzRdS9MtKErasvPMU00mY35BgV5mf5JQsHfApwAUOialjr9syGR4iVvYr7376SJeZNeFsugiERwi9Gk8PhrIvqYrSfbemsgTUMl5hg1MSvtHutTFHfJ1IRtLpxixtgnRA/rp1nLBF510k2FesQteZIQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD6QjzvFTc4I4uWzBs+7uk3w/8jPY0mFUHQ72FO2ordqUUFl438bfYJiqKGvXAi+SGy9xZYdass5q+S0s3tRcAN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_129 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_129 new file mode 100644 index 0000000000..8ff05bc43a --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_129 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "65", + "previousBlockHash": "05160BC936EB95C2BB999DAE44FC45EA9ED0BCCC777166ED0098A258F4B3CC14", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:krRYLHujlFt6MfEU7XHObt3Q/wNcdLtf6y2HNi+kjiY=" + }, + "size": 67 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063565949, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "F2146BED3122207D1C542DE1D2A5E35D450A13E9BEA334CD183A7E8BBA9BC085" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////ofFQnn6ruipUmUV0J+sAPxgZS5xsEvooPcwL0BmdeWZSd0P4za1wxCca4QQhJRHXo/we//E6Jb+5sljqnyVjzRwAlVHGfqQMBkir1udNHKnrol86rT+cK9JyM+2JfxiRANxZ64ohgp3FS53DeW82vwa1h05DVjnBi7B0d12mGjKhofvoranBPvHbyIN5eNM5iuwfn45OHNpWrRlkaw4xu5lkLSoZaEfE/Xl0PXtHOSE1W8UVXZ5maFX3RmEvwAkgTkmWEAJ+reEvDVf7Ou93W3tqAiy6ZgVyXgraNOywL0E/cQ+BJHQH4jyKN9SXgFaATNhs7snd54FRCap3mqR8cak7CB4VGJR8rNvPiHTnTOpOSLKA0A75BjvExF4SBpqpYguHGYYEVFBOxwkexstgq3Fl9zx0oBZfsW64+DR2Fc7vNaWAkGgthFnSNhHfoC/bYXmcfr/YeYj2aBykZ3SHz5rdOeyEBc9icNz7QC+Krirret3IHwjjUm9ETs8TB1bRq6YAQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCLXnavBaSyCySjHeCTPFnOMbhvANaLTmvuXHjwT+jJ1XYQoO7Rd0Q4di2PEY/wD8LTp+Eic1SGKs/8YOD+CXcG" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_13 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_13 new file mode 100644 index 0000000000..f317782ef8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_13 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "7", + "previousBlockHash": "36FCAC83F194085FC63C0534DB0F713960DE0C7EB0D2AA22EB0534C4887D74DF", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:GT0b5zCuR2wkUHNdr3gby02RdUYNVL5RSXmfeOn7VgA=" + }, + "size": 9 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062286010, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "3CBED088C9D86FAEBF2C934A2907CC9EC335AFE5E9BC24C73422E60E14A10A80" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////j3GEg01VUVErkRs2RkMBJc3xTNnctkN0BHLGFRuaKhLAq9kuZPaStF1UU+8pMhDWqCj1N14cD3dYRmC+dS/d3ujBb91JgPRAG8s0VxfVu/+dy1RCyLeZoOOaiwIrRPgYCRZ0WEeoNd3s0YnNXn0pHlp5iadWwH77m6HfGeu7XZ+suyJfc0//uNnXsEBJQsFJpc314qk4a2tOZxBu/ZSBs9/mRoj/TVAs5JGkBNYfkqpSMm4+4BeK9YHrTKpeWNBIh5ZevnhsgIqCndndRGJ8fX+c56Osd1BCqwd4nLiCdGlSX7vV4qQonDfrl3+uBANcW/1CN3gEmDOx79fFHoEkDHdxzPKtkR6Z3CKwhdJ8kzEr8jVUU0/azgx7avwn263v6++fQbiVEaT1YNz0iECgu+QFzLrff90oqT8GYWioQRJHAhU6D/LMLLS6EidelU5Nr8VoTEv/c9ZbXQ6upIOZIZQ4LgFr6XHTtJluOzc86OL0T0pZd+SAy4ij1fCfgIq9SU1RQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCDRGw3e0a53rlK9n8qk9dCdteKhs4tDoz9F69mFwvGj9LwsVTmz9w2OYccp5+Wba7IkVgF2jMq6y6U8WjcohYI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_130 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_130 new file mode 100644 index 0000000000..4002ec438b --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_130 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "65", + "previousBlockHash": "B876CD96CC6FDA5F737CBB052A5FDB140179FF3C8BFB75D55FD03F469160C4CC", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:feOVS3fJ6NsD1WPd9u3khZoa/F/tG2U7lO+0ukVE1jY=" + }, + "size": 67 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063571289, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "03408549A860934812449D7E8E45D6F2046ACD62F9F2739200C42357F3FD2664" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////srwsv47M9qM84y0/SlgPadBsw43K6YWtFvz7VB5/KCTdBdCnJl+KK8eDqtqkeWZvubbl/In679di1r2uXs2nnR3+Vyq7+UNp9ZROG6DJhD6QqP5zhQUUqT/Z6ORVrnjDEHo3jY34rVEi5OPO10syQCJPrucklPR/LgFW/ptSGOCYFNjyTcR4oDvtgklm4xPtpXK+YK9mOMVNx8GAxTzsiZ9VMsi5MAnqb63i7zyrEdarQOXHZCujTDyB9nEKRKWceTy9wy4vG3py8dzHpiFnzrNtL1Z+V/gqHy8ba2VuojyT/eyk8in5lWAGRGM2K9oN8Mj3VUoxPIWyuL13ffTJRKQYErLvHeapwopYLv2IfgzQ6PEIBMfuRplQAa8utgGIF8BT7sN+lgN6FVLeZPJS5hzDRmU+GlPZqRH/eLsRUNb8KohSFsJ36MRk/6gYOxBv+bllwm9P8BTbeC6fc7ig2lC7LYF9xHLAp7Ha323ld8X3/RPoyQnUONH2JET+Ly6cklnbQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD/kBEuOXKWX6/Up2bCqhdDxcwTXdY0uKZ29+b3Qi47BzlbfwhOa3KqCia0ySXI0HPR6JIVia7mWONLuvbHa+UG" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_131 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_131 new file mode 100644 index 0000000000..7aabdcf233 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_131 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "66", + "previousBlockHash": "F2146BED3122207D1C542DE1D2A5E35D450A13E9BEA334CD183A7E8BBA9BC085", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:3lJY3t92P1SJnMfi0tdFcpEdEMP7nuHQvBGifpKDPVY=" + }, + "size": 68 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063576376, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "74A9BD1BC1DE7C10F05C5EE157E875B84AFCC52DE2A9E8A3A8AB706E3A76DC8A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pjxyDUOL3LpceQfHJgXHmWqCUHTr3yekKIZIcbdqf80TEcmd68criowBg2w9znxxqHNtK2zymqSZ7taDnBmh7rDyyFNzaAqbC1zCJoiLW87PKw/gH2oNPDHQBx1w9YO0Fs7drDdGS8zhBvt91LEKWu7PcdFv+tWpSF0ST5uwnXy3/U2H+nhP38f3KDlAHJNAh/5x5yNzepHo4iVXQnNSsLyutivZWqcE1ideeDBa7+naBtnRkWaxy2/j4AMVYPul7QFe7HhNlAbzNeWHmMN3RIG+JXilS3/b/NqgiXseq1oOXyBNYyLoD4AgOZ+/9HqHtS+DOd/8C/QuxvkDXwWlUiUV+ztsAcnXCzPpJwPAG72t+YPMAxsaSLPBsG/l+Nal4xZeFEE88QoYMwcw/g/JdQuu4nRP5hQ6B7ikNi5VpcR+qKnDsv9Mn/YBfNjgvUimxJZo67tHojjZUB/YnwTig6Rope7z7YjjI3tFhrKz/YhqbU/nAbrEwek5k5r4fEnmahxYQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD5WgSkoCKcl23cKGpxgbjHBdwySdVoeBuoDksV6WVTRvuFeYm1f7k6M7fpSGpr6ObadQzNZyiofnhU/YK47JgL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_132 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_132 new file mode 100644 index 0000000000..947cc8d50e --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_132 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "66", + "previousBlockHash": "03408549A860934812449D7E8E45D6F2046ACD62F9F2739200C42357F3FD2664", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ZtWjuoBwQzz2VHaitCdQmATUQoTOMZBXzbFmmF8LlBQ=" + }, + "size": 68 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063581429, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "227928843C72A86FB00E8871392298026C8BEB59895457730743FE23D57F616D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kdzOjsd4rZot0voMFgsqhYkfkJD/EGRoBgVkLZs+EBqfA5DgFVMEwkHWUqAiE7KSrOjWCALX9TvVpCE6Q1rm6NXU10/4Y+56fkHEPZvTHn0krpLmrGnKGGHdqT0e3UYlGUJpb09PjxzcRlCpm/OW3MWg7kAG3xtp4WvOuL4f+u0dCIdyz+tXYaDvi2/l9Y0cl6eMr8JERokUTqGOYkJLzHml8MC7p6ykKQfIexCOspFp5cgHcFfXcqRsWD9p+SAUSJUdEThhCojxyhxBFCdFd8szUxCKmPDJeGunGQOn97Ld7QKZU1XyqWDOBgfi2FMeO+QLfhPtQPnPr3Ym7ZYWJhZDEGoS87QwEt44N55gCI5F4S0883ZegM+Yel9jL0BA2htTiFUrCo0/kgel1sLAYUb10WrEsvpMiw9kx5Wkg2x9OJqtqiwJSOWT6dEHZ+p3k8DNpjExRyZbvzx4A27K5MGDBRqSFDKFzVwHubRi2V8PGDZU5Xykox6pvT/PteSEN9e7QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCia441oxswMtw0n+yTXGlHE4aYPWx1GtBK1vMIJpGyoRRNgnWoaCQCC/j2XdJ4dai7x/CraBrYMn+4wp8De+YJ" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_133 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_133 new file mode 100644 index 0000000000..63f5286d2b --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_133 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "67", + "previousBlockHash": "74A9BD1BC1DE7C10F05C5EE157E875B84AFCC52DE2A9E8A3A8AB706E3A76DC8A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:H8WQK2uoDceQQeRau2eAmOx8e69ZFEGNaaoVulkV8SI=" + }, + "size": 69 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063586786, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "8748977957FB25B786595FF91C615EFC682D65735870C02956ED543A71AF91A8" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////ka2T8QZqnydz/USNX6i8FOil91C8Fdh6yD392a0rXK2eTRnh+hNOrYREoFfYwmaSj64pod8CmfmEI9JRkgC6cV3DafkousWtPObgHYsZp33LdOaMRBMmTHPAp37/amqaEUJDDtmxmXoIt/mCjU1Kv/0zrE1JBQOrBVpxvbg8Hh6tDUnTGEayrPASRkCsq7upksGcH9D7F56pkZtMJiHPXwz2vPdlZZ/U4GkFo4bFlnXtmJjl/PsErU76pI5zzYTlyF9w617VUieAF2sLNAbqi36+3CIKvAXUtNpkLRNj8N/LAxU5AxoLK+ahRRHLg5DDBBdaOjHWel5q6dF0v/R/GLY4VTw6fHDrSoaebIZhyZJdc0fBlCFVXFfWoQoyV4hPOToWVehgZ3x/ZS1HhJbpT5MDfTwWVMoa8D0IoXRf5KY61p1cBsvSqZXsd2QeSWKoBR+vpmoe4Q3aSAAz9dDZF+vKBFMZCq+5zlEw9ROIyz9ZXo//Qaoddbwe01fyYPQDL7VEQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBlE/uxz2rO+bpsMRLWEclq8gPfYEgA+ylAhnmTxlnWNNLhQHu4bv1dQPY6KlM0Bb0jzt74FZwZHQt0g/9qStUJ" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_134 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_134 new file mode 100644 index 0000000000..153d2139c3 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_134 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "67", + "previousBlockHash": "227928843C72A86FB00E8871392298026C8BEB59895457730743FE23D57F616D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:93KDauVvRPOGWyhww8HbzEvPs4qSjbsEbPAqeh/Iaz8=" + }, + "size": 69 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063591776, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "CE4D5293D55CE1AC8A2F0F209026D8F7681F5E83ABD71C3570BA652CF69C60EC" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////g6zo9GaPkVqKtmiBV7NHfkbQAVseVW+xhOQ+6cGnk0fM6ORuFRV2Tec0fDfAM2JSrcBb7LjzVEfVNmqhMyZ6RrBou5j6Ka34oPcAS1pEzvMwnI+5KOOdxE7JKTApJa4xCB13YFz+vFBUG+dXsd7XwxS6nlw/2mbA3cQ9+mQO46kDm86LYveV+bSigoLbY8zBji3wEJi17MEXRdLXDwh2U4pKtoK+Ve1/rupdRNRpOUCRGdM8VK86SDt6NIuHdY4USx0CfbWhJWvYua9y8vsdPGat6Kdl7Eu8jwtiSMSTHRm6uvoOBuxMNVNKSQd1/4sYvES2UihJ3Q8yPGgDMfsXZWiW9lTSs7VpkboHBMr4zIF9nV167NODvCQRPjQ6mZ9FFPNMcQX5pjpGNRHvCLJ+VJNpydox2Zoo2mcNNwSHyIO+ymQMEYN1Kycbky4+qisB8gTYnxhtMzVGaYe9umuBjCmUIzhHLudksqqPhxo88OZ25DwNJrQfEOuI24kVvnGElwPnQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAErQKz12h0W2SIvIicuJgP2g6jOC10aQ9K1PI+B0GkAgZLLhVELRMtbsIi7W0Off9oRy/jfWAe7Ph3OVC5oFMC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_135 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_135 new file mode 100644 index 0000000000..a9488ee5f4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_135 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "68", + "previousBlockHash": "8748977957FB25B786595FF91C615EFC682D65735870C02956ED543A71AF91A8", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:GRfRB2W3K8qyQoQpNJOtFkNA5zDtVjqRcB8gf/7OnyE=" + }, + "size": 70 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063596846, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "644950A6B5FFDB9CB652DFB884817F6821F241B561DD25EDD677AD8B2CCC3ECB" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////haYcjdXFRgOw1YEyoJfmlkFap8MXG2BpBMinTdWq1omRIH2hHiJD5h9UiEyIQwwyhzqSlbEqQi6jnNj311VIA3HByA+diddKP8xGFfcRS/bLsxtMu5jCMrIQXt2YTA+bFU85YACX8+60bQPwpjhuttkRqBnglf0a51jrXwLCZaMncDsByX9dp8Kt7nGWD5YyhG8d38MBQ7IF9Sedd4feV95fBkKp6lqvyVhYiGl3Dkg596IUXYi4BKH2DOmGMnxkNeV6Tk6vzoNzXnHOZVH2iP59GYcd6cz+2+NkGbU34evCxBmIpOLEH9zhHOXr0VKH9og2ES0UoVhHc24YhhneFjKEKeOb2aBQvA4MfXufKdmCrgcwM+2TISf0v2Gbdzw5RqHeotxohXWoc9AlELqtQvJFRKMOLijgz7WplA/MqhZn+Y1tLgxBjoxX2Wc9Kh06G88JAkT2Ev1ZIfWYiNywZ0sXM2+GMu4ZxmcKSQmnSMjbtg9LOljQYRWFkqh4y9ukj2h/QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBd85Zc4Nnxsv++F8sE4OiCiFdgVEKdBfdx19LTR7SkriqNyNw5HjBQMnhZg91ps4CvKYIS7zuWkoaa/R8bRL4E" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_136 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_136 new file mode 100644 index 0000000000..ba15d1c379 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_136 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "68", + "previousBlockHash": "CE4D5293D55CE1AC8A2F0F209026D8F7681F5E83ABD71C3570BA652CF69C60EC", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:uQBe1x79xJkp6hUgdm6DCpoIqWeUEDNWZ+ReknYGPQM=" + }, + "size": 70 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063602007, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "9B68D0CEC95430163AEF1A98AF1B47AB38B7D3E4017875028EEB31A9FB2D04E9" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////uAf2+QeBrlzAOVIaf66m7H1eiFvrrGUNW5HtEaavrvavPYzrLcxsdUT4IVtIR12ttRHvwzdOqNpAniZFg6jgfxKzdRsEt7M7CToMBjHiXGJRXuPSffeWbhvfq9hZ2ZXxBD6HnKaVSgmgl8qVDfN2FxSlekZvw/uxSGanUEvL389JvWgY+hO0IbMQCluPyXCIqsWbMsJPFipXxTLAkmjVajKhqZkC6kJOuz87HNx3eGdHwss3xx/h99BkvkGrMn0X5ysshRXoPTmdtCvxAG928SRd5GsLhk449z5LTm7jjblxRIZrA0oUlKgAKNDPUQDWbwvOZXKBDKuO5piOJWs7XhSzdTWKhsKOaYqnhksHJI7PJGC1ppa3GG25N7zD2yyGxmgJX1ZZuIF/hIfZaEzkXRwMsIrgUSZFl2dEHvkP+eZDn6ngXrSzNOSTAsl2osNuE/cWO4IzCrKjcgmjGsFcNSEV9m+VA7Aj9DdkYpcP3Myc+GkFSvOwv7qRkOkMyeuCYSAJQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAqPU/3LoPIBshRUX9mslqizNJhQLj0JhRp5BiMEtknFGGnHx0+xw21VXUoqhMZK1aW99+tZ3OGEN9qozbFv0gM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_137 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_137 new file mode 100644 index 0000000000..5171863a2f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_137 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "69", + "previousBlockHash": "644950A6B5FFDB9CB652DFB884817F6821F241B561DD25EDD677AD8B2CCC3ECB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:9PWpkg+TrWsab3pFPLBay3SAqiklIFzE2OHduH51glY=" + }, + "size": 71 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063607064, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C8928A422375D4AA63AC67CC2FE58F96E297E20608166BAB8B42418E8EC08C80" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////twWITZuzP5nIpYH2CVvXox1AUtGcAtbCgDYokyP94szNlWxtSN7P7fdy1BowZXWDtvwrhJd1k1xaBz0qyx6E4ESc5yn5BJ7QHIEitCZc95j33nuuQEsxAKr+fJRHGY5lDGld97dnKDPDNk+ZWtFMqXnngIUrZHY0vwOA/ox1a7/+mnkFa1Y7a1ApJ4omlQaZt5iIUpmkGA0dRzF53b1hcowACBySBUaPfhVBz5HwC74xQ4cgBJJShdPhib0PnoYjb9d4QNEgXHrofITYXJpNgdmUqxQUUB+5vYKbn+pHDakv/oVP+ZJPk1TrmeNmVx4dbHlnRquWsopGm2mg/hdJHIZ22jqCPm33p4woxKa0lxk5aU64nheOSaFY6yJtY72MSGB2iX91tN1gPHCkDIjfUiM4Tja/hJqWVMt6BF12RYUfzZbOxbPnOXQUHZ/bdb2JtC68cHWSvxLkdcOfpPjOVMoanOduGTebzqcSw68kLuU5Vg5PLbYFK7ekbGmPsAldf4S4QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDoLJogoQp+YOty6ReLMPSkZrmzPbaJ9Edph1z8HwYC7HmseCYibOh2ctW7K4wikKM3M22nIkSFAMGkBwwep4IA" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_138 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_138 new file mode 100644 index 0000000000..8ab143261b --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_138 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "69", + "previousBlockHash": "9B68D0CEC95430163AEF1A98AF1B47AB38B7D3E4017875028EEB31A9FB2D04E9", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:AiACPSQX6xVNeZsb6/yXBli8OID6FAcTNrDTd1rzq1w=" + }, + "size": 71 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063612218, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "4441B7D1526F007D6938CE84A0EFF148BE94A6118A63DAE7E72EC67F4EA01844" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////to7GSF7X/kUnz7Ra9oxERoz4vJU6uoAVytOk2aHjHnZTv/B1tGyLtU0vwSL1+1yshrIYmuBgx3T4XP4J4trfnz/0T8t1fzjeOQsfQ7fC8nxOE/lWlImjqAgBj1JzdmrOCnpp7y1hSCqKc57NI9EyfgRK5hyK0Zy4bQNgiu4aYfvdwPt96pEDxezhSP7yju72qCFDZ1xeDp4KeZlwpd/Kyiu2WH1T/b7tA3tfhLD5H2hwE7U2mrR6NNZD9roDHNcgQgsADKYiyI4g41+yrPMhgxNoin6BL37MDW4UDVTMlOniZVEum7ogJavheg04LUc7r45ymsFeboVXTVk6OIJvD9PqJI4Zw6sLRrvFAVZ0jwWpeLAT+XPQetqs/7Blb8rP+y9aPiS912MGNyruru63r+I+esF6SbIQ+M2SYntpHAtR1+ybRfNFfYIzSy82Lws4mb8Fps2dOMQIzbMKWaJUUo1UfFBdsZGq9S4xB5FH+01YBO5uVhet9WCdB1meljE0TIkIQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDV9apmOK2Q2YaZjmodWkvf+2RocDMSscyphYfop7ELVteiXstTUoTTyk/DXPCkRJfH3IXRrvVFR3A3+wTobx0O" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_139 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_139 new file mode 100644 index 0000000000..179bd9865c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_139 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "70", + "previousBlockHash": "C8928A422375D4AA63AC67CC2FE58F96E297E20608166BAB8B42418E8EC08C80", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:1oly08q/XYMJd26XGK2XEbllQOGqZNm0XQtirjPqpQI=" + }, + "size": 72 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063617620, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "E99BBB3D93CD38E06B0704759275B3AE1C167B26F704DA6A0D890D2101AA06F0" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////p86CAKd4HkwTpbXI0ZuwAAYXbgxM29t7FqR95O16/dILf9fb4nrZdGyKzUtynkRGpufoI/udojajoeZnhM8k5vk9B9EwoTO2CAmH6Gldcay1U/k0vUP3XwhOL3+AQbscCVblYmmW78wUF4mTaDEHgBvWQ55tw4ogFFnX+h/k+nwm8iNBBVRlG2ik9n4g4yZsgXdXFKm7RlMmuRMLudadMVLjzsXbUh0CrT1hVtu5iz6cAhVq8OBkEdYIgbjADYugfbaTiwBoe3pJBqZJEdVc0K8awEJoPGbzfxLUjDXdXuJjTCxrSPcBKnb+aitb7YZMz53y8Jp6NBafVNH7n/EqZuaLABzOeJkGlV93KaZTEriP21oJhuiaArwIsIt81htEkRg3uWw8Ghq5CEkV38AV+PkVlyaShNtpQP3RAxX6j+KRc2fmQy55FAjbN4BcGE1TXyVNhTFllaxSnoCUHmwNszEyaBzo4+v2GIdYubjxpdHBnbWEbAlhGpFXB1P7IB6XomapQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBJy0Drip5whqf2WkyoHYRPEXfRbQ9cTIc0ZtbDZi40DJEjRmV3iOEAdZr+1qaeFGzNjHWkSj1jcs+UkWtfLY0K" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_14 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_14 new file mode 100644 index 0000000000..b89207252d --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_14 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "7", + "previousBlockHash": "593A95C049EB2A4DCE49277DF5C8430373A00B9756DCCFCFAADB48400B35D6CD", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:YTiICNz7Oj5gQ0or1DtSCOGg1RFcaFKIOMLkezZVokU=" + }, + "size": 9 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062291970, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "1994CD8D922B3559B3B7D4174BA1E5761D731CF1D48A35414C5B294A93297F3C" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////iH3l3r9CsDjfUaBaoVYwb5qsSTzv0NIrBIGDXiB0GjNJMMZixFuDQsa+ALNn2zqyksVHMMwtp29ZrxSm9bUlluA8iHxNIADyIU21OPdXf2EJ/4HB8TA7HKHTlOa/qLpkFOoahLxZ6/Hmu2qrcRlyeFdVKy3JGHB77JFLBU6/Gd3Hx4lBa6cob3HHs2GT03OyuIVg4lxLtGFsxqpwALowwW1rxqJoLY3FwO/faYa8pW0f9ymRk91lbWib0HgK/SvhuwA3POZSh04/DazYKuFFTnuSZajggxpEWgJJ/Naae1Mwyl1hXq051uzMK1om3xLTuUVxrYdCFLjewbs5DNuBBsDAdTMAJDopblBCVmpKw3WXB+wHqcCqWQYk44NXIHsgkibfW4yRn2ZlXZ9MT3kBok88Bxxlso03gS9OerZmjBiQhjHXFFlZRmGLKOJQzl56xoy5ckgpnM/RCNcE50wReb0+wAGkJ4dPx9FmpGv/9RzMXW/UDjFvnDmuAsbZI8zONmbDQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDClO/pqIY8ifQfmnCx0PINdnVVkHZPRbu1579y5+YtHKsqcZW8yxpiY+eY0tvAR/VxMXBqwzrQg92A1HWw4+L0E" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_140 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_140 new file mode 100644 index 0000000000..41596b68ce --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_140 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "70", + "previousBlockHash": "4441B7D1526F007D6938CE84A0EFF148BE94A6118A63DAE7E72EC67F4EA01844", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ryRebC1FQy3qmstbYUogyZ+CjucaHBEsh2tll6cPSBI=" + }, + "size": 72 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063622445, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "F9A39089164EF0612B61D1488ED95B036F516BC68C79754D1EF174DE8C94BC23" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////r5rH9WatURnCN6cpROTQJKClxnYMNgK8DuV7ze8e5APUgCrUmuYDqabTtyxM+DFtlIQ8FyCqgCjONZmaKmIwLyXpHaEpqKbNxtvXKJdogm5GFZgpkAwumsQSbXpQwrwqCQ8tohZrZqc5NHGkjYyjRA9eAIscPNPkUfkdmu5jcBSN5g1nK1NfsH1tzJLEMUMDrY1ati2pZLentVJ/QdiBILtMeiDoYRJ/5eWFipX9V1gQfHRICAr3xkHXo9+h+1dc7W55D3iM4Ph+9kQfjxvNijZztfK9wGDUhD9kOZwbWcP+kMJ8qXydG+VXI/X66VW1tonVe0vVuktbUUCvg1VrEz5qFfdhTo4rmQok/FpJN/+L93sA8pen1g0rejqi+ZMY4MK3vyH1U0byzq5VvOD/oplgVw6svP12ZIDvtGYnRA5qW4Y8/6vIjQvtXl7f4CgYkG0pEAUDmu00IDI1lrNQLPYt8DgV1LRwM8QtHIXwKp0jwxsp1jylKqeI5JkZS3VEqXNMQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB6VeHeMs8gonz8cTTwP87MvwLe/75ul8oY6ZYYsQRB6Eukbyei/98lbzoYkWFJFO8ZdaJdteiSE6B2pDcbll8B" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_141 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_141 new file mode 100644 index 0000000000..c09efb8abb --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_141 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "71", + "previousBlockHash": "E99BBB3D93CD38E06B0704759275B3AE1C167B26F704DA6A0D890D2101AA06F0", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:jVENraeZVXCj4jH+0xWNBnO0olUVAgwMHYCNFc1rbTU=" + }, + "size": 73 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063627706, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "AECAEED559A7808B74BE200357373C5F11A3B1675326889895316CE1020B5DD7" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////o3PvNP4DMhQVAr24Rq/5LKRDaxTGMtW4w41sp9lz3QmkZvPXKv9vqFYl1NoEx3s7hwjirshGcbinEGlhTZR+Irl95Akt79h2U+bPA3Ik4MB5ntVlsj1bPY8BEGtuyNHOBlUSmStF9gv13e0vnc5jF+f5B0RjGdRMC964b9m1XiCqwlWsHzLeCbXsrqR3XfuqiUj2YrxUazQB6X3Lj5Sc+pfRr1n+vdWHQ9n4wjEgGQz3wWAglYeCQgIPem/EPIFAJkcGnt8cgTNrWfUYLFAVkhPs6WQFxIVauaezeLbQZWyeiDjmy+CEPbalpaC/EeaMDlsETj96fqstaRDuM5SYRX+9DiWD2N+3U1vFufwjm/iuO2RxW6VQHRyaPvZCXUGHuvtexx0g84D9/CHQffWdxM1mVlBs1vf18J57n7hC9AEmq5bt1KckkX6YrT02BZNrM9lbhAtZ+ocJ3b1+z0RnY4H2ja3vFCGKvrJpqqAjT2eB9YqDdA6Bcr7FwCq9xQdeE7giQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDqA5GCyhXS1jA2RJWGvwpeyl8nZShrJXq+uwvz757RbbAsu51mY4QrRsglWVGGTqA+lRnEINcsXRpkjg9Bw40N" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_142 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_142 new file mode 100644 index 0000000000..ea39f896d8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_142 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "71", + "previousBlockHash": "F9A39089164EF0612B61D1488ED95B036F516BC68C79754D1EF174DE8C94BC23", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:t7NWZBaA4zBEsVn0fMADRgxBB2NAvfAwr+NwBTAI+yc=" + }, + "size": 73 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063632951, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "821D65C47B824279364AA1A2D75C8DB93B01B4C6406317DB25EBE83B320A179C" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kMTqpdxrh5mw2PomQmRGyL6zLnR2kL/LmiqJXc3UEfHTFxdyongSGYQZJ+0zLaZysxPl4PoLvVHyQksTOjSyHMvJceEhXXqxYiR4iw+cwfyJkPc46bq1xHTSgl3MtZx0CCzqcdeGXhNl5O7r7aVvQFGSOoBajWoo+3AB1cBhHKc2n/xeABTKLt6PxMlXBIn4lFx8wieQR/dxQgmc+za1/HCfvMOrqAqxA03KkjyCXV++Pj9JdtTkgw7PGOFz74Q5ZP4L3A2F6L2xl84XXqO9/0Zo2oJWgwMbuMdrZv+dnjVDKxPPnDdAIVcOpCQPi2hIDlx9Fo1Jpw0KJPJQ4N1iT65qA7lV31vDFaL07AG3NJKxVOBM4b/j31xW0HxIvZ0YM1OyWojcJ27sypK3gHgot2Y+Rc4ZLRHYd9oZYx8rGuZd2h0zRExrDtdFyGNM2bUlxVYMsaUkAvVy4aLwN2vn8QFJC6PzFijfhPsi7QAf++S5gBQBEe9YFgEBupmiXqpXp6gfQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCJ0bDTVXJoWclOKvX/xjbvcb2FgyLA+l/+5Eb4DudwVhfptCukIftjJcbg6k14Zekjw42fz3Aq5GdrJHVrDpoD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_143 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_143 new file mode 100644 index 0000000000..6c44726027 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_143 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "72", + "previousBlockHash": "AECAEED559A7808B74BE200357373C5F11A3B1675326889895316CE1020B5DD7", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:uC3J2zeB37QetBLautvSeeVgYwGE88kl/Nst52D0cF8=" + }, + "size": 74 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063637928, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "0B156CF77A600B1B0F803FE2886AFECE27F02A7C96BF6AE078DDC86D289C926C" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pEuTB744ons9p0RJm1DOPvBOqxrAG2UhQ2gQ0JI1xcKJ+d8Ask90hk9dvKzudKAEll9X6XgPzsTPVZdJHN5wb8Y8gvGQttZ6XuczFoeMlTQPmHj58H7gjfa0ItI3mfDyFyDSx5XAcikf5qhO8TO7ABMjRaXX4Qvh/VflbT+NnvF4mB3rhMBJuqqRoCfsSh5xqoAQy/+XSZW0mnv2jeHD/aK66sDkOdHQ1xIHda220704+A1ocguFfK7cEwhXMeM3pmi7Ms5bDBRIcsP4mJCm0oys4liSjxjASG122HoLmPKqDwVgfZnMsi20Keyyg+w9/Rs4KFBaxshLJnVtca+lIDwPRXRopxcFgXeovxX78kwqM0kYQLyVSmoYBUMfWc+jYQbzzEGHSXWLImGohgTUhbsNL+dO2apxKC0vHjJp4BTYX54mMwPI9Y3GsVZwuuX3MBCXUrkyMXMtoJYYfHHYYE+d6jJvi4tAFTRZHLiysCc5kAzkoCH3Bfyivxxfk17B12ivQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAdX1sYAKWY6pixa1GSUPpS3TVonXDgKiQHALTC3MlVDA/RA3j7tSNxPS3J6zAKz6x2CqjiT/o/6wzq2gZSkKcH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_144 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_144 new file mode 100644 index 0000000000..ef64d0a59d --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_144 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "72", + "previousBlockHash": "821D65C47B824279364AA1A2D75C8DB93B01B4C6406317DB25EBE83B320A179C", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Vh/OpviMU83ragf44mGIh3D6AcIy/zZW1UZ++LIzOwI=" + }, + "size": 74 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063642997, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "8F3AB5ED03760A006FAD3DFA2B02B418375F68B5629791864A325FE0941DBE43" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////iEO2YLryLcT9KaXHchoTUIeIUYN36zb3Qg91IrYtXnlwqNuhisxwwnF0Tu+IXIY2pfx08+kpQFj9XZFvA9nf7glVrTMATFETDfVcOsMwCQVbj/G5EQ+VuBVI6zY5WccpFDQvUF3Lt2n4pqIcnSl2yjVBAu0OJ/4Ig2+9sSBZRN3pP4q5vT+MVNazBPBlhq2NqfAqBNwbSvqSjVN1sgqpvZ02XnyCcCcy91P0V4dHtfCbiKqjjvYzYsffvBksd0/Y0wMtIZmwqFE1hzAl3KLGuvzK/O7iZqLe347wJJtmoOmom0uyhoG9HJoqHsG7AnuJlCGjqN8OTQDAUuZVVHuBZYeWnFfWxJvOpuniNXwCzUv9z245aQ0yG/+4KzsFn95RTX9h73nwdNSpZABUMZ+aybXXS99/e13GKGDYS6mf/ia6SpMur3RKy179wUyuYczbyjFC2YcScFetTKho+dmL1t7o1W+hBokYUjMGEIc5siyFEuItzkCGZgjpsR6pSHiWIXAFQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA0cQDuPlfll++4fiS5m56cC6ztlI7CHfWD97Dnkj/GhuLeXenqrl7om2wzOQ6L+2MSxBzhCzf/HI04dhyH5hsL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_145 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_145 new file mode 100644 index 0000000000..8a8b8cabc2 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_145 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "73", + "previousBlockHash": "0B156CF77A600B1B0F803FE2886AFECE27F02A7C96BF6AE078DDC86D289C926C", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:UaJ82zC3HDQS5DcBvQHQa8h6MNQcs2snQtH35uzw910=" + }, + "size": 75 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063648289, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "ADC38AD072D5348E1F3FD356CA5A2058386C26F677DF2DAD0C107B2200EE892C" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////lOyJfWiKpPxRw/jZYUn3W6KS6hejCEoOnd4dTJhFH02p2ko0eV3UZBY1c5b7KO9+s2RcWI2IoTakZwsCPMo0ejR7b3LrykDh7hLGqckCLwomNu3iFZbx+ceE8HrE4yB/AI4XYxpDwfTFEzhrhk9cV01rfSlyNNBwx5F2M6Y+gDO1Ow+n1D2U1LlHScWOFonHka7T5RB08f/KtORHfiVnac6ndUY819L4/IF1kjWcSMV0AoRJJVIlljZF2PnDiTVSCEMZ/TAJPxwTQa+HRLugu+b9a6gjHsQScj1fvk5KQb/amIyRJux16wPqUf5X6hsIFkVjngvs65BnSrG5INO/J+9exe0GMOdmJUq7BNz0Y2c2BGU4NvnCNb+uciftienBTMjTX7B2eABbObe/OFM9yCmk1xu0R88ZalhJwm0QHv+jkDNA1UYjqjosdb7VFPFWraW2R37PrZvXV9eA1in9quQ+kJNpwHd1SkUl9Ua6eE9hfY8SwwWxcFluYnbugaxeCDQ3QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDJDKarheafAVwAO0fODkFAoa1stNS8q5FnjK2ZJh3Chn8fQ8Me4rBs/4WtYK803cHpF4WHGYGSvOZlfvnuMGsE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_146 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_146 new file mode 100644 index 0000000000..57b9c1cc37 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_146 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "73", + "previousBlockHash": "8F3AB5ED03760A006FAD3DFA2B02B418375F68B5629791864A325FE0941DBE43", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:5TxfDd4V3ihKWcdWa7JxoVRP//gLlbzy+NHZlEJ+4WE=" + }, + "size": 75 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063653127, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "2D83A86A3090FBD82A434B7959C76576B07303C900CC01EC903E052FF74A18DD" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////t6jkxvkgLV9hTKWQ4YSt3QzTQfUb/CRANoR86Kqjn0xi13w+XgfrHCD6HXmTkYFqsEYm0GPtBfLSEVF8HKRahysEYf3KZsokTxit9tBtwj0mEuEejLx8JAi7ehdMTWubFM5b1mHolg77qfUrlKFJsFS3L7XBHikHGWKnFjlv9I22YkBz4HZmxuqHgvh+SH1AiIC9gJR4ZJJdg+wH8yAQNjVMtBZQhUXI3gKIUPX9X7uRaVOQKr6KI02VEnEl7uWeRMt4gNneRAxoQUfZxgfaj3Y7gTPeUUEIO778gBD6qEaq75J6t/n0lvkDfj04KDXkknvZrdaLniyPUntLDGsjFZT9+MonjHlrwqrNekCogT4QM9KNjpr7MjDjLfcgFgkS50ntciEHIamUHe4a8OlgaeczFRCuMkn9/M9mgXgCN20L4h47KxpTNwUCEFuscM8ctq8l0PVeTFsFrGCfpYzWiaEuQSYYKgEvsG3BYxKrplCzDUYppbuVbrPFyi2f9Lh954FGQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDCnlxkLh3HsSdktyzV82AeMLr7x6SkzmYVFWT0/Fze6IEpX2UOH0fY+l+CiCilxy6Wa7sL7Y5k6qhpWD61mNkB" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_147 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_147 new file mode 100644 index 0000000000..91aaeb1ec7 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_147 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "74", + "previousBlockHash": "ADC38AD072D5348E1F3FD356CA5A2058386C26F677DF2DAD0C107B2200EE892C", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:GlgJTdv7PA6rLBOlq4W0ZN3GS2khRzIuX73PsPwXtS8=" + }, + "size": 76 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063658312, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "32CDBECADDC73A4946A011ABE0D342B051135729EF90392807B04211AF8CC4D5" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////klFXFoeeqbCgQH6bnaJLpgiaC1dj9L6gim8hy+ilPZkO9aK9RF8mLlHUVYQL3dP/kUisejXSbGh51FHFqaVhVCKlTw0oKBl5tSTINS76PZs+H4/BEACYV36jF+5VzPIfAjabS4FIf0HHHJrF2Jf8MijWZGUUq2amJhK4+PN+NQTvJq7YVKV5PSGSF9i/W7WctnCyI1E0Mq3K0YoFPxexNv8+jFVv2dvdUgUNilU4Xzjz/k/DeM5QWgymo1mVz/bAvxAaVOUUxs807vzHEul31AiTq73cl1yvJkKu7GBTWiCLuxtRZxW5olHf/92PGJCF/S2VwB0+l+pzX3YStTkhYkS4UIqwWe7QaKBT0QPoBxhAJGsnydrIntBuNwuDAcSymLMKdmb4dcA3OAYzjGc2s8f2rmaouLMrhy+2wDltDUmjnKF1FfrsSUKFUlSWjk+qBx+KuivpEp4R/6tGHgW6n5Ur13Ze/slhERjTNdTHQEOvRD3ZBdTFHvtF5s/ToAdfY1oUQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB6mCYDk+Bpf7Ef+a3Nfk350zbgOTNpl9Ll2TCMPY2DbbAHgSqh4G+0wiN2j7mT1x6tIX81Ngn6RbBDOAcbg3oC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_148 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_148 new file mode 100644 index 0000000000..f015d4abbc --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_148 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "74", + "previousBlockHash": "2D83A86A3090FBD82A434B7959C76576B07303C900CC01EC903E052FF74A18DD", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:yhSxLC+iWfL67tFut52FLppz85cMzWzOny+7XVmlLzU=" + }, + "size": 76 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063663555, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "3084A3537E3BEEFB7B1BF200236AE4698E319ECA2D2660CFC84B4B10FFA73AA6" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////reuVu+L6Ue1mQShsOaxy5n1b4cO5D9UoSefAuivnsH3QeMEhKk4VSz7Z/FgOfHoKkfhRG+pTCDLntRH4KeOyAFePqDE1sMLUy+2/TYwHfS4UpeoeAGEXeu/J0NiERY/GEyJiOeFDzzgkZK7HzPfevNinfyYyktBz9g6lSdypI1nQQljqhLsbKFeqWO2PA0wIoKecR+jaH7Yp/pQBo9ThVaiTvbNbms11dgHQEOX0G98tSkoQSw0rjzF6Jl045M1SZCSnGMVWsLo1vFKvHcsOUbjxtBelpis5kHdu2A2wAsR5SnhP2ckOn+RBfR9N+o3sBPqAFwljNC7YHE7IOx+cX7yhN/5HqN/pVz6jbi6VqZVcGAnuxBmTyquZ9zOcqAADq4o/TvlOChLNAqObD/J0cK33WVxGE2I1KpH97s30/96f+HMcWdDedIM34RBOg27XJzW+7Dpszmu1o0BzI7qVcFBsYiV2XaN/nTJq0lCn+H6o0H9BEKR5q0gGFSU/cHc2iUaMQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB19fmq2goJHJzQfjfnNjHCMc3GwMw04uLOZVjzaFfnxoN2KFXPvkGETBhz/WVi90fxZMMvfFPwmSeoz3Vf9GAH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_149 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_149 new file mode 100644 index 0000000000..2d2d8f97f7 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_149 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "75", + "previousBlockHash": "32CDBECADDC73A4946A011ABE0D342B051135729EF90392807B04211AF8CC4D5", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:fQvkQ8dUO0JH73x2ShCfHzjiAv3e84Adpt1dR+8nG1k=" + }, + "size": 77 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063668511, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "1B6DC219C07156BE092F4226A6D62C65E4A6B5F3B2865F5CCEAA48647E94DF1B" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qmlxINsfGWddEBxgKtYhjrNJxwxKjh7wqHbmGLY2fD2YRBI9DaxmACQsg68BCUj8jF2SFGRMZ9ch374A/UYe6zRpcwF++HvGMQc3QtkmKcTUuFoGYpq49Oyczvgl+8Z2CzMOzcOqAODkuYN2NGwhPzUDkEZvd3sZdzqI/f7p3qPWHrOesFzaYA39HagdKhXDjI3KCU1DbENPBjlmK1Iw3XzhEDAIsvoJb1pCAUHeSC5etGCPfrXlFncR9sGexg7JElpi76b59L5FXbYaSCKhg2n5IoH8QZ6GosXnBHHtz0QTF8LRXz2zuR0nJ4NkDkxZhL7HcAht9m5wmMOvNEFgQtgDKfQVBow2KE3PzeqZ0CFJzn5Xm8o2gqugkwCR4J3xDTbHN8H2yTlZMLSYw96ugKtLGxjMYWu6htbkSbcxu6+FetSapiHm3A/QDU8vavgjB8SMGHRJ69kbMt/X0uCJlnHiRpu5w0d6c77fraRXLy2hFRiioqt9t/m54cpsi9O16X2aQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCUIfW6G0/mpAwnUMG5J+oC18GZla0J4+YWM732qk5To8YmATsUadvQmMG5msoOKKFWG7dF7vV2HJyiF3bBOT4A" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_15 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_15 new file mode 100644 index 0000000000..4441cccbf3 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_15 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "8", + "previousBlockHash": "3CBED088C9D86FAEBF2C934A2907CC9EC335AFE5E9BC24C73422E60E14A10A80", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:6i8DSsQcoy9tRaKD5hn54HH01OQG1+V7wQlYRZdht1Q=" + }, + "size": 10 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062296903, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "82C8432CEDE9120C86ED77F9CB46AA156CF9A547E51CD518EC77D0AD6C23FD22" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////h7Q8FqI6Ae6Fgy4tzFt9DaNcxh/nAJZoWr9PnPWlgooexCKNJqhuhjCAh9cctoEckwxsAUdY8TO5ys/t6mPU4N1p0psZ9whhStvEVjA0S2U7P0NOSMu7hA64PSU3qqIPBsjut55XLfnEafRZRWe2K66VdyhXaEXX/55RbcvdXl0KWiRhV1nPfa8hFu2qJQdWpBIUv/2jvlopF9x+zm2UZNPaD4UBpYp3HvUFJMcSO3oQFyL9OCGOY1v39t0lahOMrISeJKwtx98Os1yQVr/qwdcW/b6P2it96wBVW02iSeYah2bTLh2h/duvrZR6VBLsr8CZngXnXHu6nt5dE9y3TF0/5mLPBKgq/sh0L7kFlEK1rRjGaMNkQvtuuxfWF/nVG42ZDn4fcH0Sg187b6EcdMubbE4t36IsUFVgo3LWk+/GAY4ImmQEpSoj+wY/O42OXjlQhP0b5q2FmmKD1AW1u957G3JYa7skIFNhU8SfU5sIdHmQEr3MxViMw9UCneBR7UPCQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDYurh05dj8BDge8vlv45iXUeJecR4dcLQwHRfZFgI/0KMQS9yvyoC5arPLJXZpA3kIUwM/3pyc8zjLnUB1lhcN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_150 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_150 new file mode 100644 index 0000000000..876a0e0790 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_150 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "75", + "previousBlockHash": "3084A3537E3BEEFB7B1BF200236AE4698E319ECA2D2660CFC84B4B10FFA73AA6", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:HfsH3RUVjRQFGpb7z/1JL2hCefsguJWkiZAhQyzIsQU=" + }, + "size": 77 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063673858, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "69F250099850117F428A909265F27BFF69DAF5441BC53620AC5F68B2EF3C090B" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rgaYsyQgYGrRUAD/EfkCFckg02bpXysIA6+Q/pw8yyD1xAKs68CLKk30d2Clmu+GtO2zyX75yiH2vCDpsLHuGEYoIWMmiq/5HZOhxLrb8BrYNwyB9dXfYaZ3K2X+A6+6A6apI2Uf5morUiXgAiNmEA7muO/Z1qKlqS8tMSOcxP4daLQmP9DeaPLIDJOtnD+ztT7k/wDWr4rY7aB3E8yHGzMCIAWrMLYOGf1FvXhbD1hU92vtbuM8evDjUXr+U0BJydU1lHFa+E5F+F7GZBEZXnP5qthb3anUTfP0z/VO4G54pU6OyygFnUmUGx4REsL0WHbgu6bUKkLcoL437XgoZiuCDnh/fenq96ZrvsGi/QbbeADsfQRCv9/lcTGlWo6M9vnT1lqNGfh/ClODLWeyUb1N7d5oqjNVmGNv3HRj8t/UxfD4SwHJ17RNVaewL88/GLndwZyXo5tVGMp7xDacXo9wgh9evE8jdkRCrReA8Z4HFZjE0z5kZVNaHIRFTBYtcAgFQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDZQTAxcFQWllRp0G15StTcwd+65TP3kOIcnNg/5Z72N0TWX5x5RIlTT6VjPmwbdoLndlbYq7W/KfKFn6Mtb8IE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_151 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_151 new file mode 100644 index 0000000000..00d4952b21 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_151 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "76", + "previousBlockHash": "1B6DC219C07156BE092F4226A6D62C65E4A6B5F3B2865F5CCEAA48647E94DF1B", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:iAeZ8tzNaTkoMbyHMtAThO8oqhudNd0f3r/r5RVAlhw=" + }, + "size": 78 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063679255, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "41905E91E99E1EAE71ADAD8E7685CCC547549AE2B47E4A448D75DC19033D46C2" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rpKjwvVUGh4YRNvTTxbD59gZFgzOcdCQ02dl8GQBnr8z0vPJwZyvG2z/aXUOQWqFqgIqPTxoipq7oaKmXCXDwkvsk+FIDHGECKEYXIc8uHPCMWN2PKKUjZhCoYRMaJxeBxSYBf+g2I+a6sLfl/hAvJv48+fXOEgxSMYPubTRgh9GSBHR6MgGcnpqG8jrURiAjyvVId1mauX0tIv/ULrteRwQDPYT8xdcyEV1jSef6bYyFVQJKuy7nPgA16OUs4wMJ4TtH8us8UDIfHB9N9MXcEIdMI4+My9mL9W82elYBGgTsyyx1twqNIrMqYdebVwRfzu8icHZVBWI4dOP0HnGIFPIiLzxbR38/BbfgYBSCNBz95z9xWzOMOmlU6APTUKqHdW090aN1aJednFzJtzKZWI9LLA5YBz5A84YC9/IQ2vpEGhSPyr2/UB2VW8W3N0gWMQh6YGhFOTHPv2mRN/+9mce4Cg0ElLEqajily10PrQLIT0jpMEpRsBIKu+vP70Uyy5QQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAKB4IVid37XmChtCjMK3ZxbMj0X4FuE5uuRGFWJTeysqtEYdNUliNjSGdm7WGJTmmkCEiRONakP5sBVpm6O+sI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_152 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_152 new file mode 100644 index 0000000000..55e783a973 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_152 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "76", + "previousBlockHash": "69F250099850117F428A909265F27BFF69DAF5441BC53620AC5F68B2EF3C090B", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:BfBn3eGgtsp9A920ixrNfC34rauGfJHtCltO/Pf+aSU=" + }, + "size": 78 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063684568, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "0691892D4D1AF77E38900BFDD526D5F10F5D996A659267E19A606231D9EFB806" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////uPP8q9wyqZKD+2ovAc2oNH/xiErxD1fQ6YQJX9xyf5qbOXYzeZj3FvL1LuRdhtzLjcwaanUpJG3MOWXUDGcn5vOqhD3UodSMsUrFSaX51ZE0aPy7hCBoVFxcLbF7sFkuAGzoVwJVECelqNeGddFFefB0WUqyqZctEqr45AiPWHo5Em/J0q9m4yK97OG5ritttSXb2OyT8ONAXHvhaW6EYfzQtHsgFe4sr/MjXWDjS9XBNA3CDS8q961v9Qd2RZVuc+Sa1EyBOFXbIG4N58gXGqT2ImWGAw+d5az2+s+U/SnNEy8ylGrUK3IzKvcOe+MV90cTzrYaG8Qxue8p0tq1Eug0xqW5cwBXNtqV/wLE3TsP1WaWLB2rH78fsO3c96da2y3XDzsm2QMUWuj9pQEqFtXZ9tPbUjXLgLIOLtPshwqANOvzyJ13fST48WVGg3dw1oeGs7xMgYc24o98i58qumA1Dm6qfg/SDWFLxgf3i9ZlZoYWjtc2t5+OzggPl8irhIkdQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA0MmMs7lVmnIWFs5eImpa2h9vPASeS5sBIFAF69RR/ktLbXd6Ocs7Tl5m8hGIZc3vOVtTXWkjNNuzfNqRixsEH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_153 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_153 new file mode 100644 index 0000000000..8e4f9e1d1a --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_153 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "77", + "previousBlockHash": "41905E91E99E1EAE71ADAD8E7685CCC547549AE2B47E4A448D75DC19033D46C2", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:mF3BeoOfd/R60Tf0M+DP2Gyt8C17eYl9hwCaLKrPcwI=" + }, + "size": 79 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063690165, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "2BBDB25534499384F7639F7ADA78E7C710CF09ADC36DC22828EFAE639582B8FB" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////ia59xPudBMhSS0MNQ5PXtpcp7+vBoxpRVIA+sbM7iUTUAZXI++HZxi/uYekIEauwpv+G4/f7EPBtyXXHGgzUP7seON/ldBFyVw5O1v4UbsAbwIRwg9rnAc2x4FU3VEC7FTkTaQfIKIMiye5zGRYFxAdrrDCCBLeYJK7cN5W9JKKfEZkmxRMh43N9CZ4d/kqSiuK4R3hvDpEdJaB3dSalvKZZKn6q7uphNFMO0y3NQmCPJtaJFVbjGhGNDFXnQ1Qk+N75DdtwDCygSeWdfVjM88Ar398UjGIY3ZBtHo+XN6Du1VOBHXkqI4rRvYfXnKCDuHsgw+hlNaZ5a8j3yIzvZFm1BAFPwzjOZmRy363ybuLwlZjDTjLRJQwWVcoRBhzIng7n1fViHANfvVnJl4/Q0iY/jUPN4kUMZ9CRSB3FQMeW+cVxdnHLGmRSQ61t5MJkXVXDz7omA146PuQLtOTPZZKJ+kDCr0/llNTHtrPISgDKczFV9TQ6Z7ohZsroANw3xtcCQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDG0yLwUfi+sqLbbPzONg656f6Q3fPNJc85oIeqIkgRU3Y2p62VaYU1HBM9K76fpLn4gfEtkGvCMMB/pcRyuyMF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_154 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_154 new file mode 100644 index 0000000000..fa90a3c2c2 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_154 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "77", + "previousBlockHash": "0691892D4D1AF77E38900BFDD526D5F10F5D996A659267E19A606231D9EFB806", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ksqCOvOopLof74mt+ocjhQrJ38Lk+rvvIPnBiiOcdx0=" + }, + "size": 79 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063695512, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "D551BE7718A6B6F63A2DF47AE38A17D8401D4E6F9EEC8970D9AEF1C02687D710" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////krUB7P2kycwGD8q02zrC/bLcVpCjCHkrirlIaP4oyvghzlyvCcYIs1qiUWju+KGnkA59JPIa1dKP8sM2zVWPZz2NsZ03SXEo7p6Wvw+bwKFpFmBQqVbomDqFphHXkt3FBdmblmEVO+Rof7L2+pG8BWF2SPmOYTJOdFYZuCm6fc4TpZJcX9Hiqj5KTjicc2KKi4nuQHPFR7LiwLDPRzxC0Gs+H4XJhxKFUAfc02lOkcCZ5G3Eh6A4Hzr1ZyaSzux6QGnrKNG41U1KZmkkwzmCQNMb1uP33CqLhEB9Z69Jc3Oq2ItYFk3h7A2J1I+Sj7i0vhcsfphvjR6RY5JDze62NCFY7bUrB9Lb4X1XewxyIDCFhKbvOXayOeseZJbGAhy4C/SjiOTx+TdzYQffZHO03dAQZDYbDcLdf3ymzr0OwAVKrpLZyYABfDdQf5rJKF3q77ogQvJep2bM1QJ/YAaH9rI7nieiChTm6OhYQ+QEmHvnO+GfbMvvBnZYKDkIPi4C/eAgQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBcET0Eh92HMfLcy4BUomRIiuzabzWUg6LVsknHrEPht3X/ifkgBDNRJT6VebbZwebAkuSp0HapezJfY8W6kgEK" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_155 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_155 new file mode 100644 index 0000000000..9adeaeab4a --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_155 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "78", + "previousBlockHash": "2BBDB25534499384F7639F7ADA78E7C710CF09ADC36DC22828EFAE639582B8FB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ww/6gH45SJzRL/DOWpZTtNpByPj72AxzFG24G2Q6rGM=" + }, + "size": 80 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063701115, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "1B745D1E49DECB16E0357D5101C0833A3481874D0B7B89EE28BA86AB38272DA9" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kZBRIUOsExvmxGFJn+Gfl9fM8CbHh3ankpV/R8bYehE6iv4WTyriQTHX1fQJRK5gh9dNA1p2p8Y6wkU7OSDH1RoVycW4faClWtql1Nd3AnmeIBhy8c6sQD0OfPenZpwRB3WVjhOGWOu39Ug2RvI9D4rlKfYeyd2c5Ss/HcP/CGXGVACOBmlSlA8/EgDeRoV6lDtNNxEkj5YSTyWROIcTtR/c08VUTBZ3X+ApqwK5D4fQmQ6v50TXU6dnGP+7HtC7e5rcmwVLWD90CCH5fnubjbDxuCjDeO8AlvQ/eBJDy9IuzjIHWwohqE2MoX63rQTVw6n4o+SOY1PtgK4PCqTLLyvMEuo4sJw2dW1KDIJxit0t2HxNF10t2rDRbstE6LS8Tqs4mBGnhDZ3b/5jPjGUwWgxPIflO6+jAGAugVmK+tr/8frGjs4kANiKlHn9mJZ39ia3+VwzywqZSuR2BDKHu5CUfikV/hEGOCbbfoLa2nX3jxKChjVU6YaKjoCITiTEpiRoQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCBgGqLWL6VsfJGn8mOfWF+rZ/bh5I20klLvR8sNQE/8TMEcAafX+5ZEQ+rvMtg3/8FD7Ckeg1y6wtG2kfVaNcD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_156 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_156 new file mode 100644 index 0000000000..392603d7f8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_156 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "78", + "previousBlockHash": "D551BE7718A6B6F63A2DF47AE38A17D8401D4E6F9EEC8970D9AEF1C02687D710", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:1wQKkTno0NxCo816krzXIg+aRAuz2vglhLZJUx9iMCw=" + }, + "size": 80 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063706255, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "BA89FDD422465D3CECD5F86251F0FC60EFCDE85EAFCE5A20673885BDB0E11695" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////oTN39FdmCCJNxWUDaAaYNTRF+nF2qpZ4zqATYLYkP4CMFokXrnXZ8tzOJoGaWBn0sQgW9s0x64oOsRMAR6qArK0sQhexWQkQM1Qu7KV8WOv6tFAL03+9x2kxQk2yyQv0Ce4AH0Eoy/vb+ADssFyMqqhbBkCCw0NL5I83Ifug5ipYf0Qrzn+0R9m/BB9IeWT2q2Ce4Hw6a1NI48DOVlsYemPWvwDrZOZufYnjiSoAypQXrXjyFCnhj9Pt8NW4YHZJZYXwJ3J71TgSiqx7nxgdD6L9lTN8LDGx1ab27HzfFBZBiJqaa12+MAbcszaQOagT68H1Oz/iaa3B6gn7D4PWJCpFmu/4M0Q5jU1BgMy8ncqO+CyzIvB77grPJj7/4fvMrafVNc2UEMcrjyW2f1fNZEPphzRki/hhW46fRkbnj019Itj+UBvWdH76iAl9XrpiraQ0C06VT3/9LPLUO7uj15E6dqhnt60BBCJ3qx4AH6Voo6uAJJAkpFF6Gm3YZSh3ysZeQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBc9VoZIoGVqsZT/HkeFtaeaaUK6rwh/exSfr7LvhdksSHGu7k4Bc2ofXnK2cZHqPU9b0uj6HdGq5DZwwrGAgAA" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_157 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_157 new file mode 100644 index 0000000000..e134835b20 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_157 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "79", + "previousBlockHash": "1B745D1E49DECB16E0357D5101C0833A3481874D0B7B89EE28BA86AB38272DA9", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:T5bUHdSewZPgkTpM95oLNTXcmdsl64gZ/ZN4lJo6oAU=" + }, + "size": 81 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063711577, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "21AC832C2A70473BFF7A09A414DDD9A9C3142052F7AB4C845B1DAD7D61708710" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////l6/xE4GxK75IS7IoE3PoLvJQhXgI1lUE9IXzw6sllfMXI/vD57ivGWwyojS0BB6fpTSvxA0KZd5HaTDsM6S2vZW+kB5tB8AWhDHr9WGubtn1maeEGu3DHxp+WUKiQBTVByLAPgG65hJJHI6rVBEyjNjVNErDTSpoLfhF0kQZC6xLQC1wYJMVrbOIQDjLHrhumAqf9OVsrwKP6KJmejKmaeWMlaMJSeaQa67XAQBgTROiDYV4wY9rDFov1zTjHKchHRasTWCnQ+L9UDjxskoS+sMw1ZFbo2jQHPrztAycbG13yO9hKzDCYAZOKSfoTINodItllnckQRxGXNP7WbDSO3/WnozV+dVvWCfIm/F8UeKae64eUsfgCJtWQnVzJNtDcntvFtMwSPU2fPM2wtgHvQ3MSjR/JWTNXo7ASkJtKV0JaCJn+xvikb+/H6bO9ZY5cr8B0Q+SgTV3c2S8VLlGREyLG0WqyacDu2HIoceUFMOQBV9nI1oxQkfDZbleOAq2khuyQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCCSzxWrBnZFhreQddufqTBg7Y4p/Laf1cEaotg1JrH42tHdQbCXQox9nC2t8e57uUPYiqh8/adTZ4AnxpchuYD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_158 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_158 new file mode 100644 index 0000000000..0410249e18 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_158 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "79", + "previousBlockHash": "BA89FDD422465D3CECD5F86251F0FC60EFCDE85EAFCE5A20673885BDB0E11695", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Bcrt+rVXFGwd3ZJ668okfOYqBHFxe99ix41BOdCCPgs=" + }, + "size": 81 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063716862, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "E55F9717D5145DE742718F9ECE51DFF8C50B6108A085CA82E103C267BB4550D1" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////prABAJiQTe0QoX10Ql/NejnRhCZlCplqMBWLH46a8x1X/Q8NQzI4rW/5QmfGNTqHomFAdhZg39H6oUXH6BcB0cMpro/WtCUmJw+yFyvnfOzcyRYG9IUbMi+zD/C7s5c5A9sk1NbevFWTIa7aLc4ikn7pp/sPBoc7Dbr6CowQhc2DVNWznDApMTAFqwLS1x5DonOOqsKXWzMnDrm2876gS75ui9sU6Ogz2XWLO3gNhFnKU//4bMcUA9vVuQTEo4Ac33b3FVIa5741afK5196RB2tH1BxcXKCC2GgIjDUx6ysJG2fozTEo4j5tbGoOJdmYXrwJUmnZjrpH8nR3LscSTYJ8M4hvCV+3YHIQuZHnypLdq1iLyBW9M9F1zslTpmu2dNEP3jrE08XwOpi3AjDqauw5PXBBUtf2TZKKvWvgNJkquoBTCTxjAGqMcwFcrMhYSsrT29vO21Ds1fYrJJoQbFeZYwZsx5cuaTRSU7+XXnHnZWwCos4DO+EeT9PIuIUaUBwiQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBCiB4m5WjBYSNzJzJP1DhN4tI1UrzAa7aGk7j8XkgxtPFuawH5DpXbD0qr+n8F4T/D1+zj7GX9/vkQVKjDT6EH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_159 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_159 new file mode 100644 index 0000000000..b106fad8d9 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_159 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "80", + "previousBlockHash": "21AC832C2A70473BFF7A09A414DDD9A9C3142052F7AB4C845B1DAD7D61708710", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:gOlfDgGPuDWfVjvtt+7GGQSfaz4uJt/ldI+JHCtQdD8=" + }, + "size": 82 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063722076, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "865D3F8AEA2ECE37ED0D8FBE60DA3ED246D9CACF707BA7183AF2E60B4F358A5A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jl6scyN96r0ncoWm/kPp8CTnxA6ouFG4OMr4/BUo1aS3QYDy33y/KUyD1cpJ9DRcglPWGWeS3vwOBioQH1Rm/tdHTzbO7YvxRc0JC5mzZvOekynxhQNWbP0AkrnXjW5/EjA89Y5QADfmIcHMCdDpEN7Akm2KrnhJWp7tWspOeham9/C30JTxeCBCbTHMDWEVuA7DqAi5BXrvrg7DYuGZZkPP1dlrigqzmc71LcYYiobeJZI1+ZLH7lDJPOF3t5/kXi6CTGl/IPn/v8fbhFPfBXgcW53UYwnthT9byiM1IFOXzk+aQwLkGa2OCQQDSdPiE9H3/F9BePqPam0Djl6XAnJd86NMXOIdVGEGgSTLUNn4LNV//VpAFot9Z1SJAx6Embied2fA3Z59C+NbmcrIUXCIfKuEphkWRhN5KOmSU4zwDngZqFcG6foeL65+WlioZpJ2lbYWr1pgD6LuoKubx1jer/RAMNznE3s+ibAjEytIi0TG94V+WQn4VRq2NC0gmgwjQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDby9/wZ+5b6u3gcuQyKH6Ee6nzxonGK+8Nc4PC31dVP36vn9BzFlMqFNGkXf+ZEcc0srOccihAYZEbIq3AZvsH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_16 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_16 new file mode 100644 index 0000000000..a5cefeee6e --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_16 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "8", + "previousBlockHash": "1994CD8D922B3559B3B7D4174BA1E5761D731CF1D48A35414C5B294A93297F3C", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:tK18UYTuX+lYGlllFnoCWI0y3fYvCb8UPPEtV4xdUjI=" + }, + "size": 10 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062305640, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C438483D3C38316967C6AA98B1C10C1DABD47E66E367E2962ACD826630C3EB32" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gAcQsGmord+WLaiU4r925NMcuIe8PM2nxIwafhGKDrreCnmkSZl284j5VI1KcBwgl+Rbj0OBUOf0pk3NpUaM2Rj/3ZnpUjflPz54s89rTgoGL7M1las9JS0pvCPm0zWzD6IHJZNmrj6XPzCn/mqiT46m09izZ/lxM38scc7gb4fl5fyZDF5LmusbHwneyxtLgDwQg2nfWRsvvnVAiTNDDcYZcMpFNJn+K5cVVt5oA61OhlwMl4se2jUtORAZYX9xtcUV+w+FLnfPACa4P7k9RzDKcdchcjJ4h3j7EzSUKE/nkqG7xFYev8kgMskiZVV0LnRxccl0Epd70ocg4RUcZoozWjAY9QbzY1T66vM6rIIHK7vAMKfZjAfbwHyhO3vWXUvcIro0CV7G78eJqSWDbztUkfd7vS1t4wvUfaLGn7Ec7sLKyHFm+jfYqP0vNZif1ovnwqtoxBq3MTXoWs7IyJ6ye4SfQAsocjjtD/6T+LfXLFZQEQeewRsQysEOkap25rdeQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCR3slZQ2ViA2YJQYMvqaGirDlNgaiqsZqUr2WIrjkPM+MhO2I5HXJIxGkB0FLcEIiFkBGlNvsOXT68iBpRpK0H" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_160 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_160 new file mode 100644 index 0000000000..38ee7c77ff --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_160 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "80", + "previousBlockHash": "E55F9717D5145DE742718F9ECE51DFF8C50B6108A085CA82E103C267BB4550D1", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:VgFJDf5ta61porOY+VOZMLVRk6tmMyfm1TlpM1brwRc=" + }, + "size": 82 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063727553, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "4C43B10B614FA65BB4899756F26216BC6426BD0FF83872A73EF18C4B8C914B38" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////iL6QzG1zQbm4qzzqbJaw4rPLdWKBJghoqZRu0u5UXknFzLkhC/gy5lvAFF0PSM5ZtwvmOGAs/ucxBg9fdp5d+GnHg9uwhldYtwYwxQONDesifqSALa0GqIIgAF6cYHx0C11qZDqEmER7AsB2P/nRe4aXrWqna52EWWV5wrjwnhIe35sQ/XvlmdE+22Bh6WzojSxJh8DOP1xaedpgYEV2GShwHs3LI70VghuOryvnZ8GezezJ4LQEu9ywW6fsPuynhN59uWumadXQ1VbwWsZiyP8NHCdC16ruawvglo9gzXJD1PgaVAuRRPWz+tHMx+3zmOZx1vnda4VLumZAgkt5V7/bGmnjM0mG7LeL/8AANM2jbkcNf48NUek5NHbtDDgVOmw7cvNK3/h4Zh18zD8Jf3F7ZeDB4ToHQnobHQ/gBY6hqFaI+ztJCZJVHlsCPX1vnnTFvcmiWPi0J2nyyqubr7xDL6Xdfrh/cPCdIl+xol/+ZC9Uvfvu+xMsx3BCl+Sdn4AtQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDALp5CAJ7+kvR4JPrFC5jO4yuBv4mcXd9w8+zQWPwP3mOwzzZqOLg/T3ccToa5hFnbKQwkz3vfxsP0NicuDtQYO" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_161 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_161 new file mode 100644 index 0000000000..5bd86718bc --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_161 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "81", + "previousBlockHash": "865D3F8AEA2ECE37ED0D8FBE60DA3ED246D9CACF707BA7183AF2E60B4F358A5A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:/4/d8xS6NN06HFF+E+WW1pyDQVsPrIf4wZSgaEJfCHI=" + }, + "size": 83 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063732795, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "90379BE47BC643D225E24F1B4BAFE66728511C0A563CE0C445512119B3A85D3A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pGqm5FsJ4OIC6KYH25W23oPopY453e9AQBcALnixP7bCly2HjYnISHrdOC3/u+2ssMwMkiZYq/dL0pkY6En5FUi1AaKVM8PzGDTwWngNlEQzqdi6LJ3j9lU41gQsMCPICpORVNcqRnnPZBJEvUhbIF4w5MH0gCRsudDbAKNNu2rWrzWr2An1Cf2XNQoN42A8h2MxuEJfJctftmpz0CtwvYjLvrs5Py0BXBKA1fS0obtbxbqK9ESIZ6OffgPKOl5M/FRyR0A84e5OEKNkPvUZU3GEKq/qv1Vpx3qVr4/bvTRfeAUpj8VKG2vRtRa1f33KU3gDhGDpK6QzsWggfLWdHOYTpbrTXKiqkpme0wVvhGh3d3vbt+4KKBVJtnhfLL2by/HXZkeg09S15GXkSV+FnnbjgPfEB1/6GqsZ5Z4z26JrOJp0q05UJYEJIszKWa+E5Hf+6SJvqCLP5EBf3Wd9IFmkY9JWFjV3G2b2uCMUvxHC4YZ9RmtTiHnhf78pkpxfQj4qQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCK2TCp8zQXUpkNfecWEFTC5czDq7pYxWYQsJrWA4eRqL/MsQETW+veeSL9n/QvrlTDHDYWKAfzemc8hz6po34C" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_162 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_162 new file mode 100644 index 0000000000..301645d698 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_162 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "81", + "previousBlockHash": "4C43B10B614FA65BB4899756F26216BC6426BD0FF83872A73EF18C4B8C914B38", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:lJoReEw10Qh1gr52tfiP/9Fylvx4TRikSzLBxnVNQFE=" + }, + "size": 83 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063737897, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "EF59A9B9158551684D5A58C7E85114931745FD377D6BD1322C736C0195BB6A81" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kuAtWZ//6OEWkLF493iXnNT+nYY4a8Dz2X2NZ9zgdC329lRgMW8LnRHa0mzWcPW+h8SzvInBeT3FtuAxjFpHBQhqGIkXgyfBIqvKgtHAruwK/iZFGdN1BAr5KtmpUgaKBoIXnL+yvw4RIvY/cyhPd8MhbMB92IoSh3moOQwMstcczulCkTn/tbO2+AydX4cWuDx77ELp0dElNAdlHRbLIUenph9q5x1oN4BI1giLhBU61fpA4QBSskBAtLgtiRzePwz2z2uXTF1SseJwre6aMi4HPnEO6lNiY41jgkBPdWkdUV9I1gLbsh7jiMYbzeoS/XYo60axbjdPfQKy7QIkRO4GjIk76TYC+XGJUtoP2BZCdfN5hcQsX4r6nCvv96JxCIMRCHjJSAun3RsCc3GoHk1xSsV6hOUvMkauQI5Lk71tSfBNRrFaQJa4LcJ6tXNpPREpNOHUkJ1DOMf/8zowqqCjNfQn64hVWDUIUP5ip+yMOgbf9j5O0+mEzMc6vwPtmc7YQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDGwrYN/0LqeYYh3lZn/By9LUmvJf0U/FlrKSXDg7n9Qb9LvNMIJzGb5C8jiApBsIQO3Zd8+1n/yy+MvJGtfVYG" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_163 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_163 new file mode 100644 index 0000000000..80656c70fb --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_163 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "82", + "previousBlockHash": "90379BE47BC643D225E24F1B4BAFE66728511C0A563CE0C445512119B3A85D3A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:9K9pdoU9PRQufALYeqImw6wVWsR6UAmsFHo/Hjr49lQ=" + }, + "size": 84 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063742991, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "AEED96FEA3394F6FA4AFD43A9ED4B09E68967144D7F44C500D8A60AFC72E4C28" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////o+htVrQgMF2uc0aV78a7oHaOjAOj26UUBhd5KGPobOJnsJ3F5kcHAYYH/oFx2PAeo0GIDDuuwrxZrQXwSnsFsXHwy5rBcEaWqEF7KjTTAaj3YSJ5Py7agCrZ7ua1oRi7GR7FMLp5obkUvggbe6mtoQDqa7RXx+9mcgH97m9hc5hmuPpOAGnHI4lJTXEbKUxbhJyvjX6IxBiv3zfjRrsx59Quh17748Du63tUmuFwzzSmm0eQPooXxkByjoTHI4o2OAWNx4/MyD9rDEAvoxYVmXdzkeMfWifjLjFpB8AoVi5kEh/PoGr8fj6IA3lxq3hJyA8Ex0fLGtGNMr2dMJYRJ+gDNTrMTbBWRByUeIDIiCfWfx7+qHfNV33HvkQ++aOcyILslsDXvZGTO+Ne6PCRMKBa7eapC+FYhSiuf7fYb9AOcRkdAkYhsRShoAxAiThAl7CXxgLn/GUWzI8R7Fg8ZFd1KZpeUBetxNqc6zllbt3ilfO1y0N5w65us+yt56/UbkrzQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAhLn85H/Ao75ZUc7ahN6vdHf4u4DmcjAQdClEj5kVbjs0A0RA4Ah/FsenFJpk0CIS52HNNzKRKVjQBts3YhLUC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_164 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_164 new file mode 100644 index 0000000000..7621b510fd --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_164 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "82", + "previousBlockHash": "EF59A9B9158551684D5A58C7E85114931745FD377D6BD1322C736C0195BB6A81", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:xmEJT7tC3ybYsf7fh+qsRoHtNRkQwwmyJ2Whb5Q0Yjg=" + }, + "size": 84 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063748286, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "4400BD614A9E4B10FE59A53C1E923CD90AEED2A04E623B533041C9C3779F6897" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////iPQ15xHt7lL052xzdGCwSmqUL+qudshI+vpi3AbIYLLASo/I9gqoPGGIDt6DK71zmaRrHSpslCmg6XRAh472olwuVpTDkW/S7hnwpskPpvLCyaFT+xw+jEnIG5vuzW6nBs0azlyt3xmBoFopmy8p5pT0jQdqAKv7t3xJB8NkJhjsnkkgTtcSm6Nnp79DfrBpqM9XrnOzTMY4+ud7eGsysUK0rueWP7NJc8rBLjDwZxH8PxI2DWBDrB7qRKvHf7BkklFVILoUA9Ockb44jE3ziYkX42tkD4QXwNXO2Zb/nCqAKzZ3D5OpfNvL3csXIju7ul3GphZP+aF74i9RNM4YFdGiBCssAHuEaIhGqnR/AduilmHwQn2RVnIKbyV4SlUdJCVwZDZJkJSKjSLeAn5GmyIzALUW5fih4q+DTrlPdkPIv/ZGE31Ym8aQ+wmbC63PPo14hPCrB0cOKx2nyyAfSe57xtXIYh+7h/d2wSIqNntJ8E6YY5EECNcFSn7WZCDal6QkQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCNXeJ0bCeOByLfNPhgKTNzYyJqSdlZ3A+OK0mzOKI4Wp7wFoR9213wc7GQwm0AVc2QLTEKEFL+2FkCUCrBZBYH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_165 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_165 new file mode 100644 index 0000000000..c4cbfbf9d0 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_165 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "83", + "previousBlockHash": "AEED96FEA3394F6FA4AFD43A9ED4B09E68967144D7F44C500D8A60AFC72E4C28", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:8SRwNVCUCDi2Z+An9rDzO96tqAN6OIkJOs+xIGiTxkY=" + }, + "size": 85 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063753671, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "A89A6301F9F196D3AA288EA8D0D6E66AB4C174E3F0AD975124D0DFFFD206ACE5" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////ohT8HSxlfpF4GBN0XAJCrf4w7GcTMZ/dTbQngPiXL/bkJjaaKWkJ5e6I9d3buyVQlM+adVNNfDvmjTTenNbfMcQTIQkrQRT9bFirPzYlrFMjquUbwHjZeFN+QqjJP+z+FFLbfMtVy+W4EzZndhCmzctfdb+lIpb7w698Fl10tC/oFq92eMkP1QIIa32usDX9mMa1j3a48nCwx+4m0OTk+jvSEjHvn6pf52q1Wy6EUMZ9eEUoxlD7rS7cFSVzeHujN8b1d1UHJYXRPBmhnyFLORK9/I9DXL5bVXdFiFmCV4HbVhmZKhmwu6NMyQTZsOdYf3aQZcCYHI+WT2uGCxvwY+U3UxwTFdXu5rOZ2A6TnlpYUCmM80nzwZLsOXS7/AqnTgTy3oe7fpCXVz3q/RL7xHI4Ke6wndW4Pcch8rgFYjkWLHzokXnonYyC+2rOf6fTFTQ7fyGikoDMQexx7Y0CytASOqltteN19d6N1UdrEtQBioM2G2XeHBAFHS1GDmNPbTEdQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDQCMQY2TqivLr50Et/x1YfiE2UXEpsGRwcoxik+dgYQQvf430VoCd4MTPDlSC96Y8twauDH0UMmG8zv3rQ1f0D" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_166 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_166 new file mode 100644 index 0000000000..a8ef40b3be --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_166 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "83", + "previousBlockHash": "4400BD614A9E4B10FE59A53C1E923CD90AEED2A04E623B533041C9C3779F6897", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:xCA+qFA55Zr0a68V2JPVyG76M8KCPH/KqRr8qlE8JkM=" + }, + "size": 85 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063758869, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "CEE902D2B9C8E1E753206EAD1940FF92A7E9BFC1383FDFC479A7538A159B3B9F" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////sikYEKcIsXLQ0584HumwbqJFqKyYY7nUvHK+HiRGjJJvhpyx0zF5UgRZCLuBnU6viPsxcevscbFV7ndvT3eXA3kfhJF0wmqk+oD258yzLXK/KJGJsxbA9r5wSGUGtQYEAYgh7u1ZtyBg/oZ1Hxm5fZ7B21xKYVIjbrztRy0ORRjzwKdBOmR4xuGKfp0ACcmLpqxtrK6xH73ISTzNtW/B9tSfNt76S9GIzNv6G8TostsWNw0yWGDwW6Nxv8Es/pJ3FaeS6TFijrvDs1FEhmH2T0bLKcfzHwfv+VxAt+3ev6hxL/L2wlT2yVW7yaXcSG2ABy1F8uOeu+Pe0bkjN4LOMxflyDg4ZYDl+vS6qZmQbg0Z+Uu6iRN1lmlyIed2+7s/vqeQnYeP6YxwTJ9O8BYe7h1Ww0gqDBdbCmltQ837aVUmdAVquBGIQyirpVcDg2X3sjHiKV2tM+i5sToxVSVBw9OMSzFA0tBcLOqp8R0DGzGfmxceKhhQocSA9gqw4KL1mRsxQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDWyIyonOBeXMhToF+mYKRaub1A0awtfRrSNsAdQG2hWgmTkE2lB0yFmYzwQzjMsBpBb+H/axjNHrmAM96SGqoL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_167 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_167 new file mode 100644 index 0000000000..ab84591c9c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_167 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "84", + "previousBlockHash": "A89A6301F9F196D3AA288EA8D0D6E66AB4C174E3F0AD975124D0DFFFD206ACE5", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:8JRO782EHKQpCur+Vm4Bgre72r/+Ti/YtiDF6JnLnz8=" + }, + "size": 86 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063764376, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "F157A39D59B930237037A3DCD12679BCCDC8A8A28A4C6FCD493160364A990C6D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////paeUTCJQk2fEWz5qzmALc+CU4I+MR9mK1QCjZg0kjsMNQb7UQda0/14PzvZ8ZLhOuM3m+Wp7jXBBE1uQxp3my5rBA1wkLehbKpgbubC6vq9Wf3VhQP9QMTLRFLfJFNE5DzKb0eVD4zAZdYA6pcdvM9J3PUe/MuHdH26lfM8bTMlu5sbT2rYhBO2fm1sHI3zMoGqUY5asPqDA3IMtBB+mnRBiddGqB6xQxNxVGloiy0D/WxmakhwcnNWFYPW+gYnbS5A2IyXjolo2axN726xuvUbgkKEljVlwEknjIgntPlY/+73BYLIJcDofKVpj16bcbgRtwV5UivyF2PuHek6sFpeW5JS5xWbw242qIW1afacVO9VI0jk+wwXgwcJqOHHfjoMPoGR7c34abPtSvfCkboztyseeB3XH4IvjPnrTjYXwMEHIofsbZhRjD+vMYu18sE2DTroVenmYKwnArEakV9xZayA8EvRDQsWfN1i3cmGEuIqbkq/mhItAw0/HEMYWV+ZTQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDUubf0nOYmsnzDx6enF3U+5wvMKKHxFsxd8geIM+37caUu6WK9dX5qmoX9POR9hRE2xrS1AvGpp0WtzTu/iJ4N" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_168 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_168 new file mode 100644 index 0000000000..b64d720996 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_168 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "84", + "previousBlockHash": "CEE902D2B9C8E1E753206EAD1940FF92A7E9BFC1383FDFC479A7538A159B3B9F", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:tqRHv1pQgdbwsObzk2A4s9+AxI1/EgzcbBtSW07mpzY=" + }, + "size": 86 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063769425, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "A2F5FECB01DEB9025BFC0E001C99413F3D2799B35E7A4255428617FD913F7829" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////knD9FZRdm6i43+C19b28mUrrPXYBdYn8dIR6yppYXLduzP1DerWNAkSvVWWwjkekr+pVLWvvuCaV/qCbdg67WvYgA5da6NcaKhgeK4rKCUF1LL+JZP4Gf2xK+0ZpVcosEcUvaX67UeEgpOhvX/xbmQ5NevWWNPGD1YggzJ4FkWjFS5JfrECOGrvV6QwpLY3hrZuqMPm53nc6Liz9QUl1ait1XjwsgNYAaCx3ygYl6QfMVAtK8K7c7CGfVs5NG6950ENoOw7GPKYrHsAkajx2YquRp3ADMeNJr2v6spEltIUCn8S88L7OJMowfg2ING4naM3RFqPHglCYDL6QT9N8C3XZwUbSa58BISjf/PWJHOYiRkFywqY/1mMPzpMfnrLaW5x3fEjaedJaSbqX65ZVbJqlC47OIh9vv6woLxkUzeaVYuL+pXgOaM9V49/IxFJT41z6awvT5lP941vPHBJ3VSYMBJ8osp8Jw8OLeQw5XhuGzZSv0Mz5brd5+hkHU21+aLhbQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAfmRfh1A1Icc9jLjWwjQhWbeHANh2iaCh2Z10B6qoaCProaX5CVghcKqOeuTwVHGymDWDA65XwMWZIq0s9eFgL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_169 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_169 new file mode 100644 index 0000000000..3318469312 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_169 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "85", + "previousBlockHash": "F157A39D59B930237037A3DCD12679BCCDC8A8A28A4C6FCD493160364A990C6D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:xfgv1Z2udoLeWNgRjl3FSLTvwqaRKuNh1o3QyyBpeGo=" + }, + "size": 87 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063774926, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "6268415D0CBF33B9F6ADF6E6FD133327A166FD49587F33C3C8CEE91772D7BF49" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////iWXbEsWLBEIkGf1F9D33XTIbJ7vKiQcBPzypW4ReWi+YrL87V6JwN8YPyJjZY71RhB8bqCWKGharRtdMvQ4wYe8eIQfI4BqmprT1M2NhK814aTp9lIj+bmT6tMVy0+X0B7TxORJQ6W2iOqauN4wPFXSk6Wu7HB929viA78y3Siegq81fbvxgn4hofHto9yC+sP5A6es2TxSH/6K1Gsh2YI4WPf4heijJpOSrMs1TnzZQTtYxx7PRqbDx+c/yRbH4q8PHYIBfUjICs3hlmYfJF1w1pkBJKiRFN/MllWHnsrO0Jvlt181Erl3k+GNNybKGBwVaPFrZUWm4yRB2U2aSWDhZICBsNimWq/I/I/cCXoW15A290sWPyrfdylnPQw2fOzgtLfYZH9GKXLQj10DCwzeYws+P2IUXJ68nEKFY1zWrYzCSXQY0y8E1kJAkunqQveNq40cX/rOdEr+lbRncYDif6nIpya/48D/P/Z5SA5xMyKaOeR2msfTggkCdwDNHYmXqQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDC9PozlmM1Inx9IBREkgSdXdnzsSdfRnZkzT/RyJHpOE6bvAgsX0wCegrjfHE6y8qkAV5Ey/NQk8fLKNWSZ4GcA" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_17 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_17 new file mode 100644 index 0000000000..119c98c292 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_17 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "9", + "previousBlockHash": "82C8432CEDE9120C86ED77F9CB46AA156CF9A547E51CD518EC77D0AD6C23FD22", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:5frAoPyDVAWde+tJkYGifZx6wme191lir3fh//Lf5WA=" + }, + "size": 11 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062311064, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "A6CE28A51C0E9E06139077B3278D4C22C3F86000CE1DF84601E6A2A940E0B985" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////mG3LJkHnA5AqyahXjigyzkmmRlBA0Do55zxneE+rKmsNrc6VxrEzN4DiI0vrPsQKil+yy2lFSOmVV5BL361xCQ2QFptfm5zWKNyJ+sI5cOl4623Lv1wqnVBBfHAoN9DiDTy0scycGSaQ4KeZhxCuyG57COspJp71Ey5e43b5pFeGOLT/JFYcvjout08zCVdwld/NtT9mIB1sluLKtEDfhwS/qK21JvDrmp91/Co5QMclMh0TEFv09jle1nA2FT9L2BE7zgoVnv0E1hbldjz13rYXI6JPu77koIZ8dDQRrdG9vH1/stKsLacNSF9yyErxdRwUCFzMocLvl/bQ3NQMIpJdcjbaDhQZQ58quM+zbN4K++8jNYUoQTBAWnC5SFnX7fOyr5eKw4K5JHZpurk1qRXGanrHCZ5IFD3QLo4jsckaxJZ65bWaaVngCovZgVZ17mDrat/IxnJdHVhxlG8qNvvdGdPyCrG6S2uhAEUOex2C6Dr9IoK7F+V44H8dHjk+FceOQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDC1GBNPEXp4Ac175OXX+iPJPlY7vGzmvNTC7HxDBweraxYt3Cl9JrAZF467r2OsVSUl+j6yjLyB5+aSmn0VPKIE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_170 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_170 new file mode 100644 index 0000000000..dee2dda9c8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_170 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "85", + "previousBlockHash": "A2F5FECB01DEB9025BFC0E001C99413F3D2799B35E7A4255428617FD913F7829", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:JgH67QaBtwpIXUkREdt0OyB5SgxieEW/DvQj08gv/yY=" + }, + "size": 87 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063840998, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "049E65AC5E6E89E40047C1F3FB1A465FFEAFE5577AEF5E87F483043202B1150F" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qPkucyQtjZVANBZGNVQKxzA7gEqwmeCglnLRkTPl7haSvvlwJFqp17iUu7ZhqDV1r4qYsJdZl/l0KLK+bW1f3N9lSUqkvmf8JxOd5g+glYrMfHvprTweC0uSJ3IdEOm9BVy9o4wNXZZgb0opxil8tbjzTk1V1bWjLKhS7vHR6/6HEOE2RlFSP8t6Ph6p5p6PkP8PvVSN92CuAaPYnFIu0y9u7QC7OZCw7g/Ek65tkhkm6xA7+Ca9x5uFeXXrMSSb0Sau8cobyYqwRXyU/SMVf4qv2pO/wZUFnBj1np6Ud+/8xjnaoIG4sohzQU89vFgVorW+Gwt9uhOztvLJg7GkAjGzrQcZxBA9lsO0W4/5TYb648obKfbfBFARRK7e39defy/ON/wgytDtmrV02V4u9v42wLpTt+xKAGjCkpXPjgoHxsMBcmb9W/x+oPEOvUqPk2wWVyGIkn85uAEElhPuV4QMBbdwaP0TxftBdHX+llx+wMnJCGyqMhaS8mJ/zj9zkCe3QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCnVdgx+l4+yiKM1+jcwxEzL2gGQ36HhL/OuhLKYnXjwcrfJlVNT+JAcgRajciLR8H6oSMA3yNPtsJvFsiqo5YG" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_171 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_171 new file mode 100644 index 0000000000..348bd4e317 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_171 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "86", + "previousBlockHash": "6268415D0CBF33B9F6ADF6E6FD133327A166FD49587F33C3C8CEE91772D7BF49", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:kOMwj3NK8FEXnle/ZPQV2k/bsfg5KJQ9pSksjCAzdlE=" + }, + "size": 88 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063846050, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "304558A52362DDA0C4EE4E5846430A4911C98B292565EFF6049CFDA6553D4D9E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////hReU8Ri2F/w2H/B0Lu5uW3OUfZcoJjfYfOqSUiOcbZlk6F2mTSs0s3CjlDvH8Chmpgt0WS41b6I3LQvEYnub7WWHquFL42gYEDol52Y8gsBFiuZ3jv2s+s4ogjR2uSoyD1AHn75QC3NcaTjOjI6jDZ+NU4UwDrVByX/9nNf1tkaXoHcjVg7hgDjoio6eWd8Gj/h83w7xV7Y7QeFnwntkENQ0fMuUwOtaOwDwZF8eRcvQfGgqPayPEBw29DGMEjW1yDq8tNxlvpVa7cnkWa/tFfCUaJZJBx81J7KSacgb+xm+/7fQ+XTyl8if6ROCh7D4vL3nFyg8ZzknzSnE4hy2AenEbhiR8ATZT/e6fCegoA352ue+/cqBz4WwfzAroZsX7wEs1IswM3UkCr1DiM4OwrRi3zPnFaUchYyjZZXZV62lcYLa7ZZLP2X/7li4yjuYVTtj6MBZL18qyFPmNoVd7kLQ3FzujYVpBK6LCeoZjKzdXvVy0HW9hyPZtWw3dm6MSrt5QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDa44zTetcm8vqvyWgKlHskPEu2WnwPjq2Ek8IfY8kHGNPM3lAjRUwK8C1o7RZL57PPA7EaTg/Pb/24Yy3lHFMN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_172 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_172 new file mode 100644 index 0000000000..7fd3862a52 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_172 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "86", + "previousBlockHash": "049E65AC5E6E89E40047C1F3FB1A465FFEAFE5577AEF5E87F483043202B1150F", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:1PCY2M/X6T/D20maciUCXSMdXCMJ0Gk/Y2O0QIykTmM=" + }, + "size": 88 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063851096, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C5A4DE86003F77C0D3F1CEF437323727ACCA5D86DA96FB13C5E2378002CB2F77" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gc3AcKuGy9JkUH20/yWG5I0Aez9aT1XDEBjyzZz3U1hjGaXugadiLSQshwjPbLdyps5rk3nJalbKfn4W5xdpkD8hdOPGNJkSMMtCAUrB0J5CYFfnBDoLSOcwZsZT0fL6AIvMWwL/zfRP79inDQxsHUmsqyeeaUBsBj3eS+R2VwbEm+fuNXEuwqyWDDmVM82xhapdZ/4Pu/W6kgLfXEScywT06+nnpFewJAjc5ZAmE8RQQ88GkhxjLnR/OjKT+xNWcmx/jBZBAV7/CObcTKfb7y2UMdkLjfREsjoSSOMXGaKbdidPQBmVEjrbL/IZ6mIaw4mq66kknyT9R9Zadzn5PlHpDzHR0BrobOtTqrAxgfYRnTBITkFwwPnl0Lbf2+Kc07Un4swTuO4QdPdsZkRP+3LEL/K+/spCSRn523dvJ6e4+QOrSdOZDaG0Hz1O8nzI44T2BDNNSmdAGm2223dH6PydtvCsvrfLsICx4fOZatKz0PVPer7gvcKd6xgWZDwhbSviQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCP3Vx/THYaFwl0frr44JJRzb8vo2ANMY1KCqHG4Ormr027cRDLwy9WFWyN0qdjTIWVp5u7zPbbYdtbl3eDvkQJ" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_173 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_173 new file mode 100644 index 0000000000..1c7f02e96b --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_173 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "87", + "previousBlockHash": "304558A52362DDA0C4EE4E5846430A4911C98B292565EFF6049CFDA6553D4D9E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:cp5OENj3qXal857fPj4xNvUT5JVX6GdX3Dj2RbFqm18=" + }, + "size": 89 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063856196, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "009D467DD46FE629E2B6CAEABE71EA130B886498F158553771725E10575788BB" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kgbYbpwMEk9wfspa4vbnsiRflYndr+meQZLolVpHB737ojTihmqfnCqR11g7EP3Nhk5cZfHPTgBWzuG/S+yO1OqZwVjyxpMF8XAw7die88VSe3pqX63cZyXUOKuIQ6fKAwaZrcR890DVru3l0ImoEXj0PptiETzgSMrgd1Z1ZNbDTD+O7KD+gtsUU1KZflFKg64CR+urcrgMSrzfOUh9BFtgikREF8AmtIOnPFEouWdLJbV6XLABG4M/bIbwLgo9Dp1fc29IoOTaXUB8jl155S0eHBEoSpfYXcBk/mNrQDWkUz5QX5y8Wxjrg9ivjm4ohNZ8KMnEJoc4xaWD2LaIAGLmmdMVPKlqmNmTR7rl4OtoCd3b4SquODa/3AckSIjv5iYKMpPci4WLKAqOn8cgdWwzFmQxzH4ckJ4z+Sxh2hAQzGLERkfDpH6oZIHkh6tOS19KlQOzKDMa7gvR8ptKXgKW9jgMJaxPjWFsUgheoYKnbkgkplxlyqdPYgEId3i7IoOYQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDm0pm4GxDFY1pBJWvJQR9CkuKLnyjFuAwMQQ7iS/TM1E8IOsUROCGL1wuqrrCwIlaQyc7MaNCG5V8/Fjf9oEUI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_174 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_174 new file mode 100644 index 0000000000..a7784a5ea3 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_174 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "87", + "previousBlockHash": "C5A4DE86003F77C0D3F1CEF437323727ACCA5D86DA96FB13C5E2378002CB2F77", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ZV3fnxDGTMHSUTB3jaJ/wY210uFdY3h1/TuS6xFs+Qw=" + }, + "size": 89 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063861016, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "7C92BB94EA07381E5441FE999050803C1F973F9D5F0A12098F8E7DB8770B4931" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gSuawybFodqirKQpw9gKLXrvSb+7L27tE3bFjnOUkioknfgvMRE6fvGiV98eqlo0gGVv28JM+vleJ8CnhnOsw1ymbJdwwg9J5B+jKz3fzYoEHKE0UefJMIFr2eTuPAO4BNVy5wYHeCyx6iS6ch5mQe0x5u12Sez5vvXN/WYxzKgEPbAGp9n8ETZFCuSNS/BtsDW7+Ah2w8HrDIDyEwIqpI7eIyRX+u017n7pLLEWWqoQtnZ3doYKA4eHwIwMofMSDl+ckYmfunjDr5cU+UP8do9LfORxRB1fAN3IcJXcHhWUfaISwMPJvnP+oNHABykZflEDXc94TSYwJRnb6gEMFKPiIXWHAmZSHCovryxnobAhNvfiAgfCClFLYEwKH/PaVvZQJRlq+jeCHkFFSm14NgwpzKhhQ8N6nwpEPKPojTN7Kev9eE6YmGRd6Z9GG2msN/9etCRvgBphORYJeP+9+B1pqdDnigZVr8KLCBbEEcKpbPbS+eFXa+PSk6DEoeMf4OHcQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCP05jZ1DgsfqDFprayhwe+JLZxOW/gTPH6DXKTY52DgOLwgpyOKqjgeuTWzzY0OMtV5YUxx7mh8ROa4WM83CsA" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_175 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_175 new file mode 100644 index 0000000000..3def23b6c5 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_175 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "88", + "previousBlockHash": "009D467DD46FE629E2B6CAEABE71EA130B886498F158553771725E10575788BB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:DsfyXE0UNRlELnj8HBzbDpklEbcKn5rRiW7rVH+f/iA=" + }, + "size": 90 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063866201, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "5E555BAE9C7279D0B2159D16BC6D049AA0C5A825D04BAE8E337D77FAAD2F9FB8" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////sGKTaKxvZYgaAzYukNucfqafJk4KfZDhwLVmniMpRWteDoeORT2KbjDfq9ag5JWbtxPuwJsjyAeALbCGhAdyrN+q0x0O+FpvBGHvt6BojfR+bUAGIOLnwNSTSFWA/+C3BeD+dmchqSSmngeChATJlilJDEBOsFg5VVRH0sU8/wQOW+Vnj8BWPSmyjsYFQkaQgD8tqKYjlDq9ZvKVLg5T/9QG5pwApApXEPlP1ZFxpC0P+Rta+zHcEviUlUefrM3Y8A0K8IKEMwn8RiupVVKuLKI+qs1xM6rEW5VkXCt2fj8UBy0OOamTLkgLJnq7pJuUnq/RUdJfoxUvrME9+O84GvZ1CWf8AvnIEmdc7wuralLMGZTIr+j/DsMKnp3j19ssl6VyL6bDHcVMW9fE0kfOZ+W0Zos4YaD97bWnh2nZ8f7X0GkqbdguhJnTA0rivtrRQtlqCbnWQ6ivfs3yVXJaYQAW7mRtNTNjjBGn3kxxQ/kdRWypaT070EbLVCIjfDI0GA5tQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBh7DaNUE2mD6LXjKzCCVfbx2T7MECuHDs9JFPzaaOsx9x03pD4OuJxPfQsXl+6t4L0nHG0yOA1Yh2G+e60k48G" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_176 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_176 new file mode 100644 index 0000000000..966d2efff8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_176 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "88", + "previousBlockHash": "7C92BB94EA07381E5441FE999050803C1F973F9D5F0A12098F8E7DB8770B4931", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:bnytpoWfsRhiykKI9+jJAIwmdaWjFJMIua7TYCZpqGo=" + }, + "size": 90 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063871171, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "00D7BEBDCA79E83AAE9296BC60AFB8E0E3C4A3DAA952B31142326ECF35B1A10D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kYRRjL7XoBMmGT5SqD3S4PdkldhB9Bc2R1ZaNaETq+BS3xtZiYBEqZTizbAeXbydh4Pc62e+qeE3Bt7p3cwLQ20P0CRX/LQAwZzPllqPZF3Fm0OSS1IHLxQpg+t03BwWDO71Hzo8tzzSI4rHJabcm/lkQQa9E3PwwYLlfcKqPQDEC8PePxhjAaPSV2l0pNVahlZObuGgLr74g9GMcZeHGoDGX3BGgXxbPafQ4Ltm3nLVChODDZXO3KssoVNVC18MGgsNSqKZLuH5YeTacqntyDxmocGrHZ8gPKw+Y2B1GVp+O9dYri7ag6GkehSEawnWr3BhaaHkLP0Ju0kc9rLeWeJRpWAYQ5G3XSmKmAlEzc+92gXP7KsY35MxkVcly6tCrU5ZMie3kYZsvrKIwZwvMnPjwfPZwqxnmIrqGoRgj5lkZR9Il3hU6P5y7ZVweNEOcgxjpSMN+gCrIG8PMvy4YDGk7kioHrLRmdk8MDnukvM4b0skccpTafmCHHx3Wpx3DUPWQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAKw9ocHUvLvwluiDbXAP3r621hfhIN8d9caJZkefzkp02rzZjALCzWze0zr9UKmdkt30oMpu9z+um6y9fYuQIE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_177 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_177 new file mode 100644 index 0000000000..3794ea068c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_177 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "89", + "previousBlockHash": "5E555BAE9C7279D0B2159D16BC6D049AA0C5A825D04BAE8E337D77FAAD2F9FB8", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:82O6kaZRN1E4uK6UH23K+jggLwzxKLiiYcqkYeJ1j2M=" + }, + "size": 91 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063876138, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "8BB59D1C3A6E3C8CD55F8C8D388BB73996A50C2258560740E40C3A9BD8D1CD8D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////sgfabA7oqAVsIZF3Yqma8I9r07gTLO1qXQvPAOICpDVwCAOfzrh5dKLQRCOFYFbQkbKKQagWuMOecTIHrXgeUgPdJpuwraU/2gK+6Z8wvb3K4nLDzYGczqRI+1dfd0VtCKfNaKgXMf1mMo8ee9qVNPLlGBcjtnRUcxQpL6kpfMEDLEcCgyEmlsGcmwo+b6nYsPeVJxujk+xSSirCozIFfOps6IdPWabStvZdR479sk1WVVuMxj2aX8VgbuzRqHEoRIun1MSRlhwggucbjiU84JGckcjV0RbDnEt6Ivm1d1vZKen9vekE0GguK0iGKDHr5WQx7c0fLkmUygF6o9dXQiJeyM1TmCmjnXzFuagPXoTW6ZgQ1hj4Bp2mN+dR2ijGKfrpgCGME55JAeS1KPHVAwPfAVevNrYZ2k13Hsv8eu6tZd+rbJmLHgmD/H+I0uvshTcY9KJiP1TRTCyVv4P8pGy/YrY4kB4yJuZud13N0aAR2frTia6pjOLBtbESs2biZAFtQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBFZd9eQgjH7unaDO8kSp4h1NOkc69/iVSZItx0zDLSpr3TXKlLgzYYVo0eGAToUvuPiyGYiznyudqQHHMdPwYC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_178 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_178 new file mode 100644 index 0000000000..3cb3e0e22f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_178 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "89", + "previousBlockHash": "00D7BEBDCA79E83AAE9296BC60AFB8E0E3C4A3DAA952B31142326ECF35B1A10D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:QFZrUBWlPCiNmu7IqmgP1hX6kC1m6xND2TNHHVHFNBc=" + }, + "size": 91 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882561655772227099265022751590609053759679761171040884447085243962752512", + "randomness": 0, + "timestamp": 1617063881169, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "A9E0201836087067741BACF6971F937E08FDA9FD08927EBBF84A4A24747D2AAE" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////o5tqvu696rKbkRgzAQWnx1wzzPv82L4hhfB/0cgS5WTLSBHUlowOvF9PAbzmWp1+stHkw9FaI1YMAwXDI7go0hok8siSxeoelNl9HhzkAmM3kRNPygUdU1eQ1IbfNnQKFS08l95syCfcxLuxZuJNwUmxBTSM4b6/dM3qB7ddrKogkpm+G7yeKvdPckGsw2ZxooiaynH2rAdeE1gk/aOzLT1fHVSS7UJX5cqSMfmBHyUaMiogMpPkjphb6BKBsz/iv39Lj1PVo1JtS0Vnn2/b8NKtzIMUaRhZploL/zSpqznjzqIiPnTNRgHWWbPc9IPP5tHeVlJeMWn21GM55piJPKRw7JDR+lSvB8iVgWYFDVQYwx6Jht/Z2ofMks3PfizH71ugJ61mACXGgLkoamOezNcAmJIJVPoTpNZn4UyNwekW6tndbaOjz3Foqo4Lpk1Omgx+ru3LQc5EnVS9h7i8tSgnwys3NgB5CBlOFId6hh7PFLCQA3FijZonLVXi9p6SKtyLQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD/KOFte/OtpITX3SuJ8GcVdO9DBXZbR4xAq+eY3BOrEAig+8WocUsdn0IS2YDFMQLh4as9/YgeHXBUPZvSajwL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_179 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_179 new file mode 100644 index 0000000000..37597787ea --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_179 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "90", + "previousBlockHash": "8BB59D1C3A6E3C8CD55F8C8D388BB73996A50C2258560740E40C3A9BD8D1CD8D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:GE4jCxLy5jvOIV/u2oZ0yqzH1Y9gdSzWNhrFOXeL0W4=" + }, + "size": 92 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063886195, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "9B3F382F8B84E6B11A2AD1146EB65EED07089797D8A2936C8F018941D4C18932" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gEz3aKGQy0wkwnh28lS06SCVLB4wl3+kopjlFVsQOoeIRbUNUF0GY8/zUepb8daysqvQyVNsRTycrrP4j2C0ttsDTpp4kpq9PujoXXGURZEJciNiYjFdVntQEjb5+ZkVFdIICF4Ku8AM4CIkqz8VD+jc5TIMJBFqIJoSY7F7tnyg7heWvyA1AZinBpN6gclas8QpWqalo1et1RMYxinAQL3h0FoiucwWL/6amoNriVhCvU671GRLTqPQNb+eLp2OkwOPnIvUnXzvnrEHasw3Xk/0V/2c7eeO/0GRHMNjUWZKGGCUfr+RfNW4h9KNXR2YVtfm87Y8Oibjhp99ZUgBFQFSsMy5FDDgKzqzd7vyHCaY9vSYLqu+D5xwNWbvcj0yo2f4TZcrVXqVn0Z22r/sq0bnLu+M8qVdpEzvQnWvq5QIswQSTeeJm7gD6qfGP8HPOmiWjNu6B2kONO2c7ktlTkQhY865qyk1u8pyTHC49NqyXfRTBjqhiku+MBsr/TERhY5jQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBhc7pupZLN7rMv8IvOfw5BlKf/Nx3V9S8jfto8xjPVA8FUG9TCoczzcgegvSQxlKtatNny/7oV4CAZwFKa4xgF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_18 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_18 new file mode 100644 index 0000000000..7af76abdd4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_18 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "9", + "previousBlockHash": "C438483D3C38316967C6AA98B1C10C1DABD47E66E367E2962ACD826630C3EB32", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:bLOiyMqJk3kj4mdwq02LQFz06wNGBzM073e+kI1P4z0=" + }, + "size": 11 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062316117, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "E4C19A57868277E3A5688E03168ABECFF5BBE7A519DB1F04795FC470EFE2F9B2" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////sS44NgQjRNE0vjtYxcE/w1Cwq83EJkEgmfzPVCeiwE7LvcsP0SqTwHGAthZRkFDVsBsdabrlRT2mJ+vfq6h0zFrJBtVPdr7oI5m83oNtTZRfCNTau9C7ltCAE0hSdM3rD98k6xU4PBWvv6IMi0aRxjQKt03MC85z5QV3RgkLvKJs4AcQalf/0sXqv2rvZu+RjDJTZ+rUoM2wl2X80uJUPHkw8Uv7C3atMd1E5Q0itFNnq1icjB2wIDb/qd3RZz/JqkMXeq8S6NxRx4cRAUrFgf5Pv6zkfDHMcTz7wy+Hiim9zGwIh7ZQAcIuv2f76lelmAXBur2bZV2cIp1sEDmAVgtIo0Eht4CdlZx2GVk5+A2u+Gu6nlRPhJ57pFFygYlO+zUj4Bq1nmZ6rpKIBdynzGWA9O3faILDeF49eic05e+5YmQ6ki4QhjTPhGsTN4siqncx5p03Rw4lxFqhdKxOtpkC5je2UXG6XPQjM7X5xBohUxX1PR2Bgbf8wbhDcgdT7y3hQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCKrCfKx4RlCE9wKBP1xGxPPHmQhQGBq/X/pvBfVzNMGXIxMn+Fyay11VI/KwGsq33UaI+XsByVOwW9wWZRR7IL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_180 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_180 new file mode 100644 index 0000000000..eb580ace68 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_180 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "90", + "previousBlockHash": "A9E0201836087067741BACF6971F937E08FDA9FD08927EBBF84A4A24747D2AAE", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:xAaO9nuD1BdVm4ReDzBlJVMgL9oucB353E0K31ueOAo=" + }, + "size": 92 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882131347797691639928472277308994909901191375134389962514151511518109532", + "randomness": 0, + "timestamp": 1617063891094, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "5D38D43A98D3A15777833D52F27C0664BF4BF05881203B8E464B667D46438D4D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rsSJeXlZg1sKcRkdRbGCULdxMVM0X2Jy9CeSDkfUYQGOkZ706Swdg5YPjtNzF0BUinrIpUWDFbYoQA8J+chLqy9aj01lXn/uOHQCYukxtmXdu/VkxGrhVD6u8XZ5E2pmE9bsrRMSxKQvYBB+exOsV/AVOEBztSCS0EEQDhLZLtGUdo62uJGWd+edsXSrxrwFg117+v67LOtNoe33oCEdORhYPPApSErU8s1AQAwkFBU6oekdjNsDWGEnUUv8bblMOJ/Cz/Gb0tiz2APqdLxyasIfrDyDHEQYhVVdBsjvk/B191zRgkaRMiqEbT6DrnCr2KyFORL53GL8ulpwNYokLDuR2knwkEfnyWV2pQ/HJJwELzTLX2KUwLFkN4fuFR/Neer6+zY2ADF1xyijFQwbtdTzXrJsstY72UpzTLVOCW/IJwf3Ozyk9/Ihw4kpqgOSi8J2UwCkrjHPMbDJryQv0qfHuoDFAeeupezBmWZ0xeo87Xo6kUFlh9POPR10CFBpU6oUQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBchkKRxIImF43UiA6LlDfrFK4tUVPYw/4u9CS4uNTcD2NdNqvYI8htjjFF9AIGqhkZ5n9hsWHBWmzCzcR35ksI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_181 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_181 new file mode 100644 index 0000000000..2cb378bb86 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_181 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "91", + "previousBlockHash": "9B3F382F8B84E6B11A2AD1146EB65EED07089797D8A2936C8F018941D4C18932", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Xna1j4FNTWVTAao2IEQ6+GjKlRdpUABhKD+3flHI2j0=" + }, + "size": 93 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063896328, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "5FCBDFB3640B01F6D3EAB52FBA61C7B2277A926207228FE809F3FC3F51F14C87" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kqWcpIc0tbWnyoDNTgS8LUQyICdFXxmO7OCayGNWBC+pVBEoLp+LTZyq+hWOUkCcqNnH7HmpXuaVELZh2tGiMVvYs702BMfkKSszNg2XtBi3G2QqJdnCSPRwmpYB9wVyFeg3OAnb67SJxIBXgjkr/0XM98YN/OEEicz/QD0bRxBselVjh5E2gE88+65j6R+AslZ901OoE0Q9u+I+7sTVFdToYALzNbBYTAKu4/+4Q4BNJRXHGEp3Pm5Qwd0K4Zi58EPJFNXwUWwo7mOCKm4HivRdlvMOvg2phhnQSLWaewipK8cqNL09aJzNCZniWwzm6nlTn/Sr21BA8JRXAsRoWsl8ZgK90WFhUUjfH7J2e7OaGtJqsJ4gwQXGcPOrNmxrljC55Cbzc5krxE4MC2i4gNSHkcBlKsbDxgfGvmcZkLNMDtUXGRIMMmfJGfM+ujv6KPSnkt0KAxICziYy24pQHMIT6mdtr/P1tBkzZpWlFdqrp/0RGCcIOlMNzajGgmy22VGfQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDwsrD6Vdq6t7cMOKtfmp5x5obXLQYfVVAJO/sK7XqzEc3SUHMVGnpeTsoz4lH0WF3w2E41a3HFXqBAWBK1FvED" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_182 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_182 new file mode 100644 index 0000000000..573a0b8c02 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_182 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "91", + "previousBlockHash": "5D38D43A98D3A15777833D52F27C0664BF4BF05881203B8E464B667D46438D4D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:GOJfy+wFk/cgJ307AsoajKXVrJVdHWHF/epFYL/qtRs=" + }, + "size": 93 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882131347797691639928472277308994909901191375134389962514151511518109532", + "randomness": 0, + "timestamp": 1617063901209, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "A894702BC741083C3A5BA8A1397E61D609A3268E53BE66D161BE849B89160967" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rJgnqNNhslNT0fvwWIwe/mphKhD/M47lgeg122/EyjFIKFHCXNa1SEDSGCDz2EALjqRvrnb/7S+tXUPRK9oOi/5xpBeHnTHZnUXAyuP3/efYKKPPS2zM/5mtpTRAqLhJAO2B+Zh0qUUP0+0qjw8qAcqUQAtlA+SwlQ+yNO9msPG5NTJKX+dwT4giCqVkwtWJlz/IPYPdYfGgrh3lCmA4weUhkC05gO5mb7LdZw8Il9IS7PziOXUWVrgepe3gk/eeAdmm+LKfOS+zRBYyM5V3iaKXTkgT11j9XOilwfVP4z+d2TztLiYYMSpv0zZZohDVkhFFC5QoRLO7q63Y/NHrazDcccHSpeoapNtNTdXUhm/Qm+vkU7xIbiDRrBzSfUMa7IQs+gtR5df47MBeyX7vb/wXSyQGGC/V1w9zZh6s/BQzRaPT4WEmS7d07aPjpxc45Zx37+AT1yJzNI/kefzCqKHEoAcZoS0j2tBClCRmvNGxl0/+XtUZfTyyUN9WNE3Tpy/5QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD6VuapB1fVNFt8OeEfYeWKzYOd10kgfHPGm4Em5PUszwOEjxqbcSN7pQjvLYJOO7svpamsfqYrDkvT3GnwAFIB" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_183 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_183 new file mode 100644 index 0000000000..33ff6bd1ef --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_183 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "92", + "previousBlockHash": "5FCBDFB3640B01F6D3EAB52FBA61C7B2277A926207228FE809F3FC3F51F14C87", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:2L1rCU362UNgknu9DOIciKJW9hbh0waKPCQfeXLW4G8=" + }, + "size": 94 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882561655772227099265022751590609053759679761171040884447085243962752512", + "randomness": 0, + "timestamp": 1617063906125, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "8878DD5917401AA59E111D24EE2B6818B1E91134F2ED11FE20BB60390E24519C" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////j6BmBXCgaxVi30F9Fr++KBRLNT2OHe+fHirGAk8GEtZ0M1lQRusuFahHGZ/a5Sf2jXEj1eOEt6swDEvF44cVOzbBOXZeMo+QNvPC2ezCWKtVRCHsm2VD5Dsvn44rkOC3BEOZa2dXPN/FBNYAwQaR7qCi+V8Q8Hel8gLbgn/XhM7w7OmE7DMrocUYdIIPOHVVi3jQ8tCQheqFs4uP1BEK9/9ZBNWTofDnUTAGdSJ3JUbSlToPWXWCSnbcOOGfswBByLoveX95QKZ15Qdekq1OeYaNykHXnSZHEBChRjsAT4dAwnTMMns0gKIWm0fEmUxo2zbsUoPYLkuHDG5mOd/RJReAYgZjtT0HKvYty5RocUPQnHs+n0LllS49rnMzJjqFqO31JwY2DTHH/H78s+e3wDkYbEizNyFxmrkSRqNP3XAjTscLGmgn7tZjJG4TF7uwOOy3ZVmMiwv8wB4hqWxSbmxtxckER+t1+8Voj0dN5qvM2sYLeJDQpB0Wj+pNuDO9LgugQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBcvXXgRQOj5zvFFw0uWQeJ5E5ANeOfOJaIok+EciThCn/QbM1cW3f+W6Km675NIApKuGxFbB8FdXJL+kXV6+gI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_184 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_184 new file mode 100644 index 0000000000..396e49f402 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_184 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "92", + "previousBlockHash": "A894702BC741083C3A5BA8A1397E61D609A3268E53BE66D161BE849B89160967", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ZnekWXJt4zE+KR8ZkNwPsENy+D40H9ezSjx1Z9FmGxs=" + }, + "size": 94 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063911163, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "12215220FF2E6FE99EE047DC4BAACFE51C2D5369E7B0EB66DB8BA44AA5E8BA41" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////sU/r0t3ZuDo3FiWsPVcACTMyyKr7IO4seDSIwwuuAZHABV2SWHq5r4xYXFRRm+f4iO9qCScpYlUvHQ14BK0XlwmxopXQNZA7J06iGxPcJUjz+mgshxZn+D+xmCS/ynvOBaoEc6+cE2XOqjM1NeZhEKBV4i+nlr81xWB59thOSzIH6fsdcHb+B+0XiIDCQrK9hKB453w55squ0hRG/gutZG2dPr2yJ7+4LClmW4+/RD0+GCwFC7L0V+kRzKxqm/nun4QaYwv8JzqZxsCVkvi6ZNs6JwomMk2AFxJ+e5mBT1LMVdUGcK1axmX5z9033K6dJWga36Jq14T3Dix8+iHvMOXqsPw3naZxDiri8KuC042/FfOHn7dsV2lcjZZVY7+UiHGQULg7eNlDgZGmihTFhyqOZ1NEWb6+RYfEa6ooUbgVpMVYUIYpHa0+2eInhu6izYJ/s5l9c+5gHCLOpu0JP3PvBGoOSFrUVjavXKjDEhw2MyU5pRBfzBCifmapd2c30GlxQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCtSMXXgqFnNKy37uyVr0RLjIdizx+yk+LPMRw/rZICsa9M/+Wa+cDjYmxeUJ3Bnv4jqFP1t4NUCJcVijD5ROkI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_185 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_185 new file mode 100644 index 0000000000..b0961fa754 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_185 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "93", + "previousBlockHash": "8878DD5917401AA59E111D24EE2B6818B1E91134F2ED11FE20BB60390E24519C", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ZziL4DrJp7dexDykoEWn9iReNAevICNPxc63oVxKuCE=" + }, + "size": 95 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882561655772227099265022751590609053759679761171040884447085243962752512", + "randomness": 0, + "timestamp": 1617063916157, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "81C19E8D647FA092E33590EDC5AC057918966CF585DB7466B06240EB354FF2A4" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////iqQWhEiQNjP/AzEuG/uCnisit/wz7dEvOlPmjlmYOq/wI/fY7ZKtVQkb+t7Tz3c3rkdggJ3fMmcaCORHTLhetbTqOqxBmyYZmqiRFD2MXHCz2buymIkC19PdvOCN4jbXDFOSAhA/G870jdc7SI5AXZqJaNcqhG6MuqBE1RzOEZhgM7eFlXRsDfP7PuA6Ur1GhJrOm9HPfLvRYchA5soyWE7WGqHKbdNFwlbgXZ8GhLukbYdxu32mXV4N/4Gl76Py+/9aJVHRQAhLNWmRA9uyDi6srr1KITAdpuRr4/R6pu1ycua3094vVOsdiE9NZbQx1aJKwhfUKNFLnurPKUiBBQqj53vmGed7nfTOwUBsTt9uo10x7mUpcjuEbcc5Jo0gWdMPMJ9vie4WuzyeJMG+rOeNXuMuKEUrCqUnGRL0Z6RC6765kdtXZu7+JgUy0upPZgo7jkfX6vYXFgM7AqLv3wBWBOOg8/nCrLZSJBfkscSae2SeVUqunXygyCRlxQjQBYUJQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAUU2oSGM0BbJu1iaM9aQykQAIv8sR9KwodfDE1JkECHp9ARj8aekokvazkPzOh5hWK2E+YOcjwAT3tKGqnwtQM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_186 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_186 new file mode 100644 index 0000000000..5fc1bc6269 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_186 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "93", + "previousBlockHash": "12215220FF2E6FE99EE047DC4BAACFE51C2D5369E7B0EB66DB8BA44AA5E8BA41", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:wwnuO5kMhLRetUAlhe2nyBMuyujcYczW/KTb/wl+1gQ=" + }, + "size": 95 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881271989446208257911980828427057262643615932976441214377264856368067535", + "randomness": 0, + "timestamp": 1617063921074, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "3F4B4473BD42DFC4B632F15457E5611F4B03A31FF284EF31560C349A5EB3848A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kGD4ozDiYIxY7mhsN57ds43TX2ppwuZMWWDlLBoXyuTjf+ofVeoMm8w2QtIdU84cseqmmeUE6wQH7i9PTlUaY2DdQjd0IaTmxKy5ljkRvxynXnPUkABtgVsm7LWJoLZxD0zHqcF+uz9+bO7BQPIXecw58efgw7w44Ma4XnUMTVBShx4Cszork+sr3PMOeh9ahaat6rlTPcRjCDkdKacYv38dPtW9e0Rn6dc/l4cwwibjcLuDW9fswhn9yDkdxT/chMZRparxUX0hINCLbus05s7iYNAnFzafTwuij/5ZlKKCUmldGzrOYOFuG1wxdhzfgiVYvPIjKGl5O84DisvBGTmR8GtU6h29A3DmDvOVNN4b+87BeIPD38vYjVwNgGPyU23HDV5VO8wmG7aR9t3AVXFgbF2UKyQ8HSlM74tNiUHbSPyTjzwffP48KCEAg1NxOfM8tuJb1s6Ac57WXDHhlrZkuLN1X27kFH7yM22ij8mvlBkFfzzB/GnmsZkwjgpwv50oQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAHe2qQLRChMESTc7NaQjbv6YLh4JNEiU6pmG33QRBBRzY9owZe7KZ4Rm7Mw39kfeF756cK8VBcSiG++o8ZRBMN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_187 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_187 new file mode 100644 index 0000000000..b45fcd2839 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_187 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "94", + "previousBlockHash": "81C19E8D647FA092E33590EDC5AC057918966CF585DB7466B06240EB354FF2A4", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Pr2+tjyoRGeLvuBic/8cw8iUzhMy9kzD7MYZ01A39Sk=" + }, + "size": 96 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882131347797691639928472277308994909901191375134389962514151511518109532", + "randomness": 0, + "timestamp": 1617063926084, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "332A55EC6D7C7082A8B73E66527B3E9E8EBB7B254714C96D6AE216497AD8445D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////lizhBLV97ppP77DEi8cDx5CoVJBB6JlF4ZCgrYefo3uy4F3KaPZAnaPN+WZcLsMOjxCbZ6yn8tIrXuh3tIrIZExiKsjqKa/eB9ieaXQ/zC/Dln6QbLan96WXDjKVI39ABKi1Wg/RJ5xxXKV3Z1+y52dluiBJgH/2dOvNBCtWPyZzhBFouriAMalpYixExP83scNnwRak5y6WMmQZnLOYqGtB/qxEqsbLPq9AWC18w7WELSIynJm5W9FOSY4luoG0rItQnrKqskKDF1imnRNCyYH9FyOIwoNX67K+R4rLAYD5rV3G2bmWyv4IVkVX4oON8W7Q0BllXl8MLYKXgVD6MFejCkRuQZUYi+0zG6Kd1tSe6xutQVkxjWN42X6jmmVHTigsD761EqxBikjuOzNBxMzvyUbSiksVkxxfSWmCfngGxPnoKkzysPuyCddTl1KhG+QPmXRio0iJr5ot5u8EpBheLL9C7N3cZ0fIK5LQ/zDOlFX2E8FHYHLLz9mMTkVgnRYWQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA4ZGH1mlyEZu5tuMPpA0X5hZh/29Zzm8s+xNWeoVQi5iEDf0FE0shhGcHCprrI7ySyhZOEXE28NNcZnDJ4qBEK" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_188 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_188 new file mode 100644 index 0000000000..752735725e --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_188 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "94", + "previousBlockHash": "3F4B4473BD42DFC4B632F15457E5611F4B03A31FF284EF31560C349A5EB3848A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:z5jc550b19Sj3q2DH8PtOwIox012IMyN6AJC0T7z3U0=" + }, + "size": 96 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "880842937844725196442695540779332307793253899902937591585455087694081134", + "randomness": 0, + "timestamp": 1617063931018, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "F1A0E3519F627878133F024B025B6D9B9564DCC3EF17847E5F93927FA16123CD" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////hNcXuG6Me9qtOC47ux1n7o0RAcu/FtStThzdQJJncdreCouaqqor5AY3pKbSway8hbxm/46mj92r1xzwwyYxeiZH6+54xijm+AVr0znIRa4DbISy0e5pvf7UkUdMLGqFBJ9coD0Lfp8ByJb2arYxFP+AGlokptqhFNpGy+NoWu5oPnd0+mEEKfB7JBRjq1ALtNUmRmaY/pRKexrkSjWSthbwAfcDO+K11T9BWv8SLc41kdXRrWNt9st3jpKpJbnolIJG8xFWogwHCioiM37GFcKzyIuvH75faig3aLem50qUu1epS7NZUqMrkCL3RvEdKJeU/Ia9zOEBWXW9hbDtC3bDVZOhUUXku0Lj198o2x5shDck5RWbgaACCYpi8RJqkqap2LI7+FJimer0J2Hcl26B1ipj2q1u8faHxKFSTMrHfuMlXqb/ThnXJEAX/vNVyYb6HLNGdM1RDe4e7GJzyT2Sl3VTzLtGzC0yvXJCzV8S0f/C9mRfzN6YmXGoQyyiFdT6QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD582jrATiaBnMksFD5FJY9+pruRK1KYllhiJgXToVZLwj1ZW9j7m/b+kDId+UuTdXw8wKir0jirFMqzT72bAME" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_189 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_189 new file mode 100644 index 0000000000..ca4e9ec3bc --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_189 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "95", + "previousBlockHash": "332A55EC6D7C7082A8B73E66527B3E9E8EBB7B254714C96D6AE216497AD8445D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:XRJaVbguiy5beElln35V9MFZMYvrQccDGwr94Kzijlo=" + }, + "size": 97 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882131347797691639928472277308994909901191375134389962514151511518109532", + "randomness": 0, + "timestamp": 1617063936508, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "D9012A92D429FCCCC6A5637090C4E568F7B62226CB57F6F8C75D4E10CC1F9CBD" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jj4lF7jOEJ3DOrWmXu8sxOeNN7Wt4+MxKBxdTCvGD6s/FGynpC+RXKo/rTiZSh4vrz2CqWLIedjBd1bHgXaR/a+0l/G6fXu9aQRt1KWdCNiGsyTZ43Ef6kUxJSgupLhzBJLsjFoqlUaNsDsxmfeR1z4B7NU3uyUMPdEnfvuj7kOc8/aoi1SmX5jI3C5PdFsMlUDs9YzO7A2ZR2g5+krkmAJRDnMhADNbRFxfMUkbfuf/o4nZ2XP8/wexgD8+EXHNaPEgtHXHC6xysjZwftahCRpoAHo2SkxS44pzU59vtgs9V1zJgym6tjtCz9soDsiNH4vpOnXC14MOQbUUjjYGM8EgaMbLcGehhIxBxF9k8+rOKuSOaYoPMTBGq/+aRx/jnBq4JlknSFs5dkqPCr42h5t3K3g42pOSZmYNbKxqItjYGoDufPnlBZCd7yNWLYq1BDaiwv+QMAFzeEsDKH+pKr2rl0+4XB+xyTmhHjvFgPlJKF/Ky6YgbZdAvZLDmbMsbdkPQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBRi/boEvrZA8867jQtuuJUpTV7BdvvsXTvsOX7fnXtbmRmgJDYOcjd/pl2hBwg0fdFkrEm+8mRZDy7dm2igccF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_19 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_19 new file mode 100644 index 0000000000..157ecba850 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_19 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "10", + "previousBlockHash": "A6CE28A51C0E9E06139077B3278D4C22C3F86000CE1DF84601E6A2A940E0B985", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:StocDphXA211T3O5wjMN1FQp/UPRz1dDTQuhX9WLIlY=" + }, + "size": 12 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062321301, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "66BBCF9A60A9224DCCD036055DB1A88066F6460E36528970AA6A1F6941EE884E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////iuadnBR9EHy5Era3MysIu6olsr7vBQSpdSG/KoNo/zTgBl7Q6/ods9OZDzzdMUt4uYqTb1OpM2ACiG5XOcq8FMBwStvHqRAbaGp4EQbcWqPnDUF1eFWGl4K2SNcfDT7dF3iqPOI18Ute2uR42cT1GqTxy9ewtddqy3/02dYFas+pTnlK8nppMIDSp4OAQoDFk+0KzXMumz1GtY6awvFayfOOuwRCdKVh5AhFOuXFVEC9IhfXnP8SY0msut0leDV5AMFijOigA+fXHX015mi8idJ8Z5z+yp68n2xavbXhOtGAHuc9DBWB/12cwUbCxx8h7rV67+aUQKRvnUlWy1/MHnVWwTbDh4q+Tb7VCnlDhy8x9QxCJJI62tXhR7rpPS9Ex/zksrvIoMnxht2PHLOpjlHQLdAwkGejOtnDSyCLN7FWio7+wyJMUvCi9P5xx5SEyuHcx3lYxuutODK76QaNVD5gCPrxolS2XvI2FAmLSDWfwAMwGHxQTsT1H9KVz+bPf9pgQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCyACx6STGYfNMmTVhOQEnlch0jhIO8G7atkUL0xNgYmV8/IUdffWvd8r3ydLyb/qkmo9kqsY5D3LhOzNMXsbkC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_190 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_190 new file mode 100644 index 0000000000..c489ff228c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_190 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "95", + "previousBlockHash": "F1A0E3519F627878133F024B025B6D9B9564DCC3EF17847E5F93927FA16123CD", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:rwtERcZX5YDPFbLZ+gIq0+cpvmDLEWZEAB9lzN4bcmU=" + }, + "size": 97 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "880842937844725196442695540779332307793253899902937591585455087694081134", + "randomness": 0, + "timestamp": 1617063941415, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "8E986537872AA69BC8E4C60C073EB73BA641121BC181008604949690CDC40970" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jAqnX0Cbk3oGZzJ3/tBTxSL76Dy/fof7ejrMdgU1RqpbkIM2YoDj59yylqNf8xsIsDWKYvBPIDw1QoZLO62P936U9nZOj43I3qghoXvpj951M4xQXbkA42txhflV6FRGAq43K7oArxNy7k9O7c9M8co2ji1PfR9vbOv67Tgbae5iUWAZ7vpLKpGHU5C1wCwstvUQm//uPaN7sU2e6b057Wj9tD3wjjymJu7SzrPoBajMiRo9EiuyJKnf0q96YttT41NSjQECdk2c3/E3eINPDzHHUuCwEg8F8wjukbc9OVy/RZMPEpXhvi9n/s9dobhl4L3bOMNEe2ZnUAilc5j0VDeN0aU5IgCQK3ewToC3u8uf1VfiT+UciE9M8AlveWQUqGp/Nuc0ZzDRXitk+W81jtf9misYAbxcQaH647x113ZukdvDJuxafQEB06kSQC175VcRjHJ+4iVpAkKm7zvLYUUccDXIQKqOxFFIM8t0ac34Phu2ZIJgMKzbRkQngMG/Fxy8QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB6zb0V6e+W5v5waLaWfmixGt2YyvL7fjayfKlginmpp8mIhsBtLUlQqLmpwcfrrSQ12ultyGrQOoD6nY8xwdsE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_191 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_191 new file mode 100644 index 0000000000..01d6128594 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_191 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "96", + "previousBlockHash": "D9012A92D429FCCCC6A5637090C4E568F7B62226CB57F6F8C75D4E10CC1F9CBD", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:p8NNQbJn21QKRjDfdfpfzRbFTPczjZupX46NFeQLdjQ=" + }, + "size": 98 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882131347797691639928472277308994909901191375134389962514151511518109532", + "randomness": 0, + "timestamp": 1617063946591, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "CD7BC3797AFD721722964DB729961C6C8B2233A10B295CC12552F444B43F010A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////ghyp5OImDmvIqj5SX6imbVrDvPOMk8/fUy/k1Cd8/S5I2tXcAeyUxAr/jbEQO2UgtW3KjgoLyfW9C061O/8qi/qzEdijFJz3a1E1074neqRKZUDIfRSw2l2FWlKeTVD5AQcrkgfBEgiVvHdr/ak5+6y8XDWp52R792OGdjr9QND1OMhW11dDMjvJXWoiU2A+uLLh6wj7jcdZntQNUQCawBg7WdKZ82jyuraAj9mIpRxkp7qGQRzeCDWC01dcvp//Kr/FaoKAx13abQL8a13TWmXN5H8a1nUeFPMvE1LVBgD0Bf+zErPBasQaNh3PyDTXFDIqz9ciOT3Nk6ZqybkuPickt+m+MPU1flwV+L71DWTs3Uo2ochcmR9EhIikPOtYlwL3C2lgDF/aa7ETooxXFiH25OA64Y1L2D/HNOkzgVhCiG574nVEiEPiWFGxU1NAQTlKFMn3RZ0tu8ax+Y2LseC2x7lUm56QhWspjN3UNSfWAy1amdqu9p+PV4AOKhiY8myDQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDbSSfSBwSI1taxq2SiFbKuX+O2xRXn6NTOTj+b8Ra1MOnM3VB23EKqYAH/wYcfdKzuQJsec5GRG6kOOIQlh6AF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_192 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_192 new file mode 100644 index 0000000000..e7a6cb6f83 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_192 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "96", + "previousBlockHash": "8E986537872AA69BC8E4C60C073EB73BA641121BC181008604949690CDC40970", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:py4/k1cxsfjGwNdoHAoH85rsTjHF0OhexFvAjD3i310=" + }, + "size": 98 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "880842937844725196442695540779332307793253899902937591585455087694081134", + "randomness": 0, + "timestamp": 1617063951543, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "663C6324D65832FB374D14C0DA6CD5E93A289620C2BD8F41500A582EC6A23CCA" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qK9P2HMo+rHfLt1oKgPOPJSrYQFjXu8ivShkQOkCCaVI9aOBWj28V/U3vc6Rd/rfiGI1AYyM/L+uZf/lO77zXHcVwt0apqsbOS7D3q8Z5ZKv6i1j0hcEoDBMwv0E6Er8EKVRvnDOEOnhKNwjtbaj+5/2anJSneHM57FJbIAHoaQeazaFwPYmXLPDLtakEGQor/S42h70lKwWrkC1eQtX402bE8JTGG51RNSEsqmpb/NVQna7Ee8lfegqUTunwNxaSqbvIMBr67N/UQjd9Y8LcJRfO5DyQ4WBCOy27Roz0ukjTC0jFHvZra2ED/jrrGzFaLINvkqbjUbfVTCyE5WLcdjnksTVUD9Jqws2JrSR2egPtxOSsAGMpt4okqIusbAIIB6Rd+eHUmSu6dCaQXvndhLyshqRUNnjDNGYdUx7MOrmPeUn58fTpp7+PR9hM0GeaxJ2jwb5v5SwkRfeXpv47kXjGlMpd0shVAFSCzeq6Bqel0VAef3chysKxZfssbdEQYRCQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDQB4ODWlOLavJfljarXILKE/mxR1JrH62TNEtyXz2+RtvVzBZY/5j8xTvAbX4Hz6Ch+wtsgGxhbn1Kq77PJlYJ" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_193 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_193 new file mode 100644 index 0000000000..8c5dae89db --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_193 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "97", + "previousBlockHash": "CD7BC3797AFD721722964DB729961C6C8B2233A10B295CC12552F444B43F010A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:QnbhV2GyqL/elcRMPvzj4IX4XP+p91edeA6gGDz4m20=" + }, + "size": 99 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063956487, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "D1AA6D36AE9F08F5D0A56217CF448ED0F62263336435D62CC47045642B02AFB9" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pE/b92iT+STLhr8DV0mkr7ol8xOMBLcXyCwCfwgbzftZfhvw28VdDYOgtzho5HeOg3J+7cZcFtvXt5vKZInbTa8HUYD0Cib8Q8TyzDzeYPyH6HSgLTEw2bCbpiHMFlKMAYgxd3wcND976lLXo5/zmAJrY/dOurfYn2VhwfLAxAUGRdjdPHYsvAGo1wqBAiL8lCs/61UPkx8teerGXmAk8Osiznu6hhWS5U42/FBHx5YAGI/XHrGcwdB4k1TVizdxHPyZsKzijTdQhrofTbmqCSdA/A1hcl38uZFgTKf2r6Kc86oed2h1FJ/OjosZIR7T8iAn9+XRTb5gFkECYFavDf7WzzSlHbMtflxamOoe09WNQc3bY5zk36N6c7G4f/s2wGeRy80cYf6dgwOdhU3i/ohyCL4jN0jXecB4Wn4wQmPq6ZfRy78L74rCgjRJFtzL9PQDU0WNHMqvfXmo3ceDhwYNvRyPz7XPozO4EoymGcaTfbVvHtV20y1BWVHZcSkNWlD7QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCKVcDbByKyR0V88h7Y8t+NQz3X6lIZ2GAfzl6kAUQYQkl7LxTBClBVKw6rCoAua888MnAOzqu/jyAVf1if620L" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_194 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_194 new file mode 100644 index 0000000000..c88980eb88 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_194 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "97", + "previousBlockHash": "663C6324D65832FB374D14C0DA6CD5E93A289620C2BD8F41500A582EC6A23CCA", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:o7b6XuI24MrCklIpLU1kL3uSfQpsgJUFuIBCDFR/f1g=" + }, + "size": 99 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "880414303811710731626908341002797352898950613333641758207554622931212968", + "randomness": 0, + "timestamp": 1617063961505, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "12AAA04FD3DACFC1CC1DCAC7DE008D626C5F1FFDCA044B03173BB52066F91A1D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gGJ2Ta1YaGDX0sZmWzML2dACj5++im/WucewNQ07CHHeln+lwicOPWUJFurV0ReggbnolIPvsWiUBHIQRRzX/OQNWp9V4YVQElIDHcWJFoZpTnfZcgRLJOSV7ORvjjdgBcYTNU3jr9Efra659OCIZMPc2wUm2Z58tHkmfakGmm1slJM1JUxPpKVOvL6A0ClWr9k9dD2lmkcW8LzTz3bYTPLxbeWLb3otgAsjsb+nYns9TxG+d/40hJoury2Oo8iumaMC/HgfGPfN2e07xO1MUwGr/cLydQb8e+ouvWOAPzl8aZOJOYiCzyw98iJ9IcW//lHUOohKPXkq9NVW1bcPO26hgdZDtE75RM0iX3FPqrt5tAlGzdFajzx70vWlLJw1TTFvQljNsjHhhhG1GdijMPAukJzX6qJ7NLqi7aDlqB6MKJR43GHvAs4Q6YSy1sfrcGkJracozBE9anvpfxztzDcLdnNI+TEqJJhV4KaWQutLh2/4Z0LyoBUHEFli2xgXCsSLQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDHkYQAo2PePVPdDisUuwA4cYHBUyyi5aSyW9GWcWhzMErs2exr7830jlAbVyJeJUBYP7JUqKS94WlBkSL4BngH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_195 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_195 new file mode 100644 index 0000000000..14e2ae9340 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_195 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "98", + "previousBlockHash": "D1AA6D36AE9F08F5D0A56217CF448ED0F62263336435D62CC47045642B02AFB9", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:DEj4at/N0bmy2ITiyF+o7cQp+pyUGR6kPuEAf3YAMkc=" + }, + "size": 100 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881701459226640133281333645594906705754066038206936556099670930859474975", + "randomness": 0, + "timestamp": 1617063966868, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "91402FB36FBF043631ECC8047090A70F6D2B15A873E1206C63D66FC191C832A1" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////q945/uMQwA7jRvnMSzC0Nah9f83NWIUWrK/EqsNKQgeWZul3KmW/r69biQJedQQus7g1d5x/Z26DiBn8sc5TlIRU4g2QbLZXeVw0ZW4h5lLsX7MQ7SV0L2dr++VPVvCnBF1csWexGCEz0py6gc+O4JGx4vmA96UR4Q9EPkqWJVwnd3ROLgt03tx7+4mp440JsPIgUEUSlDQYvWowtzIpmdNdu3JOmiVCmM+Vabhoh6C8iE+09sIL5xdLxayDy4Zp/tR6IDys5n7QGOxw87UTfk91ujhPTiCKf8CnFFgpQOMnMsyqSMjbw54DcUyTSdDtEZdSdXtKU5SB80l4vwqfUDnOV5vmelWJaZNtcQPSLJRBBpQU+FE9O4JNDyvYj7fhGao9Fqn6e2cuHdWJqdXBs42kD2Zl7tLz+Bem6zcezM2g9PDAAWbgmFoSO9gQZSL+zEuHb5h9IKEP+UfnI0Ysbru545+b3iB8KmOT4q8IEWUeIbHg4WAQPDW+rlcywcMnH1PtQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBeHtmlUZhPVeDSh0HgbBaeUnwSIYyrobBkyXotGTALIldmRUzs6mcRRLOT9GP1Q5GCdaC7dLrptKsUFROGRNgH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_196 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_196 new file mode 100644 index 0000000000..6886b68bbe --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_196 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "98", + "previousBlockHash": "12AAA04FD3DACFC1CC1DCAC7DE008D626C5F1FFDCA044B03173BB52066F91A1D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:In2YdOHQBLFm2l7REZKPjGJY7So/DOtGZBMofeUnJyU=" + }, + "size": 100 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "880414303811710731626908341002797352898950613333641758207554622931212968", + "randomness": 0, + "timestamp": 1617063971709, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "B8F1DFE86E64616674E7DDE4044D1548E30988AF127C66C0C6DD0917B8866566" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rpLsplgdTyRsaXQQwn/auTf4KlfWwv8lMYWYNufDDXcaWEyZQFa40MEMO2K+JLPGj9Tt0ZzgqbegV/aXgpSkIvH2QgWz6Rz5iX21MQAPg4jZl3YEHxtNTi3llvbISCiZC0M4W2Yt2z9ORTf/RkQ6dvPuslKGWvbDgYn3+7deVfhLSO4vih9IdfMxNlkwcS/Ni7X/FHarpDRu1disrrM4dKXzAMfBqMLWK0qOhfvyyTEFCYChgqr62iH3XAlFfZRv9qIpCJaOCMYc7jODG343OhJozBK0TmCgVO1Whse4np3eNJxot7KYIeJEmP59k7SKaPGSmPlAcx9eFr8/z9gRcmzDmwGvR4zYJvmRB4EPAlFqBq/wJW/RNzrhnOnaE4bmyCfkFOi3d8UsAcvKwRuBMJLpZyDiinK3JKMadP1SA5lr5caw38LoBkXFThD5XhEW4MlgyC6G3c7Xj1BpNYX4oZbJw2utc9mJpxTjKg3kyJ9A2nktuDtkJO1x21PEzrAG9rcpQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBxV3IacRSf5TIBsTPqyOdynYDfU7ulc4KfbXMTaqv7jevC7SGA66aBEYSbscn9GP8EOZ6+Ox8fmfdu9u1ZOdsJ" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_197 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_197 new file mode 100644 index 0000000000..0d904acb18 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_197 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "99", + "previousBlockHash": "91402FB36FBF043631ECC8047090A70F6D2B15A873E1206C63D66FC191C832A1", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:UoQMfSVZ6xo/Wzb1PRBzf+cMPgN6MJmTkGkiXpVIwBE=" + }, + "size": 101 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "881271989446208257911980828427057262643615932976441214377264856368067535", + "randomness": 0, + "timestamp": 1617063976749, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "93D0F2B9CB92E8FB5339E9B177264E96A17BB570A2290932402908295EC27373" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jWsEmTcdzjy746p3LneW7W4jrGrdGRqtqcQKkk053BPJNA+hEJRPZC7nik4H/XvjuTqwKTT7N22/OwngzoRC7wJGAdNIBruszFqR3FD5VEEuK3FZ5Kmn+bTrVXy06yWZC5eX3DN45CpteTeqVUgYvJu77cqtEDVB+N8yZuthyGmcZs/l/GxX3gwmK0bhSmTNmFnRwFCqXPvPQVNzUAWhPT4h+3XecYi/PH+v+1324hLiN4Y0GQQyeStwrOmR/k4nRhAMGaeCuH+uy+RsNFcDtnmbDnDlD4WVUZ4RhUigkJZzSkoJjjkHHWO7cFPGotP6trnFiNvoo2z69Ga16HAWYMq801oCVE+5GQMdE9GQkuCdJjW6tvLv0ACMewP3K+0CXC7Jei5ky69zVlM2P1Az6efAl2ayFKbD4vbt7xAlIdX3yqkbDF1cWzd2Wr5InKh/YYLCSDrziAlfCCBBzfZ4cfNiX29A3mhtj7+PVAHGwfMqTxGifgPolJBjXDrCv7vxKEZGQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB+EORsCcJvKjHP0wHM8aX8Eh6QPyiS5H3Yr8piJQyr5GkY474vosTWdP+MZZxVbcyA1JZX+h6oOLtCSXKCaQMN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_198 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_198 new file mode 100644 index 0000000000..24ec30d996 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_198 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "99", + "previousBlockHash": "B8F1DFE86E64616674E7DDE4044D1548E30988AF127C66C0C6DD0917B8866566", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Egdg2Q25xlsV0eiUuah/BW5HL4lNsMjK8K9kftbrRSU=" + }, + "size": 101 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "880414303811710731626908341002797352898950613333641758207554622931212968", + "randomness": 0, + "timestamp": 1617063981725, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "FFFD83B9BD8980C580C160A96DFA5070AC3B3BAA01A276F1F8ED7641EEFECC1E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////r/KDpc5ZZV27H7Piy6Q9za93SJHhEQagikdlE8/xB/E3qYlK0C2mXTU+5EkBEmM/rhrFbNcPYDuu0w97EVh+artCRSxlGXYq3B1LlDxcYlzNuJwSKhbfMjRi7lF8fvRHAEKtldmjOAvoIb2PJS0ELJXqUF5sJ+J4FFAclR1w3uyf9Nd3jWf3DwhkpA64vuKwmO6XjVLWE62OBFUe4Mx5F7ZR5nCdUGsYAwdX5v/Hr/1HSOEQTHC5k2RX3cOD1Q9gSNh+BaLGYj7Ot7pCx6LtqkxWG12VGgAV/wkH5Lgnvqw3CitpWvcyDApfFACQnJSHGEe7DGcKdt1SrKOUavjWB4KQtzMk2CJWFZh3n1lLQnseePyay0vECjU9By8uLoMTpLsQ+61R/iFvh3JuZGtidBwt5NeM70y1HYFfUyDSnvl8Z7h/Lx7F9lagaHOZtq8xq4NIsPxaTxRpCYPcIDvV9l79Hg1xlRB0N8httrfFuH1UuKO17HUymCjy9UIhqnSxhZxNQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB6weAIWYEeo6cIrlBthDfMGQfBH2xsdE7oz+Ej2CjKkrPXFT8PSv9rWa5rbs6jGg6Ps5NBeW4l6cxEt68EtBcD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_199 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_199 new file mode 100644 index 0000000000..b92a794ff9 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_199 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "100", + "previousBlockHash": "93D0F2B9CB92E8FB5339E9B177264E96A17BB570A2290932402908295EC27373", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:xYc9yJviRLG5tfnK7nStYp46mZ0gEK+hwzjIbjqHlQ4=" + }, + "size": 102 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "880842937844725196442695540779332307793253899902937591585455087694081134", + "randomness": 0, + "timestamp": 1617063986610, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "229C87573BEC7536DBC1A11D995DA49076D5E277E155FDCD3C8BDAD418E76197" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jYugJFrh8g1kN8lvAi1i3WNk7I5vR0CUly020JY7PnPgSrwA2wzI3aCh5DXaYfMGlEIIMmYE9jItDtmJ/eELZyWn/4odBau/nALPCc96WY/VkhPp7XHIYOmmTY5PxrsoDr2oBojOBEnDrzTD99p6Bxm4eN2zZkegnR+RwG1ZSEZP7pj+F43FqfmDyejt11cKgGVGYzhfMci/lkwGdwoiGyvwFI/SSmU85bdw4CAnCPbsx2y5Z+2mofh2ZqS813BHFOTzlbpOmNj1zg2moq7kcWJ40+CwVq8reX9CyyJReGmkEelkCdvalbKUhTVwOkYgl/ax8wmP2lJPcEU1IclUPnRTgM5alyFiGckCNAnRVEdbKXPp5uvgHfKEkpXN948IM+0/jYHTUtM1SdS10E+RUucfokagHTjZ/6pyLv2pER7gfKBRbInNxh6GsbHuP0bgNazwppNgmNfofZkOnPSgSz0U3nKbS/IHTG6R/HNwAcV8Jv/tF4pbecfvjUCwNfR1/rXKQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDDZ5VH4MI1sgqSFMtWNLckCiiCkRbfgOalUPtQu9zJwumC2h8+vX2o4DgOv63M+fF54t+mE5ZWYeS8DFiH+HsE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_2 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_2 new file mode 100644 index 0000000000..7a5afcc519 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_2 @@ -0,0 +1,8 @@ +{ + "name": "testC", + "spendingKey": "79b4a6c8a3045964606648c15415a05b5f399468132bfc655f3ee95a9609f44f", + "incomingViewKey": "81dad8c44de6e9219da7ce32e46704705407285564ac87e9b7d99c196d7a8604", + "outgoingViewKey": "24292e253bbede479664ee7c5527b248b89f035aa43c84f4f06b0b17cb2921af", + "publicAddress": "16bb75e86accd6c22340a8eaa434b2b82d7bdb6058de2e87fa0dd0b8b53d9eb093c046650cbc1538ea1c0b", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_20 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_20 new file mode 100644 index 0000000000..02ef5ddb92 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_20 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "10", + 
"previousBlockHash": "E4C19A57868277E3A5688E03168ABECFF5BBE7A519DB1F04795FC470EFE2F9B2", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:/AwOADt+/lxoX1f7uNbfH4j6Pf4crEhaKHVo+HQbykI=" + }, + "size": 12 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062326339, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "D7C0B779F292B18BF894D239B2ECC264391494369107DEF0E185C0E360FBEA87" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////jelG49/ajmVWDiWrCk8Cyl3H8fmSvYAxGdo/U6aepGU0PyETs2KZlmFO4Z1KAQRNq42lGXaUca/JLf6T2o6FNNYfp4BkStZwSle7B+vu/HtuQV96QLgrwQeZuLfzKBGeFXL9KtLluD8GZwb5HuEE3JOiG19Urqu0AxpnNzBL4Y4BLuKVCHJvC0ROatMbvGsqp5m+oiZPKYNu+ZZ6V7DU14msNBvGOtVDqMPuqW6tJgU5owhMFEb3JsxhvktTIGLWUdTv/qhPcpu6UauCnJhC/ikhOymdSp62bzP1ShplxtdIn3ZZDQmvcN267bsGpADgp175qOUQFHJN11zReb/PP6sB+DJXRDxOv4PZLv+uyZUoC0OBfct2zlT39tz7W/4BxOsOFgOtUtaj3Na84vTjKoruiPeSgqJPvU4l5FfjQ2qQFSfw8s8AXBBhG5ofcnbIpkb2DKBsJxRYQU6NhlL85rA2XjNGp0fBwsfpF2Kh1NE2shVEYvPEdQQFwaYS1zWBwakSQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDbHlEjpsVADWJFAbPPeb1GopMr86hppyQ0ZujlxYTWz0pUpkoEG4etxK+aB7KqwNQCh1oNXKC3tNo5OZPKsRQE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_200 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_200 new file mode 100644 index 0000000000..76b7196d5a --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_200 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "100", + "previousBlockHash": 
"FFFD83B9BD8980C580C160A96DFA5070AC3B3BAA01A276F1F8ED7641EEFECC1E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:00TlEZjKt2k3auoqLQijRTGH3sUHNPyYouu2vRgT3x4=" + }, + "size": 102 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "879986086737872350920864124883632568194233224903032010270683244223561600", + "randomness": 0, + "timestamp": 1617063991715, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "D1728124C2229BAF800D4DCA996ED7B543226B9B5852A966BE18EB56BE7BB1DB" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////onBotsWsCDTtMp+bw41myoKBi9SOa1eoLrqENkKtTL6oCYSwx1eqkeHM0q62ZBviiUVRazBc1RKt6aypADGsfobwAZ2eh4qqXxAAPi5uBqzPIxTTFFKVd+7HIo3GuJ2lCc/GkSrhBpw9M++Z7zHOE0vrvRNAhVbkto/DXDspAicbxhuUVCEAUNwtaBRDjp0Gkau2jDaKg/BIKhkaKW4joWBXHp5ugi8XDE3kJE4194FCHOufYeJUvPbujO8nvHIs+bKKmmULurRk3d1RO9Np4hBWYaHn+4T76zj295xRyO8VqGlQoE9tgMFpRFIYjXWRommFWgJ5pJNgp+pr4P6ZOqCQwPkmr6aJ8BDmRE1mmEKRd+p+tFPSn1ed2gkuBdKwaunRDMQGjJ2+hQNPliZ/PpP/+KP20pSsbMmkttsziYsTlYdrFuSkAh224NJteASD0mwWT1+6576C+PypTGw5rga7FEoMhY1RzVdnTRd/u3BPLGfzTcBGmZFyDZfLHX/5ojM2QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAtRcR1IxmmQJXW5CY1qnGfxhz98BLvSNO571BSX/CxAXM8BZLdllLDhMBwHv0gBiMGNFWYrlriTWAHw2CU3cEA" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_201 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_201 new file mode 100644 index 0000000000..de382216ec --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_201 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "101", + "previousBlockHash": "229C87573BEC7536DBC1A11D995DA49076D5E277E155FDCD3C8BDAD418E76197", + 
"noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:w3REQ2EO1EiVnNQwr0WikJl2hFgPlS+Vv9q1Jhwl+kQ=" + }, + "size": 103 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "880842937844725196442695540779332307793253899902937591585455087694081134", + "randomness": 0, + "timestamp": 1617063997292, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C6C9684B2FEBCB98EBD098220E2D80AEC42C9BBCF96EB249D5A0B91345848B75" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////t+LyrLWRc/jDgYcGnG+fm30Wb//9V+xCTwrLOPOOaIK07Kn3EZG+B1PYsdISXKMslD2NH3OziSzYO6NLZwuxPczcUdH2xHzD+Ixg5rscj8GWbUdlXXbJkeQ9tJe+2848FNjOVjD12fv19iPGC1u1QloaWV7zYl8IxS9FQPQuDlG/bsnL5TXOIe/7t3jSLYxypOyLxmB/dCnN+HPyzRUgVXESuCj+HrEOMMVmRtNB6e7d2uEFc4SV8PCRA/EEyvy6xdtRtagLTvm1qQOTysTRmSxtlZkRsa6qEypBZtJ0GJElUzk/PUiYudDjv7NceIvQkaMISEuv8CGjxq7GLs3sFYrD0GZIlBMX3Cfd395gPfVZc4unkmFtNLaU4+mctE/oGcztqilbDizITYe7CUnZvyRYXiBW7xDz4L5cnNW7TexhJccuT7waWDhHh6NEgeg8AqmXtwTKAPq21/fCDxgRkgk3WjT/bTzEQhl2dMj5zraK/TPTyM9/CLeriWi1gAKJQ2xWQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAyzqSv8xcMF4Vm88XJr5k1ouGTd8C90pAa0mH8OLk9k/zbaZcbIqil5AlOjXZFvc5oDcwQZu0kS49nQf3NAqwA" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_202 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_202 new file mode 100644 index 0000000000..9a8f8a8e7d --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_202 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "101", + "previousBlockHash": "D1728124C2229BAF800D4DCA996ED7B543226B9B5852A966BE18EB56BE7BB1DB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": 
"base64:o22vX8dG1qdShT9o0GjnXxtT14kpTmH8VPbkA5dCLlc=" + }, + "size": 103 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "879986086737872350920864124883632568194233224903032010270683244223561600", + "randomness": 0, + "timestamp": 1617064002328, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "3B8A499747B2AC1BE8AD06B531C7D777A4B6FF02C0810A03826D8F0FEB96DB70" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////p1aD0N4OAv1uQtNFwxCBfdih6CpgY+T/mRN8epmoWZdPRJoCAA2MTJQMfyGxUFv4rFk9mLn/scCYIc5bw8bZl8GCSIXStT1kwsrpZq3QK0DikagfUeieOe5GwOfIEywvAJ9xSu0qdO2TeCQJIprtYkHmYwtRJSsfqMHZMh5K67DTVeJhw5TOZ8R9FDbNEjESie+ll1MhylyFkvIE7KxPMNRYCoxns+idzuRhXvXulCEFfIYKl1nuGPmGA7qzxoGLPZapxWnDoMJ0THMUrFYekOB6RO88++++DSHwmfr7HDGzNQYGV/2zPHUTHEtt4R+fKli8pbD6f0STQEPj6IgSBLCJoq5lsVb2PKusFatJ3Omjm7dkDFt7m174d9jqL4M0NtYr2CAvQ/hYfU6OpEbQx/nQQ7T9Pk2lN9Jlf8DUuR+c5sUzs9mo1YFSClXrj4NpPTMbNpm7nSh4rnitwAbn1nXfWu2/sBw/oOsPp5KMgbBbLpWYFa122Ehj3jQBHF147civQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAS8Cx4hubBGus4KEZ6dJo23xHt/RFdK64z+KNwTqVYFtvBq1NYkZ23O/G8wLW9w9mt1/rtMQmg267WUYmlcHEO" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_21 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_21 new file mode 100644 index 0000000000..d41f4a5b3e --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_21 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "11", + "previousBlockHash": "66BBCF9A60A9224DCCD036055DB1A88066F6460E36528970AA6A1F6941EE884E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:X7QM6bpc4pUQIOnzh0A7GNQ/3vpyccXyiiDXCNSNuD4=" + }, + "size": 13 + }, + 
"nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062972379, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "BBFF034F93083F5911D368C886AF28018CE1B7B2E2FE33F70501CB96435F1AE2" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////ugDZBh6wv9C/tXRH+oLuScZkw6WGH2cQJ6pdIAXFYUYzqACsS4cfyqvr8Nc9NckgrfW2WOon82PuVLKOmgDA08LJnrCkao5HpQVFCByoy3GwNU6IjXqKqrn2Le20B/CRAA4vATjyGtt2xjn+sT6+OWfIs6MmXdB6GphvPY1+ypw9DzxDTTVIv5hYMjDmAjB6r01uZPnCK2QKj4ng4POzjrpiBSYcJ/DTPwgZLkiE0yXNEEZeCOyLblaaZIVQu4ZqGGpentGOVz9XWIXaFk/XQ8DbxKyUzmYL7OGA2YXH2NHcyisx37NsrPR1XHsV5M2/91+ZvcMBvNTp8mneFQrjcTD/87xxAEQeUpnIKp/HJs66Mx+28jGG1zdxPnAmcIM0yLdYMUUCvHcGOqrZ3J1w33iuISlBtgznWEoNZnDKJ2x5FxCRK4ljBipSN1Z+jwdt+PTPjRWOqdzq0Uj7LaSGzCYKWdelomOdkIeZcOSP7Cp7yA0mGqmcn3/BSebLBQWbYyuXQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBGrjWLj0lDbWOqTrGueS0YAF4q144bG9gAfM0PdDfK6Zjs1INlWiYzyUTxZvibia+veB3cjrx20U1fn2QFXncM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_22 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_22 new file mode 100644 index 0000000000..60a9930bb6 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_22 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "11", + "previousBlockHash": "D7C0B779F292B18BF894D239B2ECC264391494369107DEF0E185C0E360FBEA87", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:LsDMjzoP2DQt1OepDu/XWwzd1XUTuVJAVZpq347uTzo=" + }, + "size": 13 + }, + "nullifierCommitment": { + "commitment": 
"38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062979312, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "AD1E1B42725056385B919B050768A357C93B51188AC288F65D3522B476662F58" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////hNnLL5QOQFGLjy4aY6ijA+I2kGcwEKzIBI/eRMnzQfSi9oPQZnYFTIInM70xO4t4ocCF9DCCHGjBBszmlLonZTa5GFRczlH2hfURZJyS3xUvqS90aN0tMn5+7tw0fI5gCZj74db/KXxS/gfkmIT63l+CI7p6zfd6nUBIALimuSDtU8/g5/Rb1AO4MfiNJfKxjWckpbjwhb0M0ZyGOrtA3b4X/slY285vebl3xGwKG9M1lY6WhafAaM1E9WouSacf52TblUfJ3aQ0L1wgpFUsEyu4XbYmT6N/sRUdRPFbea8NPyF2sXVrTPdSXfdJk48UnA4Xe/NyuvMKvO0lhN+fQXs+rNtBwvBeVwxQ+B/KuM1tWYde290s5AKzeWNwwh9MJDgfC5tt8NJe2f0CF0rJRbDaUsuI8xRG5eGNR3bf0JiyZ0QIP1lqogkrI9xHU589kGrFAYmuFytF6AIpOlF9xGfGpHKkxlubXsI0ZRaRd/Svu1q/lQehDvux90sNdhjvkNfeQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDUo0SDA0AxmvfY0vuZIVPzOOtcaDuuP12NtcMrS459t59XcCTnre63qybzRtq+gkTosclv+Sn933jPM3l8gUEH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_23 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_23 new file mode 100644 index 0000000000..f68f4a8132 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_23 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "12", + "previousBlockHash": "BBFF034F93083F5911D368C886AF28018CE1B7B2E2FE33F70501CB96435F1AE2", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:UmVqxJDNRUPeZD/EwsIkgJ1+eFeAIYHaVvRNSX/JhCA=" + }, + "size": 14 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, 
+ "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062987915, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "0478C53262BF919EFDFA04DFDC328AECEA82F84244378B5BD2B51E732B751295" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////rxCAybCkf8phtSEBYQ6ekQ0VJNIJpszHZZ0ZauQkYpzTHJYkEwrrvuF05mHIQSUspH57wK96H6wSiLbofPTvMXeMeIt12yg6xyx2/db2/dljsqpfb7ljTy5NNMb1S30LAJPHM0Kn8ybgogP5GgiIgNLtmLt9GEKwv4+U3XXqDdJSz4ocJtLVJElZmUw8UtAYjiv8fadN5x/JvQzwUmS1O3vszsJtDNxJf28AIvKZE2LfW0q+CnXF6vDTrB6+/A5AZPyy0Ofx05noLCmlflTx2lNz+FOStx59eHFOn/XcNvLkL+EZMn45pEalYvYyS3MIgAzUnj7TXEz7BsBW/BC0UAvRZBVayYcYzvf7hzBQzTxe1u1NwnZ8xIg5RU8YlOkVx0EWhKuTHnYsBDRj4xI4fnmJVEdRNiakQUxJh6YeIR2lWt9B9Ni6OyysErp1Esga6m+9dO3dzXEtIfKuIW7a5rs32QiWrNocTDmTo+PGyIyew/ykbn9W8RaeFQExu2tWMe0kQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCMi8pFNhAtYYK0qjUemjbkyCTFhVTECK1i95TWFiIfTF/fbRymK3IawD3jBi5x571zdvyIMNUeBmQ4ceQ24GEJ" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_24 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_24 new file mode 100644 index 0000000000..09990ea6be --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_24 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "12", + "previousBlockHash": "AD1E1B42725056385B919B050768A357C93B51188AC288F65D3522B476662F58", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:b3iZ7222mPlwWj8Sr3shP9WKDDnqIvtK+bmWL4yRXVo=" + }, + "size": 14 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": 
"883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063006640, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "79A12578E5ABAFC9C0B29641D8DA190D94CCE5755EF44E0998A86DE3645FE036" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////pIFkoFl1dWEtNdRUss2D03RArTXaszNxH+MSTNmamsuSwxX0zIlrVtth6a96ctcBjpbCACMwNpeirH3iXJ1jcldJf34H7XYG1qg8cZ763VeMtxw/eZsJ64P2KfgoHacYB6TcgjNdbsGi/erdl4xm5GK3ovMdPRXWHRMPMGbP73m/wtcjJpfeptyB/FYWMLB+q960Rm3quPl7/1xxnmrpDQ69D9uTPGhwMtYLhLfePCyesn7frdaxVZVtv7UVbI+0GUBWB8DPQhpBU24I/1D6azNLbKVV+0VNAO8gxcHcRLe1SiYqzrKFDjco+N5nf4uaRdp9NeO1Dw+7fhi+wwvdBLBFseSfUaIdSoX4jfAZnmO+WEpCjQaULNQiMgiJnh7yI5zyurScoYULZM/PWoyHe+Wqf9XPw7t3OyGBgaBegPojESARd0rGPPBjJ7BAfOg9DbeEpOKlwN+6yDHgPCxIGjXyxGXB93Emfhuf0ByYsm3X9ZsuQRlDEIRyqJjZ+iCLZO9rQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDzwbZhCm7/gz1xrcU4UYGZVILrCCq0Aihj2lx8qaYgCigCI1UAWmXgsxbFbrLQvcPe/EsmFYKq6mnX3XjoHJcF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_25 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_25 new file mode 100644 index 0000000000..a5e2e6876a --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_25 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "13", + "previousBlockHash": "0478C53262BF919EFDFA04DFDC328AECEA82F84244378B5BD2B51E732B751295", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:JuNAGB0cIyWdboqFP98sVi7rzbR2H7Qz758qxNGw6yg=" + }, + "size": 15 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + 
"randomness": 0, + "timestamp": 1617063011798, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "EC4624E6993EF740A76C1B6DF82B8F86F7B14F9DDDDD3345783242EF6B2EF862" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////pM1Fqc9XnoylPQPGQqbSXNYI7WgCU+E5SnBRgQW8w44ZxtJBikVQtVw4YjcqE1+Rp/dpdoQFd2c7hcylNZmcyG2vxEtAo/aaZlUmTgjeEhjYyraQgt/LAzLJkitaiRPjD/BzCn8r6NRQ4boJlQmbIBPpUq23m/fdfd8Dts0NYtVXxYHV5pk3tfMFpgPkrG54l9HPh4srIxZrpIOwT+hKWG6E7g4v2PSAGzGDsRdVLiWKdlJ5wV9PdYDeVf7QmcebAWEsQm+c17i32ImgmhnHu0o/SmmP/33kPauKAZlI7cFzPQxWIlnCHRnb466JdNq5zAuIcyhUvqsSTudDZEm8G3EunVyNAfuZIHfeSpXkxtrHr2hQFmfv69msG91mIbnOF+/a5Od4Pa1ub0QGB8azzthLusQrL43GZqOCm9634/KdO2sEBJuFCM6FIe0d1dks35lsi9osMSZh24pshWkX4m9tEyNR+JWoEnJ4GqNqkGXX4jpTd0Yl7bW8osg/5CqSBKYbQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB/QcxNJqekCnIJ1CStTgXAE2VIJUB66h1exMnkTOcAtD9OWmsJShS38Bd97gfblAXRhWn4jF9zIfZT7P2ehV8G" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_26 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_26 new file mode 100644 index 0000000000..14a7a03a3f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_26 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "13", + "previousBlockHash": "79A12578E5ABAFC9C0B29641D8DA190D94CCE5755EF44E0998A86DE3645FE036", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:G0HF94gN+2hDmeXP3zAYJdgccgOHa37vxgI0OL4xbTE=" + }, + "size": 15 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063016694, + "minersFee": "-5", + "isValid": 
false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "BD04DD921515025D21E2B2AA77CB85930F745CB98082A38CB1FB4A76F8FFB84E" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////mbCaSyhSRJYa4L/kLB+CAZ2/l0lhnJBM51FWU+tfG4WQiHRVuaIzjstgwleDmLYYqvDO07hPBz7+FklJKnJgNEYpX7aJyZOOEmRoxQ5dOnbf4PYaJ5KdAwXUnRfanjjIFiEmXFfumAsWneWE8fCg+kCE43MgvaENiI0VoVIqL/8MZ35iJ2yEyYSFNOPkSI6VqTGJmUIfBbD0bDvtEqtDhV0fY0J3KCEtHfYpnSU3P4iavh/mZrBlBB5aBGzVIROPvxEu0u8OBRjgftZGi0tnyZYB3+XvE4Msn9Iy+Mgfn2MOIeHvvrnKUrqxtI5Pl8K8dBVUx/gz8vUPsF76EfTNB4qgLE2NsMCImVKI1GXoJ3EJ5uKivZlMq4gzOU/A9RKfF2fVMsugLglVP9bibn6TzLUZ7NaJBdusS/i9QyZvqItx3bC//R+OK0LCOWX854Aoo05YTZwEo3opzmxc7a+U2xFaqEZG2kGY5H0cSogeIxc0roozXrHMUopEghJZS3A+ZS6EQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDtNCgX8ocCZBuWc+Z0xgbiAOWbQ0R6/D2u8GaaqfsGXMXgr+1qz8VV9uwy75Y3IgYwRfhXGMFz2ySC7bEatPAC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_27 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_27 new file mode 100644 index 0000000000..9504105552 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_27 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "14", + "previousBlockHash": "EC4624E6993EF740A76C1B6DF82B8F86F7B14F9DDDDD3345783242EF6B2EF862", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:JEV0jJVuvfgWUVw37URWQVVPGQ1Cwo0jqP5yQ2VamVY=" + }, + "size": 16 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063021919, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": 
"20988DDE40C0A1E97EC2FD821BF207D890FB231BB6742D3A523539BF03A5AF05" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////kzFG7ymQJKJZW12TwAxTs+Xu9KsGj7btGYAIaS0J6RTbLOvpsnL9O0Olsfa74juMsmjoymdmFrzjhCV5v4QxQue9cv4+sPsKkvcjdjPi74opOlK5HpwyMEQj7GrT86g0Ca+1i5Bb6bJUX+zu87adXuEsr9g3j55SGHsCVdRUcJ0L4HK6zgMoTSGU7X02DHELkYJg7dzV32AIFV7Y4hhwJofMz2xPdFEvicN9N1ghYpSpV/GN6TwBBuMCcwEe2jPJPKquFdeZoNY01AA/ZSaeVuMlZMOpkaNQw15q1je/eaQaeWLFumRVqHDowZQen+8tAo361muF9NuOYHXRXjlfIsI0WdDqyNEqB5lm/9/BzhS958ly5nM7oS2cHBCWOxAhiIHCXnOn1Nu2CXGVFPcf7/alHJw2XjWvl4vJ2wfCuRCSQbM9crPNb2h3GgC9OBnb9lKgF9+mwRM8gBqA/inYwP+bgwj42yhHEoRx9t2BI/MeOE/7cVwRD7CBO1XrBYaq/wJ/QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDtRLATjesqB6nzS5wyb++zJ9EoK5/dmDMi2JoUE1ZAsbjLGEYMbKAgiddBMLc+QbCyw5u+P1jXUAtFuDccRNcD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_28 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_28 new file mode 100644 index 0000000000..9c324a1b46 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_28 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "14", + "previousBlockHash": "BD04DD921515025D21E2B2AA77CB85930F745CB98082A38CB1FB4A76F8FFB84E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:+vQ3T1qs515spBHtgAp1Bp7WsH6XI9QQFaxRBorfgTY=" + }, + "size": 16 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063027043, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "8B2A37B1171C24AEA0D42A52F6556A79D75207524CF0A11705F237C920FE5367" + }, + 
"transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAPv/////////jooeQoh3nGWm/ivtuZ2HKzk3rIUMc4XGkQHnwZmETJ5XXoVeHN2D4oC+t2tuihyzq639652SQJDuNcqnMcsei0zIPv/gbh/tRM1Hq5CDrD9wVsGzPdO34QF/wqj7/OboE21fEFbJJJag74vDNSFVDJs00jW4ar+pQeZr7Mm2bcExa4qxXylS+QaJ3OOWRTwqhsHLoV6jG94gW50qeqhawf+utKtyWmkV7VwvADOqD9omv4j/NlDuWHbb3DOEG/oDidxPoxcWUTzAteOk5xWYyBrORhRCuVfgSdOtiDWK/AcdJM/xLgS71LejwhyPaSRG1c1cJhll6oYUxPveYEz5ByOVH7GkI/MLbH1YnRj8u/KK0HixWT8y/7ZzC5xb7WatSrf9UplXvGPKV/QnZ9rhcePDmU7rimOOgu24fuHRMWiFQkdtlwgJ6ZHP8A9eSWO9vEhP+ub30XjiOOpv2NM7Et4DLvu4a+DVnVjEj+3p65TOXw1Nx6UfJHavBDhAWcgad9lOQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB6V7BAVUObewG5ppKbVhohNP9S9regt1IyXT30MqwYJaFMs0dHMtDuYwnhQOoESPoGfTpCzohuMc3tkCdz44cC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_29 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_29 new file mode 100644 index 0000000000..f56e55d232 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_29 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "15", + "previousBlockHash": "20988DDE40C0A1E97EC2FD821BF207D890FB231BB6742D3A523539BF03A5AF05", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:dvWPJv7/cEihdoQWJ6a8GF9QcJjv2paT64GGJk0JGTQ=" + }, + "size": 17 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063032385, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "663D7D89E1DBD9FF7F116CCAE8E3A7498DC0819B464B83776E0645DFDCB8877A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////lbwdzQgqZBzmRP6qAgi6953PpYWf2HuKCZ4Z7JMlCjowRwSF5IEiOmshsRofsMuhgODtqfTdQt0F18TwjEBEQkMyavEdqPQ4S+80bc4o37g3/5gGhActP5OHLwWR9ItbD5Tn5qUn6aqS3lM6xfRi3DtpA84Y4+vjHuxKAXK+X+MV82vwSFT3uTvyW+aAVLPprCv6r/qVrPx60gt5LOAlNuCYM4T83NpxrHs+O1xOJn+xuqfUPLvsXuUCpXwkiKGgIOq/dXXaL7WeRqr9pIuV4M7M7mdXgA76tzFzi8AEetnlKakdXi9WNrl52jVeEE45/VZp/gD3bjP2j7ke8WvKQtOKZOamunpXJ9Q1BvjNrthXEZE7tEUQ7zNG6QU9eP7F8ZwJV3TCfDFoqCur2HAOkj1YarpSTggrxAsZrsk4gRxgA7IYYGoVhU8BThmqpChfRyEEzrEuVvxeZxj0D0+toctQHaHH8t0sCOIWvG8xJiwVhJ2/TSBZZRZjDjgqFGPZZr8iQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAt9OO+a8BYttaQrSK3Ytlc2ARSLYzp7Zb6GozIb1ozR7ufHIs+UYfewRckccCp0P3hjnJQF2AJt4O58cPFb1AK" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_3 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_3 new file mode 100644 index 0000000000..82cb6dcdd9 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_3 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "2", + "previousBlockHash": "F6801B559C46AAEE1BC94398C353F6B2BDABE50A44664A703B165CB0BE6A7D26", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:z8ttC4dxjgP05vkYPdvOMI6iZJEuH08WBrLoJxcmwxQ=" + }, + "size": 4 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062203828, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "4B2EBCA1BE52860F7056D3EFC7696BA6CFC4189D850E7041C50BF7905C992828" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////guLTPnWDY6ucwfRkDLxbVrOQ3hjIsuGpuGTyMsbn7vIiPTiNnRru/Wesbi9WJrDmgkaAR7PMyxPPb4USzGSNZ8JFuYMQHhjW2bHU/Lrz61epiCH3kUb0SOxQvpc+XJOdFxEuvdBQYQN8ytBScWaZwLk/vaqrv+Juj5p9ZTn5cw0grl1RrsfhKC8rL/aedT+duaYlvVR03/cdxenjvj/mIWDbLZAEm/RmQ4wBFeTwQg9UI+tDAfUPo7NZKGRP0SQh/RlGTMiItrYv5kBOVCM3gxBhOwsnAf+8hvqvGLTsvV7aJn2xQu1XSXuj685/t3V5ZazvKK4ZQc/qZtK6RJNiWNMqg9XbMiYJWHFNZtPNqMMfbWxKHn3hyRR6tX6kLrulanAcqaL/OcysYSzUhDH3ctWXp3RLQsfe/xdOnF4YLOpfOhLtxii17dwoxDEPkPuL9L/DbqoAd96vLzL1jxJ2I/PeGW+2BfXH2GvlABLGvNOsyuNW7jAyG5x0B77CEkXPvAQaQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCzq+anZLb0dYCBq4qAXz8vk75uaOGkTdrqKaFMPtKxY324itiF9VCtp3FEbXAyPvvSDx6xaJxKY9MOX0uWjbQI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_30 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_30 new file mode 100644 index 0000000000..d4da98e9c4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_30 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "15", + "previousBlockHash": "8B2A37B1171C24AEA0D42A52F6556A79D75207524CF0A11705F237C920FE5367", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:jMlilkTM06T41KxXlhq+NdiVjpk3KMFURLOwlv9IbRA=" + }, + "size": 17 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063037743, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "55F2BC5DBC870869BD67F8C9A387F5372D6857AB91923259C6C1B30B43D24133" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rqr3bJnSBsE22fBpA29gI9JKeGRKq/GzkMspBtNaU/pJ/uc1HXVOw/tZ21CSu1oBufGIkjwyh4X1KTH3WkUtZkw0wLsm2xG/Sftm09SIE/Ci0W/2C+3+syUjS4o406hUEzGsa2Dj/pDnJu8s/65dkBT5XO6FEHy1LuiDsUsgcPSyC7Ps8j5BGdKqVHhlN593gaaqiyJQt7OfJObOf2RxjrsY1i8Y6jKQbC55U9DTbO/jam3QOknZQCdzyOzVD0HfgTOm79lj9sc1pwQGYiWHjUho+L7sKutKSKFjN1yB/XFFvT4wTWBxkCdiAcB2hCvIn2E8pN0ueaMG6wukEiUxGp9fS3Dupduiz8JUYFm2WGVRy5LNZzEjxjA6Bj7T5dMNVD0rFjpJrmIAA7XFzBxkLzPmQVaWhPJS1EF3kHMIpDtg3lCextY4EsEttBDwJyN36ZiI3vAEsZd50LuDaB/EDqb7yyPlShVrXWYPbzzglHb5jaW2S9uy5jAt58y/NDQVWal6QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAFtgSACVK7cTHmI7Bdd3kpHaydqvWr70Bmvxv/GFhOABFGZ4vHX8Ds266E6wSptLYeS6ZTAhv9EooWqAhU8BgB" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_31 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_31 new file mode 100644 index 0000000000..4ecaff0584 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_31 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "16", + "previousBlockHash": "663D7D89E1DBD9FF7F116CCAE8E3A7498DC0819B464B83776E0645DFDCB8877A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:a5DQuXVduqbS7D5/8LqIUgc1CtQ3GT9haw9xKtH+YR0=" + }, + "size": 18 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063042860, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "580186E18A44F28B6844FF1A6197E3C4537020A79F9C8A259412AB50857216B3" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jlIAqAz8Lyo1Vdmvc9aL2GeIJIw1t7h5vnak13jV1HM4jWoIxgzz8UZuuJ7iKPm7o4I2wkKIM2bQ29NLUph6p4VGeJh1Cf/lWAJsDkX9gSe0kFddu78o9MPDVQfzguZ2EfsCgDicbWxwaztgz2K3359pvnCJxOWgiFVVHrge+wAJ7zMtOfc819GPuqUoGydKq1XEpzZXd01uQXaXmAdk9fWbZWME7NmmVizWHSgN3AjvQUWI6nJVPxa4eGT5bFLzblJpzvTWEJuJZSAvdqHIcwPECcCxjpJhndE7yznWnw1VZpwnXJjg+owdGiJ4BPepKpMJSpRld7PKRG2+Vz5uEDLyTUFwfMJ7uPXjutjltwwIeBf0LpVLKEz8Z5fkTqaX505U55nSVCKaUKswcmH10adB/1fBMqWuOV6aCNA659ZcEMYhOAZIv5uMsiO9e7xSOmPjmo/W49jyVoMXSjid6hCUH+SdgW6DDXHr0sRoXM0yeBo4Mvflk0NLzMfe3y310Y9cQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDOKEQTonBAFxjhn2RMN5VT2cTUMLbggpxN7/8apjeMmt2A/j/b7w6mHXIbFeXjRCp7qt5R1ZcEWYdfMtPckV4E" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_32 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_32 new file mode 100644 index 0000000000..0797695a75 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_32 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "16", + "previousBlockHash": "55F2BC5DBC870869BD67F8C9A387F5372D6857AB91923259C6C1B30B43D24133", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:5ScvBlpiusRmFu1Q5tJsubCR+l36+v54lrCu22oTOAQ=" + }, + "size": 18 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063048250, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "E2510913DB8E2C9A6A25E9C81DCAE80A057F33059CD2F0F07ADF9302B340A2A7" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gu6NDpSV/9ErouLJaPk4VK2zOeie64r16HwclAbSOHg58kKG3b6IzigY4JyNgaynpUqzJug48mci4l+HEOcri/YTa3O9IYHKxIhgyUDBUy0uELQNnB4NLa+OQJGqlnmGDLGTNPCbluibrr/U80//QsTqdGT+5iOz6kbhjZN94QxCFPftTjphFtc06zOoZOfCk3oDZDq7Vwm7qJC+pyeghIlq4hzplXBnO2G2sHtjaMz9kW48mPQR45nemmSjw85oNCogBTMjO80uz2XVCYoX09z5gDwZbUcA/ZF7wDwo71dy/mvK1EihpKMR2ePXy4mtM1P21E1CohGC6JwAEEi0KTOeVf8Ce/Rmw9IK1TavbPLovHDiZIRWwQ8+JD4D0xqSlIIbXi6o9lZt7U9yKhRRVIpnd+tfiOjxtX39zb90GmvVGcTelFEsEgh6cGC6Z5YpCJWgv9IVlxK0xIlk+diAJSPcK9YWQlhyGA6oFFHoM2rItLrrSVeLSwGMq3I/fmty9fAKQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB10jKkF1MXE/HmPKP7HI0jTCm+HmCODAj58UE33G00tidRGnffEe4q9ubK5SlSDT82ghLnMDhmT+g5O02B0jAH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_33 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_33 new file mode 100644 index 0000000000..59d63cc696 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_33 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "17", + "previousBlockHash": "580186E18A44F28B6844FF1A6197E3C4537020A79F9C8A259412AB50857216B3", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:xagp+8rjJyCrIDaMBmDytYAnW7R2zfNResnBBYZRu18=" + }, + "size": 19 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063053399, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "226D68CDC7236EBE91FD0560A2614A8C2CAAAD25786AF8C73CD31E2A69617CB9" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////hxxkVbLTyT7nlQkhsma0uDu5MTYst50FF8qZlJsooOIPVwgq7kD+PDZFCpldiox7qOtOLlFknmsiIhttqRK6wuScqJ8St35ygB5Sm1QAb/nG7SnD47LqHctze1AWvbgkASLbhkHoPPLuIjsuaMKllALyOTrt3JRmo3r5fNptiC6m+SDWFPAqFwyioHNFDQNWrPEe2YShPWB7KJxnH8+WPwWFjTf9FdOMbf/IY4IdrHTjcgMMgBSQtVP4E9Zp/d7ZB1K93XQTITGEdNHIWSck3NG8MIObX8QHrxwf1PWX0VKR35Xh1Qi6Qx+HJNT9evbO2cjbR7WUfO5xClx/q3yuXrr5bbokpoHaO+ptuQCkAYIHguUxlsCr9FJKmrmvoW+EpsliWV3dNy9lJoKyFf1fB4zqrsI3iJ5RqIX9WINzalGdwOskRUNA7O7C3Q0NuVZbyT9Xs1EP+yxYEwCRiJCCNmuC+nj7WT868R/uhUofTt974ZHow9NVa2SqxlDvc/mfCv1RQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCX3PCJYOPa8QFx8CON/61RIztoZ0D0SNspJbO2EkffIN/X2EmIg0Ko2Hf+IbguT4dHggr8pK+D1CY5Sdpgyy4E" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_34 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_34 new file mode 100644 index 0000000000..0f1d2e81a1 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_34 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "17", + "previousBlockHash": "E2510913DB8E2C9A6A25E9C81DCAE80A057F33059CD2F0F07ADF9302B340A2A7", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:UZpqWpdDm6XLbx0VvUySCKdsH0WhlThGDpRTI0jC7RY=" + }, + "size": 19 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063058391, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "59686B32CC61DFE33044F5FC9C6369055A73AF2B72C3C9B8FD76C23B0C383FF6" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gifJTmlDcvtZCpx/mWX84lXFWsw1vwvjc8twXyO3W14V5yHYPcLhuB41IbEZ2tqYh1db64v1T0ZrFG0OjTwzqtlZaRCkSVpTuug2EmTuasqgsG7Fup4WtZ3O8SSn8mXKDUjSVlWC0TbZstk9pOWIaaxURXQhdH+7w4m9J6dEFCkLwVUEZ8CxZGtbkIcpTLwmkTvVtdcTu9lIZxjfzh3vDjqoffjh/hIyt8qB5OrzG4asztXSincspJ4Tph6F5s8Z2iFwlvAHmgv5OHm92lZ6bDOZp3cDnO+bs5fOz9GeoE8fY1gZKrghHTRnjXrvnBdzZVpYmIgS2EX6HE9arjYsUshTAOuIaMmQQ+1kF6mHBzx+047mlHeJs8DZzror1bhYm1UKpkaEJaTvCMtUQRjRsQzxYadvQnRZtDdi6xqSVfqDi3EC6RZT6huIgScfV+ZT7CYDhS8z+yEp2c7PLFtrHQcWJPdU9FuCizRoR518msEPbjmgE+Wd1u6ktYr1e0GaM2C3QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAWxB6OXcF/yfDCSOjJpkHXus3J4Hdp8mcLWQRN+w1JqZHCscHa9Qcqz1xpCPeiOTKE8+0ABOnCKCiAQeB+5T4J" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_35 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_35 new file mode 100644 index 0000000000..4fc1ecf035 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_35 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "18", + "previousBlockHash": "226D68CDC7236EBE91FD0560A2614A8C2CAAAD25786AF8C73CD31E2A69617CB9", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ut2WjdcEdkryW4WLEeg4CwoxGVlte5pqqDEXzyZrGUA=" + }, + "size": 20 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063063914, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "FA4DB4B6CE8BA5AE12DE4E81F433C2EA5F86911253BEB137CEF2C587B626D5D8" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////hoIGEpaa0FZb0lZojW0VxRa3RaWOymYIzmPR5W8HNTiLUVmh+CFlUhKE6Hr7oygrhunGMdCHPT1w0TF3ARf+jjeCd/s5n+AMN6oedAj3iC84JL9Ix10x1buztTENiVTNAwfjDc+lyfaVbG9J9u8eQ5iesvAlHTmfbmrwpIjfgoe8Q6tmY2I2lkBBHt/apzaGhDW1OOTGrgLojdwnu08Ofa4c6psNj24ORllFlafcIiBQ2cB/bA0G6J1+1Zez/jN9iTgItD8FGSb5YvphzAHLMkO7N4n2RIsE/zySdORo8YohfpheZ5vo2kUNq/yMBZZULOkpfqLaF9Q4NKJxiSwXEAYwsWaa+SIsPmRbnoi/CpFNal34OkvEE3SaUxoMvba+SdbnNYCVb8kuu46zu6mFeSaObUr6uLSH2Kz93Bs6Wn0CPmS0Py8mibrqYmjVfVJ1DuYcQDtl4UYUFVT1XSyTpxCAgMesOIIZILikFLYSHYEkNPZCumS1Lh461jOxvM07xvHjQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAp0DkFXr/aZCzx1Dob3HvTKA8LpFhKtCyfkCwbloZxhJD2Pi8Dxy+AIyTtk8q0uPbRmCWfzYAfBaGwK04i0sM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_36 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_36 new file mode 100644 index 0000000000..ee92a8472f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_36 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "18", + "previousBlockHash": "59686B32CC61DFE33044F5FC9C6369055A73AF2B72C3C9B8FD76C23B0C383FF6", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:o0SkQqsjWTK+QQrS1n4xYZLPeepC4r10CTaMp8nGQQ8=" + }, + "size": 20 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063069225, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "FAA9A3FF712C7997BDE70C642BF93C25F80220B32C86EC0854B13F59FC0B11F1" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qgC+nA/24UoZyue2vSPIgQ8nFtHhLFP6DQGiezVKi1jK5cZZpTUpFWF4h6VK7nWmq37+S5JTo0V9zpE6IkN9GpwcpsGpk6neg6EL/VFKevVmXBdGg/vdHPWGvvx9wCApAXQLu1LbQ/CuNP7cfAoVaU3+O5wxQWENEe1tBzHrLcHX9uGqPq3PLZ6xbkf4GJizjYfjyJT1go09FhyfSH0+mGb/cXXPtNC7LXrAh9iipGpeEnnnIRN85A65Q5cv99n01LPCmjaiKwBMLm53NklQCNifT1tHBIngkaGSKVh4IO60hVM/77RMkbBJy7j3/3zkDDlx9uU+7CitSgNzYfkzO0az2Q/LoICpIbbSqB8K/Hl7JC2QiGSXPR+Hp6Ghmo6AYQBB2FnbaBpe/TdNwywAvGTHiRPQz8AgR5UEpE3ymLfMSiFwoPZFCHoakvTfvTBlMEec6F2gKWejvYCxgftVJ/k0o+c5AsxQ2vI9Qw6Vgq5a7O1xAe/0HPM27zOvauumMm74QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAPKtUCKxxW+KQ0CXIqucapf7zBWeZhvh14TF1r+hs95DVYx1ffNBHbw+S0dIB7y8lLZ6aA/WG3xIvJzkpJvWUG" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_37 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_37 new file mode 100644 index 0000000000..fa52723207 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_37 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "19", + "previousBlockHash": "FA4DB4B6CE8BA5AE12DE4E81F433C2EA5F86911253BEB137CEF2C587B626D5D8", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:8v45ubqBCUGSOUW/Ca9nROvEoMhsnrGPrH74BucHXQA=" + }, + "size": 21 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063074812, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C777F8E246A0BBF80F35ABBA56A19BD7290D57C2FAD9315733C95957905B0C5D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////ogFgzIkQKU3M1WSbsjNFIh9QHgxhKQWC9CBjNBTlMcunkrTUsLvFa0W2UvHEaxCBgeWsQPFBbi1ckvmTtNmWN6fHp/XvArrAoe8G0YaYVzXX75uAAdcDiDGfFIa9I7CfEOM6Cb/oWjgZxklv2RabJMEnywFlSUUsJ9RcHvmDkQsdxFGG6DMZ6GP0C6fdWl92h0wxsqEnh10Qyskdh3mqTJ8ceLMGXR3VVjOe8Ek9Bbd/5/nhCnHOHvVsyRKd+E9QVzAAo7z0bcZ3AEoQwevxo96fzFtsd0EsHtSTCVK1LuS1zjbOMxDcBqGuaN8H4i+ERgzRXm1u4tYzBMtV+/xVGnQLLALH7tsBXqHJ+yKm+CyBiAo8nRGXuSO7EvLU63o/r7+7SP36fyJ/MNfrSHIp6x9pF8z9bP9eTaM2E3iyQBUUJArxb1EE1oCLnidN2hRD50zodpKAitNY7l5C93z1+jWZFCvL5cSSHMioFPdXiMqjNxjCBY5EHXlrH8pGUuZZSiBpQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDOeKFAM6vwaTKcvxbtiUCtLrD3zTllmgkNy5mOqrsMTtiePKcT+SD0MaQvWYe9ie3Qej0gg5bBGnN3ZClqmyEM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_38 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_38 new file mode 100644 index 0000000000..fd197f7e9b --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_38 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "19", + "previousBlockHash": "FAA9A3FF712C7997BDE70C642BF93C25F80220B32C86EC0854B13F59FC0B11F1", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:yRIPidXOdwnL6RBpujHGSj563t46FFq1sYN4oZGIRQ8=" + }, + "size": 21 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063080057, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "247DD96E64D3B5943910320CA1C297DC4B11119F444E758B6315C5365925085E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////sOSLssiQRfOAJ+2MUVbYx4C67+7cJCKSeLtJ3kt9eLTSONZJr33ZSbBkEB8PIlggp/De6TBtgPLKkawgoAssVdn7l0E8DHhiTX0umWRO4mnaJI4tt3m93I4XsuSaJ4NlApHAYrOIER8Ub3pfrtksOC+LIRNXRevbxHG8/ZpZu4JJHUODpSPmvSSjkBnX+Cw7tf3DkDoTe3xi1H7ywaxeqdCEfBM28zRzFbw5ur4/Mql5J3uHQG/CweFyqYTWHc7cHl7CrBPixo9YhkRcekC1aU2XXpUrxKkBe1NEGgucJq5xy5cTvAKh58eGDcZczTOm/JqkBuPrMGIORcCakbolFi+RKRTGfySvt3gKFJmSqtkPHxNPwGpsgUOl8cHTtIIDHIyfYChKdBxMX8vO7uZVcuWEju4UwhUhOoU0f6vlv8ThW6Jot+elMhdSwavjJ+IaifDwT0TsThhiUSRQYlgNoCN6+WoUttlTW6eK5EtY4Esp6jtYGg4s784/hpK7y/kK0lCmQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDdGGljOpoa3O+1+jtsDpdNnfjl+2XP0wtIWehbfPC+ye5aaIsdrfs9S5jhmz17DEaSqzkijNC2pJxvqdx9AWgD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_39 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_39 new file mode 100644 index 0000000000..4b772c8a1c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_39 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "20", + "previousBlockHash": "C777F8E246A0BBF80F35ABBA56A19BD7290D57C2FAD9315733C95957905B0C5D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:+dMvuZ2/aa8QRwswohWSMsEj+Ub7ZJcRihMYyR4eB1s=" + }, + "size": 22 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063085526, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "CD19A9C0FF1464A78D1CB55EF688240EF6C0BFFA9672E4B73A6027B68187301F" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////iWr3xARoG3UX/pZCvj4ixsiHULg5on71A/YPF1cpFfH7QdvOq6xKdoRHMvYbbKwsjB5lneJIvmAPX1BWeSf1ufwPMMLt49lyw9/qziL/B13plLM2wVNiQBsmKc0oyULGCCYvOVdPoXyFge9HX6G09iKwDNDpOXCv7QbHbqa9+V1WGKo7q2h3eo1NvK4JSJdXg6bt+VZ0kMVQ1AWsSvL57m3eQFyeREJuG79NAgcmv/sTv3R5eH/KNiEWlVAitpLykUsiIC2WQrjSSGNNP9ZsCV0zwIW85qDaVf/HKNhJcqAOlGFUB7Y2RkfeGNevHh2wn3YwcG5EELHkF0khI9TOZ14BxwodRxK+ucXw8Nz4iIfsENxnLL+ztRPOwsVahB3oGk40w7cz8ID+bzbv7rXvc1GBDzKePyQAEWFImxvP7Vly+8WsewkennEkhMMcT5KPUvqPQBbnHr9C0xUNN6HB76RRNcyODZ6+NOZ+7m6aSk6L6QZE/+G/MMvPtgMGLmzMucrmQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAOmKwfcqiyorf0NXuP3g6q9lCt2fQXQnBLVPjrlPm9lTZzGLNjgtlkzkMOXmxei50VmdMeuGrHtwg46u/+cUEI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_4 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_4 new file mode 100644 index 0000000000..bce73e8679 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_4 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "2", + "previousBlockHash": "F6801B559C46AAEE1BC94398C353F6B2BDABE50A44664A703B165CB0BE6A7D26", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:IO6W+jpmMhJSx8a0huqUzIQpMlLDTFN/9WGTRcehniI=" + }, + "size": 4 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062226090, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "7B8E751B057D19E4F69477A7661AA73EFAACE4DE6BE7E34D287FA506EF6E2433" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////oADlsODbPBnoJVCJ/+AkOwzWQDyj8FyCplU8WcU7D9L24ToYv1wqJCcLKEcUdgGJl1T5owkpS0rz8SbRj/ex+0d2af2MDPbpoUwRYiy9KY9M+N1NyFaDi//Qlc/pajinESR5eGRV7zw1XntmQxl/No6/BW9nyBpdPiSa5fZ9TC3WLEqKXpqhHG598CKMCYi5kAIJgQPN4+IQElocUhSrhU19GMfPE2LPvGqwF2Gq1C8GtEmFQA8zVQ3S1ew8LTAg9TEVkS7td/noJEFB2fY4IbhDp9LfqQjA7TTVU0njKxoF4/6fqCUu2Y+6Q5VQc5YFHu2tEdJU4EIAfha9C8mIUnasJ+Ov25vv1lwWDesPtMRZADVF50+/sqKvzI0ZNc86+2X0ei7bKE8+3k7fge/4SP0vhkrPDBZessK1yLbYanjho0pYJ1C2GDwc/HAhD3ZL/sE8LgVHRd/n9Wzn4d410wjMNosQrohcAdj5s4+rq9FrVXjjUx/+cBnueF4fZ+OOm1OjQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDV+fet4IUa6EBA6Q/077FM1bsmoeUUUAwDbkZOKpUtuTJhTuyzp69Y8zVqP0uTg2yGf/aUT/nES5I+tmLAshwL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_40 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_40 new file mode 100644 index 0000000000..6758637ec9 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_40 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "20", + "previousBlockHash": "247DD96E64D3B5943910320CA1C297DC4B11119F444E758B6315C5365925085E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:gSpWmMDodWK7+FjzJyM1YrxTaiE1Rt8F7k6liI/9PU4=" + }, + "size": 22 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063090821, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "BAE9052B16D1488725AC3F71DD30F2EDC8667F2A3DC48909B2225D4ABDF2CA84" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gt8v7Kij693q9aD+VN0A2HAr+GdvHBX9A1H+4gu1QbEwEGT6VuD1iBAt+jDQQJrditaaj/0hi+U9e7XWmNdgDnpy++F2kjbnDJR9oMhJ5o/o6KCtuKnaD4BnwJc8vu9sF1x/16ZzIzRuIhGVyPAVg7T2g0yXdS27/j6YS9/cpnulla3+B8qIvj3g2zKjixlyq0xBLneUCJo9gUvqEMBevv1gB4Azc3dCwgc9yo83ejnaHPhAkmk6S1xIZDCROnT7W1f/E6YyaIMSxq02XPfGxsPhYE/Mq5G8sKFvj7J/AqBgrDuSXvhluNbEHG0WFsp76a1oJw5hse6yDwJXYHscWq1OjEGXW626OQ/zYihVnV9TQQqRretIeXllrwvLwphgne2uei8PIXO9UacuzpqrrjoEDZB8Eq23L1B5FuaLvat8/ZFqq/bG+GTrTQja/1BxPSxfE95zIyoJRmau1at63J/NdhD0/HMCDu5s0RS5iBGo0jM4MtCgovT6HeUsyN9nrkB8QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA5NeHwHKehIbzhY83QzlTFPV0IMxefGt35HytVzygvnsfOitOiqvhTmYzHP2XKXeVFkN6wzNl2/LG0GWRuM14F" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_41 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_41 new file mode 100644 index 0000000000..3bedc448a4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_41 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "21", + "previousBlockHash": "CD19A9C0FF1464A78D1CB55EF688240EF6C0BFFA9672E4B73A6027B68187301F", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:atCTK8j1nqFG3wusGny5ldLO6rCoGWQiJzCk2y9OhUs=" + }, + "size": 23 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063096036, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "E06D0CDD93C1F747A48801F9F0E89888F39C84134ECCF04D24CDDC331232D85F" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qIgtBoRdKdXNIDarIPYuzhajImM2rHyJMoQIpAPkwnVkBqSos5bvsn7XDRwzgnu1ofwaU91dvA/6abbowcShr5ByBDHma8zZY/TTw9vIFDy9SW0bAbAjEmEtEmX+Y7BjF4icWCNJzHL31cDqguYFYJMwu6BEO2k3Zn0yMgCeWjKH8BdNbRaz654oStDUkZAUr6cWw7RDTvM5TfkDIHUhMwBwWfaiyj6+86eZ/r/+AdCMewNygtvN7xn0XvqYP05j05raHf6llidycFGg87gG8EPhDHhKlaojytamsXOTFDq/x6F7iug9Rlgj4xMgVH6ODf19BMIMFhBCbWfmaXesUTjAVaxKMJYWNhh8ANf2QARwrh+vUD4Jcl2S+cP6twbe/BT89nbOXnyRS/BtWafOB+P3auCGRTEkmtQlPTYbiaj328LAMF6/0eIVipDNYFFTLilDBKzbUKLNXXzElGXJIsxGfFkH5EsXKV2r1ObEHR/BII+iouSu2WqLQPH2hAOOUJuQQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCny6Uom0/qJJMA5EqyVZ3S6DL0Qmduqv4ai4bnIl4ty0oaVE4FlhGrHZ0ES0PuqB8CUY6iUHNCuLPXlt4pH/oM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_42 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_42 new file mode 100644 index 0000000000..2cd19640f8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_42 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "21", + "previousBlockHash": "BAE9052B16D1488725AC3F71DD30F2EDC8667F2A3DC48909B2225D4ABDF2CA84", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:nstkyUrhTJz4qi2L2uPEhz3gZXXAwlB3bzLoJEzftEw=" + }, + "size": 23 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063101204, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "CDE0C0FA2F0B0ECAA5A3B9FA4FFFA87FCB55263B82738F7EF9791BB9C0F697D9" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rd9KmP3T0HaBTE+mzGptg2hrUv9N29MMKxf93kS6QrqhtItEySVxgqGBaM2pO7WTuNBSmy5RriIOe+9pwROEFvuPNJy2mydckGkwaW9UmWMBPmMIpOgaEeTQaXrlajLhFWNxRd9IcnrBdHkOQkiwpYq2/3sjCTSJfVzwEGY+h3KdPgiwKalGjC9RzTua79LjguGo2NLPOJOhyCpOkwgXFSNo4WOEXybvqGD4AqJTI/30LsBKYkmqzwe0lmlbIZMHJQZ+vAT9GNDXW7drqyCUTI2ECjvSsITbvdYmR1do74UCCDawiVQjNtaLZaiGOgshlkdiL7USirY0y6ZYupSmRN3ODVvu1qyG2O08TD8hczNRI4MfnrfSXcK7FVfZ9abRDt77j1u9suiUCg/KqUhG4Qnd14RbaSepzESumvYWpw6wrkc2oWK2VRVQPi2UJe3KvL632V1NwyUdLg/yQtQOhdLbMJ166mA2C7E7T9VfvRL05JSp7+f7pSg/5fMyIUJGzS9mQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAdRG4U+ZxKuhr3G9NoF35Ku+K3m76w0blbAY3e2oLh6KrtqWKA1XOYNP8F5ju/eJZ9dwTZc7ESOX1Pw0HrofYA" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_43 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_43 new file mode 100644 index 0000000000..0eb5c6c702 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_43 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "22", + "previousBlockHash": "E06D0CDD93C1F747A48801F9F0E89888F39C84134ECCF04D24CDDC331232D85F", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:s4pZLTuDPegK71pjVXaYcFtIx0aOVbJ6vg2LL+yOMi8=" + }, + "size": 24 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063106359, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C3AAA66CF91407CC0F6F8413EB72C0752B006C2C83704CFBAE27A6B8A53DC800" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////h4VVDB8HIho7j1miLpNHbjq2ZiAQSbdo7bNMYmiLhrxBTXdwDT+Ax+aOnSVD0/LVpthCLc0n81wJfIASn6pnVJocKKoKdxOS5HO4mV7zUcJp6PbfwteCgN5+hu2mvOcaBjs07s/3SIhLfREgve0xSEXgJOOFGysbSYrucSPXWu/KYt9uvtHjynCe+AaE1sj/lCAMiH4dJQlt27FxTnzq5PqCASRvHEDgooFhl7Qx0ShGqHvCiyvWwm5gNvqqTZXPS5Ltvb2VO9N9uyla36gO6WQLriPZRYIi4sQ0QYJdZxtnX5oX55NlZJEZ2zcDkOVNVMbWeWoFQNgv1hK9MBb4RfGFfztBziONAlJCeCM/7PHQlvW5ASe4wB78UDXrd2uD2PJA8L0gfCrh2Y/LVTzWPmyE5tzSNxJHsGzhoLZD9j+UN8wNgUKSpj2CJ+sv94UtO+bonRvHisfPDtJAGK+kcqzHhm9uFvvaxXPDoJyecXwws+SyAKp6LqmgCypZnyabSEIFQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDK3rG8/qWgn107HRYfDyOt/kuAMHEQbU72oatmJNOPmLGiA79f/mnpIOMKv3M0/LdThIjwnXUC1XL7PcP2NQsL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_44 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_44 new file mode 100644 index 0000000000..1b5d89af1e --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_44 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "22", + "previousBlockHash": "CDE0C0FA2F0B0ECAA5A3B9FA4FFFA87FCB55263B82738F7EF9791BB9C0F697D9", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:PrY95olgjMwZiNU73cCmuPnwYyHOVOvih8pw0p+Oul0=" + }, + "size": 24 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063111724, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "4A751916BF3A74AFD1582A897EE50DD8B6F46502D91451F2C27DBEEFEB1C6C8C" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////siVpjOW5X43Qk4kAWL8lN+DFRr6JybhRWcXEC5E54TJTdQ1AIlK5MtGSDJumUhVHp3PIZG8C8jQBaq9pFBzqz20QNKH6ae+2oP8XO0zRFSAmlVoQJpniiFNy4XFVcGpfCcwjuBcKEw0GMv6559ioQ3sRGKVcT329uwxVMsP+C1jzl+vAZjTskIDAg6IZXh9msDoSgDE8S6ZbYt24Z6WBwDD3w7kXGvQiUU3Yu1902t+tvSDm/SIYAuuNR9GloY2clldcyuLf6slWrK/xUIRMc7M21uHUQajDNNzjSTO1sA2dHAbeXn0ueSiSHg5G9Hs1pCtsblYvi3L+xiBX2hogXFPgucxXPhgwD1eh8uQW1YRfJVb4EqYgVW6TdXVsIDUo9QdcLjwC+NrOlrXZaJPdrGLD7Oag8C0jCIo2lIZxElceji+ZXYSih4zN4cZt+2k23WVbZxOvKCrztgQasFA+Bfi9vyKF6JB9nd5etx7dmJmj+qCTvkUx09bUZO+vWn/SYVHPQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAHgzno6DYC6Db9hOqB9e/mrUP/U/a2dmCioj+9HP4uvz+RwEFx04oefn/IYb35E0wO32Onwfqne8tDwCvd10kB" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_45 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_45 new file mode 100644 index 0000000000..093b3efac6 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_45 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "23", + "previousBlockHash": "C3AAA66CF91407CC0F6F8413EB72C0752B006C2C83704CFBAE27A6B8A53DC800", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:X2xDQl7Scnwi9ZsctFHNBQNH0KnCoRT6qM8OaBI6FEo=" + }, + "size": 25 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063117373, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "F7923812C762929231E10EFD96568FC956D810491FE8375C078660435CE460E7" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jwBNNtXxdn04+2zt+HLn8cmoxKeaJGLyWNl9S4uqcEfI3IC/bJJ5apOI/MdKGg58svU/QoKEeiSSz2y7xAGA3J4vBxPiaeWlmWeBTn0gIMNh+xg7/VQpUHYL973zSKoJEvFdFGqPPwwcsjy374ilu6vHckCjiucBfesHtG398uREMmMTsQCoJUegaerbHDD2g+tt08CPLnv+Jkb7G63h3dVjm0KQTj2tMNRLPL2396++V+/2BsBrighcSYEoINr09HI9idxcwd6MqtcdnM7dPxW6oe+v+lz9rz3Fw9vE9i5FgbC9K7eOfFl71XKZyD99/B6+Lc1djdS7Drn/ay8ALYdNYrN5mtqQI4YfcxTaXUODpFJIiy/XvY//3oJUs6zhk7oS4HMZ6QBiXfFLAsMHxCYpN0eVoHvIx62b07M4jpYKu6Gszyqc1ySj85hcTxk9Icu56YQdnWJsbqnlyxhARByujwAMiY45w35YlaOAe7fqWnM8WbEb0RaWSOAGjzQqxi+OQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAWpqwNh4wXI/X8eFoBjEO8B2UW3tx9l9v81mEdDasZE2RNeedXZhnp5bp9f0nI2ZPfUn8lYlCrPAXqThAx80QO" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_46 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_46 new file mode 100644 index 0000000000..281fe6cb62 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_46 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "23", + "previousBlockHash": "4A751916BF3A74AFD1582A897EE50DD8B6F46502D91451F2C27DBEEFEB1C6C8C", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:p44DQHB8yR/6VIjzfqVLwxVZ9bRqDhJsSDMe8MeHyEc=" + }, + "size": 25 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063122675, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "E0CE3891E3A3188A7018DFB023316053F35A959EF497C19E3A989CF82B69A62A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////su9hN7BEcTFGT3lxPRVl/ZDG4ae96OjLmUNfDvRnq4m9zH0ZexRe2g/9ItsO5T+ik0EvIi0cXbDPk1XODxV0WZT7eL1EmkcKrjQHnA6Oywt6/WWJwLC7FX4a8CPVvaCfFAkpqfEmhAjSzhDsrSHnKyw7mCUT4fa988+Jzi6/IC8pRkULTBGGMOs/jtmasVFmhpIJ5wYGCf/OH0425/9XWChLSHIAaWR9AQkZpyZzCud4OefwNRZfeK1w/SCVOGEHSKMrYSCjdgKbe7H3Mnu24qfxxCU8qVJG6GuQ9CK/kaxuOXLPOobvCbGKlUZhe1XQJAuXElrUgqLEvt4KlatlSmcv+tnqav9Yu8CfMucLsq0F48aX/LyXC2gQw6Jiex1K9fO0KggCpijHX0R20/K6j4AnCeQtcKUrCk2gvzzTySRsAq+6+gxJz+Q4vjpbS8BfxhU/l/oN00xHl+tfcz9rqsd6Xk/hMNHMxaR0M8ZFmwKT/meDy2JKriJnAcunld5hA3zjQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDZduypC1k5+sfyUGsHPkygzocaUEzmFXC4IsqKetoPIC+BWR2++RBTFvIe7CyY7ZJ/E204FvjCrpd6d9lOsbsK" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_47 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_47 new file mode 100644 index 0000000000..a30883eae8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_47 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "24", + "previousBlockHash": "F7923812C762929231E10EFD96568FC956D810491FE8375C078660435CE460E7", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ZXrg7Y+UkEyzZiV+9y3dFWTktudcqx7eBVb/3Es0RGE=" + }, + "size": 26 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063128407, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "AAB20F848B2F3CE841892D860D393DD9ACF4AEC5A7537A1B55C8356DCFA3C75A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jGU/3Wxcknw6GjmdDqa0XmlA1aRfqtD1ppm1BIEDQerCKYCOCZlpFv6wYliWlLvWgIyhk/ZMzpqw3I+aEHWk60tV3ov4JcK/9LEO+A0rjsm+SXETI4a61DogyePji6MaBo/og0nUlymBake5lAxn5jvyN0fndRlWMIlPnb7cLXdb3OZt5hDn/Bq2rmAyzkYxg68gHv6J/m7aXE4JxYY+BWWZZjIjr5uAJwu1Rr62MipwboiC7n/qBvXNHchCp0MQarlDYlr5FpMRst9O2MK1IgYl1YnJ8wTMj7bL/6JdxQt6lUK63aOgV9vT6ULpUceh5xKzSq31xWhxoB332vMxXMXc0ZQVqXNjpgPgqX2f2ULOF9N9wRzQV77fm/rw9/BtuCwfgdbtKg9rgot23cuwSf9hcx5BoN6wFgMpnftD5Tabq7HoZlnfLMDfpjw9LyCvUz9nEvdQwSXf80Iv93uTZVPUYpRytmAE8PXdLlJIbmW3nf5xkkGOaRO/u3P5OytJCD7lQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDI5hckJtoqfNxwyGXotR9QX3/ZoktBVtPlwO68joL/8JemQk6YY6WTrdWCPQ528jVdngZ+7ajphknqzTLcPBkB" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_48 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_48 new file mode 100644 index 0000000000..6dec70837b --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_48 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "24", + "previousBlockHash": "E0CE3891E3A3188A7018DFB023316053F35A959EF497C19E3A989CF82B69A62A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:TwrcOFAFeHRo4zw4caZojVAcr2dXf/u+dSWEhf/oOgo=" + }, + "size": 26 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063133857, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "ED983CF1C4824C2DFF7EFF6E6824EA2C78A80993E17C109C04A4609238B4540A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////i/klDRmULKKO1/ocbGLnOQ/6hpO/0TBWQqNCJ7SqCdSQ3KaWNejgY0Per34AjPcejxWAUK4HhHP/w6/fQhTP+fweXNGpHJLP9u8czM1L2Huo3GL88VlEtZKd9oS5tEnqFG6n5E+hPxoeLBhO8frHeQ5F3UVplLyjUdNQnke8zW/iuWaO5+UOFN5G8hcwE2nOkvCjaRnVLfsBY6kzAzgTSXuXDWK9e3hBP9dee6B3644H8u74UqX1aNTrJpccnwx1tBS9jkadZAX80zgdYM8ylbnxgD2yfnO983/l9vwjzGx/Ff/2+DZHfZ+Lx/tFoMv3v+YTmksFnHXZZNh/CqU7RdUnmmhiky/rcuHZMVNaIuNGOmH3+CEG+qNSBHa3/avFOuvRwB318cUlM9yOVnVefiE62gGDnO+pBaIUTuAo3HjBMZVSfu9RP06sqJgJid/RGK2bG1vDSxElFn9A14a1oD8eztJf1UBq+TUZndgrw9WhmcB+HxnpuiFv7rCUR2XiLDMRQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBDKfoKl9FlmRPW/K7wl3+oDdWdvNpFNwq/8NW37/5wLPC+Czj0UsicPhyajBfDTOydHoUKjKR9/uP7Y78QESgD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_49 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_49 new file mode 100644 index 0000000000..5dbaa9ee00 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_49 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "25", + "previousBlockHash": "AAB20F848B2F3CE841892D860D393DD9ACF4AEC5A7537A1B55C8356DCFA3C75A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:IJjxvYRPh0ETAzKcD7VWV0nu19lx9Jq0KnnJT4dHDTQ=" + }, + "size": 27 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063139429, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "10D65933A8263B3D1E6ADB59AE73D6E7DE47CAADDC32BD33F476F0B127AB5B1A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////l7RlyvrcUJj/XsYDy10Ir2ljxk275LI9oHBxN/Sh2Tu4/JsQxBlkRyTbfztTv7GeqtsS9OSkUdhvhnPfaSfT4zkWbDx8+lrMGaFS4rAhYIO6t+kJ2ZUcoB2FkB/pWCMMFERjVwOB1ujVDLur3XfClz2iF4EgD4Dv6kQmeVovSPGYVwtVTrDRMUuGggPFaoStsaCgWwftcc7Wzg6xoIeCJI8sJUfI2ue0FkLCS+9SYg6ptnWGB6u0/HqH+uI2N8CRVddU2uZLupJ0OTX6tsa9gxL38OZpoei9YfhBZUZCcpDol7VAaEl6XWVm2J6WR+sogZDLvUueulSDtOCDBJ+tEEJnC+2Z50n56ItrET8LSBLU6M+auTf48IdUKwTbofncPuJ5zI62YOMjokl5TgRpImq8BrX7YxqQgNW+ajk/TkQTvhlmzeaXqqaXu82Av48Lq3N/M/2qFPvxHtJT/eoX7KaeCjw3AJ5S+V7gExj9dnxJf8qtaJTd/cqAobAOtPNqcgZQQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDUf5tZC6HLuHyfSESsdAgfRM9Mp003vK00YhqmFhyS5cAKvOorCYntZ5ATJACD3czmi0va5u5V0d3jdrq/5I8A" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_5 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_5 new file mode 100644 index 0000000000..b7b019fa34 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_5 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "3", + "previousBlockHash": "4B2EBCA1BE52860F7056D3EFC7696BA6CFC4189D850E7041C50BF7905C992828", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:LTdVMT2kwCj5SacLX2MVs/CfH2D7vfC7auTIAbQJbA4=" + }, + "size": 5 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062231172, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "013169745E2F6730D2017DC2FD6E7F2FD9A3F9099EC5A76E92FBF8F144BE6D64" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jN2L69ARvGd0SBPrB4CAXxhjjfjSHdzR2JBq6Ly9Rd4BjtCsME0nD9f8z9c07NfqjzOiBSsHWVkoHqRaa0eEQ4bX3lkHWGApbLO0UyHhAUhpDvMV46D1TxXYm9qTxGbFF4OXdpneL2uNFUSoM+m3zNZsDWUYT2hfF7uxcwfIrb9jYZ7s+zYkXxOlDERRW9ZRiy/ElTuNWkVEo9QbP9EU66Ka9QmCC28ru+CWUwjlkIOTPhyvV2hke+EXCgxkNJDs3kWE5RoH+DfsVhEonIDvQzp1pnh3E1MvdPLdcgy59digdybY/2GmOt+VZUm+U97ZXBiabb4gzhLYWb0YDpmUaWYmcjH5rgPKmdfeFjuPqoaR2tlPAcH2zHPYrBkZQ4iBbKq8Jq+HUX78Zhvp9hVdgFDS/IIAZunFrqJInM6J0WSEDEcHvfo87E8YW0rXySE49sDnZU92M6cMfEGw66GicY6DB4MJDRXTeUxVdfIWHdM+Fzfh0zvqqgt4aZppe418HetKQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDndNQxihhbMjTVegv6xTVj6lLku64GSypL/dnVfQUsViIwHstX/sF2xPVvKU7rhfWhS08nO4QzwhxAbtQHgD0J" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_50 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_50 new file mode 100644 index 0000000000..724ec650a1 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_50 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "25", + "previousBlockHash": "ED983CF1C4824C2DFF7EFF6E6824EA2C78A80993E17C109C04A4609238B4540A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:1rOksYkrPbYe/dan3Hy4BfHj6VdeZ0Kq8s+uefhAd0s=" + }, + "size": 27 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063144932, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "192BC5338410B802120F9755484A2A18E44A862DF664E3457AADC5E2C17F55E8" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rLS5vBXOcmBCD4NfZo5Ap0khKEmDrfdggVvJPRP25IedHbq3uS4rj7boffwkSVofsSVtIGSHh7qE4GrdrhsecCzAci5UXBloQG4hfgzklC7ct+wNZrxribUti4Zrs1chDK+lzLK6g9EM95V3SQ4h2mMArKbkhF5P5/cBcrXJgLBh50Vf117IHd6J/QaXZA8tpktLs+8Q/DzRe+N9xtF1NhBdGAAnLzRJZ/upMKxAZGMKZTgwlPl/tH3IB5ylQwlYjSy6CrXEHEvy28yDbQgLhppe/4i6amlcVgyDkkU+D6jZ38Umq9PiDdhRlSsgwYMIIgmkMzCEfMHqb/wjUKZcXWA+bxDiSdfpTOi0Y0aLcAICEXvpa8EAZW7pURr2qzcUfVBiTQj0LC7mHD4T4/m5soqPebfW/W1yFvwhsfT3VVpEI5ZAxkeO2OvfcWOgqi8RUyLd1Xd/EUyCZ2bYRPgTWG5S3pCgKlUIBIqUPO14dTO60/i4yyC6Jtf1UFhfmQIfJOfVQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDW2lLTM2JhG8c8TeEv8CyrPYFzto1bJ0s5c7P3PqXHkP0zRwfpNqZ9UZh6YcsmpwRU6W3JuvVs/qrZgELJHlgG" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_51 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_51 new file mode 100644 index 0000000000..c13438a0cc --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_51 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "26", + "previousBlockHash": "10D65933A8263B3D1E6ADB59AE73D6E7DE47CAADDC32BD33F476F0B127AB5B1A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ZG/upIXN97X6AvzEaiWrCbtTK9Hi1i9RFh4XipYnKwY=" + }, + "size": 28 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063150623, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "DF79BA89722511B31EEDC89E2A1EA196F29887A45D31B8C4B20B6FE657D0C516" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////uQJCcDaPuwP9gGpbbGeWhsqiCIlX3v5T4HZiJx99Pa8gHAfHiuHtZGe3i9uSy1CJlDGeD8vDMfCWVX10fmATdcbH6XNuisgtsaafEzA9igqbcM2LzbLHBPH8bHPuxtTiBMCZffjHfa3857myu4GbrFx4vuOPQxReFpQGOwsljwEs+RrbJH92LB1tkNaYG6pWjPty1LbMt//SZzJwsL20pHIrMxKHmOcvreWGDfUNkOLZUxRpoJ496X/jF5HT/nUIlBguzbGhZ/U7IZFs08J3Wy5xJ9VkZOXbr5P4k4KnLA81LOO1wVcj6f1dIcUQ536vif1e+p09mStCzaX+TAO7VmQDKD52rcosmjsh7rebZvr/iAlsAQZYhHkEg1ta8agf/2/ZaXRx8guZr9MvCks8rWVqeg3JgiuIX7mQ1aMUN3EOtTC0+SmHbYX05VuZzodgLSyIJr094llVuAte44RVKYk+TwJCq3P48BKrVLjUpcTmdptgxF77UVy/0xmm3/QxsCxWQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA052/61TgnjcQleG9y1rFvg9q0gdjvzi9yu3+XChxYid7lfzP/opxdmUqTGkgoLmDjLnZJKJhh2lwOO/e6dykO" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_52 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_52 new file mode 100644 index 0000000000..8435fe8ed1 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_52 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "26", + "previousBlockHash": "192BC5338410B802120F9755484A2A18E44A862DF664E3457AADC5E2C17F55E8", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:3s4QR3xrai+XppZfHUpa0BbWcY7YZ2pjKfnNcHnFcGk=" + }, + "size": 28 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063156152, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "243990CAEC2EF6DE3EFE298036D2B08AFE04B411F09EBBA30F1DF93576624985" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////sGVs+4a9a0/3/devmryXzUYXSa8XjBs+rXIllGzepan2KJs6yuQ9dHtqbVYxQZppi06HeBjf6gQRJmb1rIlWXqMuAEaHUPKs2fknERNVEG+HTKJifFNeOsLCMfLwZeFbBo85qg3Cx615kRiky/X4QooewbCTa3+1NZAKxSzzkPn0Fdw0MH8cKP90TxK6uZBaiqgUoVuHLQDxpdmUQMQWQSmPnuLhfG2oE1iRBtahngY4fgeHr/3xQIyjne/W3HbGBSFbORPw5yjrwl9xob5JZsCme/qzThmSF8QZ8FLJmy+oTsTumRA+sks+Qr9AfPwkgYiWBzntcflNhp0THRwjS3/N4btgVzmCrhohbsHbxc7VfeZsxV14/NJ5HzQJGY+bEMRhOkRE7SMXaweiNr8Yg+E8Ekap4tHPQ1mceXLCFgLyXCX0uFwkpezjkMroPFD+AOYI10Egg3y7pL+bh3fGZq7GsN7zsjNM7Dg9IU5zS10kjeoc/UNCjFLOa9QJd96U7ntFQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCzdl0B3gNqIhV6JD5XVOXysipKuy5FFW4MtbtyCoOUW+YcSrx9DgJcGODirQRk08f+BARNQctY0H/9AOrIMokI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_53 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_53 new file mode 100644 index 0000000000..6cbaf0d8f8 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_53 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "27", + "previousBlockHash": "DF79BA89722511B31EEDC89E2A1EA196F29887A45D31B8C4B20B6FE657D0C516", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:gYoN1EybJYf8yxWpksyQD2XFSJFlNXhsUZBZgWHzDTk=" + }, + "size": 29 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063161762, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "BAF111C73A6DFC28415B40796361B4F9BEAC604FF7D0493047F4A654C3F28219" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////loCSEELZHYgsDsIbEJsxL6OAe0kjHQQNnBhVAs10WcGCaDMPHQmxvtBjixkpsJrrt9TVuOjWGcsStkxhJOAsIc+SccCO7an5AdJ7aw7VKlbau4qKeIstitxgyngm4CC1FCWBtejjeTJTdjvI8NOUgM12vMgA1zkIn0K2XCI+AeeMj0pvjI1AEDxWyR7yWZkzhvsjHu3+NZvAcKH9wBQVEo8JzFavpRr0BPsOKiJYx1489voKyYGLTDtfh9McNsXPEybHC63oMyjGkc4ZQ0V9Yh+8qvt+h9KvfD7Udhro4qDXlCHVNhDt9pzYgsFcvWvfv7BMoCPphIsIAcHj8oqeHcYQyDVwOYEgLyxhZR2uJXZOFLAA05OXBq5j2nq7CHMPh/lGUXTCpciGf6d5snqVUo2nIcGO6bblCsj7HNhECXVtlqTV/SiP1in/I7YaP90WnpC1h3wNCvJse3W7GW0uHEJCIUeWX8kAsOIomh1t0Y89LDEwAc8jOwh+EqwNfNNtetHhQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBIembkLykSnNBBlqUI2Lzk6garIwMFgKD83tT8jQbYUpNzPuJIjy3OHGxvzh6t2DhozI8+BuT6qavmn4MGuYcC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_54 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_54 new file mode 100644 index 0000000000..cf3ecffbd2 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_54 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "27", + "previousBlockHash": "243990CAEC2EF6DE3EFE298036D2B08AFE04B411F09EBBA30F1DF93576624985", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:rslgFjy//3J1LrgxR77ZYPLpILjtcU/HVcJzS+WLZWM=" + }, + "size": 29 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063167202, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "69C085CD88556B2906F67F778FDA750D704147248203A6EE0419DC0D8B21E277" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////mNAd7yauX5Rm8DvYt/z+Oyb+FjgIVpCnZ+mX4GhoXtPTjpf39F9bePWQ8nUOdi+psz/0ZELlApLkWPy8z09VHXm5VqlsiVjXCO4UlV7455EZ7onOwCzGjsFZEC6BodyWCShDgDrBcgpGX1jTEISPStU/WKeIlHexPxXtPaRkvl6Mmmx9ZlsiYG6MoeSpHxyMtXdfza82xfXy0ecDsZNm/SFi2HwWhpgoV3EhrPgZMCdhFOxrkf4/ohbghJAXOq5T6EGYGzvYHl5kmcZW/MF3HjgnV1I2cvp7awuQdG7wWhszmtyAuwGblVcBBI/XFRK9LuBtZLsLMOD6pXc9MkXwTxx95ejTg3BDs4/K6F2e6fSkUjNfgUMX/N7MkiOFXZy7DzkXARf7M5NZ1VGbCiY0GzjL59FLaH+w2FBavvJ41oTWmSrpsDzquZgy/StMD5FZhNTZC6dkYMmDOecS/oOJ+kcT+2jY+tTcCHPfq3TeYPL/k+x2HNHCs5hmRFg91FPjoLSxQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDz35IE7hXbNwH+q6p9cklTxV/EPogitAuYaxQg3S5NO487tZxQHHlQeF1keP7uo+xGSuwtj83IbRyOocxB8IQC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_55 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_55 new file mode 100644 index 0000000000..69adacbfcf --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_55 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "28", + "previousBlockHash": "BAF111C73A6DFC28415B40796361B4F9BEAC604FF7D0493047F4A654C3F28219", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:MC+7bAClvI/GadQNbCIm00F+DaKJ4TPDA2M6pS67yA8=" + }, + "size": 30 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063172742, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "3FAA811E66D521D83B33A4AB793EF0BCA5554586A233797929EF524B9F6B51D4" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qhz2mrElOf8gTYZVM+Ci16K/I/BbvriLpgpqhObGTgLjXwAbULpQo/13sErTecTvkLQxaCI+x0U8M+mVPmTlgib5B7t4OXPoZ0LJGpHghEMCSoXc5KQdrSx+k2AWswDdAyilSQXGTBiSPKxlOKZRhAORrMU61CvH4UTfB+k+I5VSichGzs4N+d6I4Ye9VETxoDUYteWRNKlhgWvtz3XTsiIoPr7IsRIax9Y1hQrTEL2LhgTcWY3RkM8bFR7fLq9ZOAewAvWYqF76GcTyhkgEf9JxUBaap5NYqaPvuguUGyy1jUWjJGjGMvzdJKImDcwtlAMkUraLFi6I1/gvBVRcPputC/wf8H/TX01znN6qfzdKpmmUKrTidk9CnjM4TD26eZFTmxK1OuqqY6qVXJfuEuk0LFceOU7bqw0Hca88Uovb8zeZHUDd2qOkxcP2zJ135Oflu3GLvx1qzhxLLeJu3Dlpgj6D9tqvLS40JUR5CqsuC9L4oDVfD7g2YEJwn17b0igyQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAgh3JUST2r21LK0+AOUQrWfR/ALkB/U20f7ktI62Rggiy84qRracKVF9qCYcrXu9zSpjpaCe+FxR/aP4El87wN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_56 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_56 new file mode 100644 index 0000000000..5c9c6e2395 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_56 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "28", + "previousBlockHash": "69C085CD88556B2906F67F778FDA750D704147248203A6EE0419DC0D8B21E277", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:RMRdLq+RFlgJGZgkOsqLL5JdeeQ3pWJ3wK1mpvaERws=" + }, + "size": 30 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063178177, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "20D0D47C080AB4463F8DD2800BE4A8E05F5EF24DE5EA7E3E5132B673C77186C0" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////hhaUbuXleKyeN+q7Zfa4cG3PDfYnuE2ouY+3XhA/AQ6yGofyBUptLzT0eg730SSNgRZl+DFgYoVBjBBMeCtS49+pGlilNKZG8Euz9fGfkJn7sZ2gXFw9PsXSJYAvdTV+CbiA7a7asunI/H0ZQFS76GujHI5J2W/BMW486PqOHe59bPT72Zy7fH56KgnlPLCjlnJQt//hqL5xHnSBQjKS+QMcZ4xewnkVSFObRtB10IF+nZVdEckcaTYWrdYGRUe3y/GuWn5fnP0Mgj9zOnNt6yozaUxYd84Nnna9NjQAE3IHcu5RHtHcuRAj+HNS+dyuCxTLmpeP/DhzMCggtyFHO0oiPbts8TwqZU1szGZ1xCop+/03SefjY1yokSf7Za7p6HahzBxk1HQKIYmSrxPOEDko2ExHv4Ye3ptuXErZAlzrIk5VQgJNB82M9w2RzF0Ttw0q46Hrj7zO6bacrSHXtxdXXBuOw2qUXx/F9bxU7fZeoMB1g2YVMTBE32UdgJRtnhY6QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDC6NyKyZDLTfEgdS8gMP9dUGJM5l0Qdcp3/cQAwV3d5ZcHgtySvdiHw0COg6QCLEeHtKYKydGZhZJCOmNc4zasH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_57 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_57 new file mode 100644 index 0000000000..f4e4f7387f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_57 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "29", + "previousBlockHash": "3FAA811E66D521D83B33A4AB793EF0BCA5554586A233797929EF524B9F6B51D4", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:MmwJHrKy0XregVoIFmWRqESa8Tg2NcyFReEIbD7+qEc=" + }, + "size": 31 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063183741, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "88701E4DBD21AC99DC7584C40B1F10978220315B7ADF9A32C4F9773A6D83C8D2" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kIj1REfftybQVTwu6K5Q8ca9Up1k+kd72nVMLUjofyNZmzUtdBdRKURgrZ4rfqu7mOWyJsWf8XBbQ35+Cic4IL7nm7FtkAn8MgN1exmZQ9qcKX5b11/AtdDmOxDWmyoJA/fcCYVskQ4543u4LveSyk5Ijjgqyqgw+uxD8ruei6MISQw/HaFM8CyLew3FXhWKj6jFphttSp6RsPxo18JesgK7JO+gc29lamLFuNI83H0lFOu7IlbSLSEegqfz5NzGil/1uXeFQ3Q43jYqhC1yr6UUp/rW3F0HvtQwwdHYj4wIcdMQOjXgEJwFBcIZrNCcDEl3kn5buBW72mxVSaz1YzK2tlW4lQONYus8dClXWYvZGSXoogtPfkbe7Rt/y3an2xdyZwn2fx9swbD+gnVsAqWCJQsawYwO6ekd4fXSk/hEi4B7suuSjqge4RlMSqSrf4swVDfs+QKsL4YVLXccux9ctuc3FNZYRcpBmINpRRP+32PoulIYRarpOs8Xag0r7lvvQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB4Iq/uAmSea3KoXMSG4JhMaHaSDpMtJwm3oMDHpuOUkNk1xMzZQycnvsQ6t8eq9S/3FhmKoqPqhpPT4fyu9/QC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_58 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_58 new file mode 100644 index 0000000000..42f1fe4431 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_58 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "29", + "previousBlockHash": "20D0D47C080AB4463F8DD2800BE4A8E05F5EF24DE5EA7E3E5132B673C77186C0", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:4LSW27W2Uasblo5mz1P79F8q7A0phr2/CzWJMZe0zCc=" + }, + "size": 31 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063189196, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "6C843A38C8F51E8C6576709ABC9ECB2FD791F9B5D976957B3282AEAD92ABB256" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////seg0Dhbd+WRDo4NXZwvV7uD1qToyg0AwZN/ElWMk68MJtl+824edy0/jdlVmGaD1gPJVQd5490ncaKo2cfWd4tNLxdp5J3GDn1a4cpCK+8FG89yWFYPXmfbIUeV5ttwPGZLjqFS41SXHcK0WWtZbwyfDn8MxM6iC6nCDs5sRiYtSa36PafWGuLYb2CTDAqrpkbSFXVXZ8vDevkM+MgvXdInFXmkEL6YQBF2EIgTnriPp0pS7W5FIG2o4gLPu3eI2oj9xfCd4okL4cXrFca8K02fHsVHa6QbjACcoB9mBwd7W5qcN82SrEXc+EDRThySckktByH4+GBnHMPZo9eomFcJbQx7NEo0v7t7U8BW0y1aHIwLvQ5UWr+SWeaT9T7mEaN4npeFSWAfKJaOPlYCXeC91muMiqOLD2+2XPK9MHCA54Wtsa1JIeErGXLlJ2CagRSUOy9UggwZNUtbRuIvPInMJpPRTM9+elF8Ze4aG6Eum7vNPWawPPYds6g+9r/4D5PvnQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDoavUTMTw1ixV4ZnUPv3dh7W3IuVO/r9hM8iC+qU+hgHiLQ3rA917HhAJAF7DKvYBbMK8y63bdfYhMHzbMX9IL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_59 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_59 new file mode 100644 index 0000000000..04345bc416 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_59 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "30", + "previousBlockHash": "88701E4DBD21AC99DC7584C40B1F10978220315B7ADF9A32C4F9773A6D83C8D2", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:F0M5qhyOgwCEX9Ivm5s88WsnOZEAV6YSCr6fBzTizm4=" + }, + "size": 32 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063194958, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "636B1606F64EDCC8F7B96C5FFF91ECEEC80BD1CBFA9F9022DFF0F470490FB64A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jnZ40kqRnNi4WcI7D2zI7IlWdDmRuNzV4obaexyn5QInYDcK3zW7/hB+x8gMXkUprhYefFiP/sNGZJCLNfIWUF5k3BX+2qg0IFUw/xqXLO/LR+jQPYMroW8KnasiamyjEHqhluMveo4JY6We0pxLjKfEZFs2sbr3W8MEztd347dGqT8jh4vBjgITu53340A2tqtlZNHLyYXxP+fEy/tJIPmD0i65yMV4mXnPw15LgnQA9LkhOKHYH8kBuKgvOj5C2fscG/3eMRkkqAsOU7rgenlYTbJZL//+YViWgvn2a98F036IDjmS8/9bQNyQhhR2sorIQ+dYeerjN++oKZmPIcvClKgy0OTjWZIn1qZB8PaLz725zXjJZq+17bRP3j7gbnSAnZv77cSrwkxWhM0Th+fROVneznvVZmf9t5WLVLcY71Msq1tvlkaJx0/+Fhdhf6CYm9UiluadM7RjT25O3g4lEt2Behylsk9CFTYxbr4Zhh1ijPje6Js6kygI8juCPcaIQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDdiLSgNZc20FM3NuI1BOQGb7JseP1PN9oLGqS50Safa/+vi0tWmT0ub8dQI3OqEFnlSeAW8KFSQ5mNkwnE6PkB" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_6 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_6 new file mode 100644 index 0000000000..0b7c657ce2 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_6 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "3", + "previousBlockHash": "7B8E751B057D19E4F69477A7661AA73EFAACE4DE6BE7E34D287FA506EF6E2433", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:tfAbUJdLuAlV0yYGHCsBayKJ318exGze+fIoJCCpgEc=" + }, + "size": 5 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062236172, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "42AA08DFD4FE8165F38CA99D942CA7265D32D5DE2FD73AD6CDD775CC68216A49" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////iAekpxhqxfIhXmhClcrTUhe2rmoilfFGaZ20WdrFrVQHTeND0fTM8Qjjdd1PvZQjgbEWbjHJzWe0asGJT4BrckNu1njbQ7F6JnUKPs4nNiDf9yfZVcDMXwTY/VMFwOu/AWZKdEMiP8dFfSTDZwDLrCkG/HKXQfflRLieXidGgHOZqYLtFHoGum+uINulKonKhoYLB+VW9E0XfF1/feH/RiB4GMX/6sDMZVcLMUwaB59xmznokIk1ha5eMS4/N1qRMPIDJnAtKsooHaeuQIzuSQZ8ctwmeww0kYyqFOWEayZv31YmrSvTWN2bzrFlik8mYT8T9Sxn3czdKSIo+jb2C5xZnZFXBiLx5EgNXfODUIzIDK673fo+M5piLsNUhFLURMudQRqOj1P6bnmrbG2gR6s8Ex3ASt5LPlGLiv8s21S0vm8PMJn4qTNruY4f+U4xPbaQYRP3FBnKJLAc+Zdr9W750xSoMB89bSff7T1n0zytcYHhOqZuk+D9OFuySMHWjcuLQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA3OIGNDz6woNAEIxoauzXHvr1W8rR8wk3gieFbEw86HuM7jVFkc7pWNHx24f7P+hhCALpfcVh4ZWzFMoRjXuEN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_60 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_60 new file mode 100644 index 0000000000..e958f9baef --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_60 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "30", + "previousBlockHash": "6C843A38C8F51E8C6576709ABC9ECB2FD791F9B5D976957B3282AEAD92ABB256", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:9M/nA1+9oz6oVgrjFNEe8E1k5NpANfBwvivHqOYo8Us=" + }, + "size": 32 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063200740, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "506898D90A2C480856AE2DCFE96ED70A50064F5E71849333845255DAEB54D95E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////mQWNh1Q1CBSk0LeH6i6JxyGrdNRPDbvnI8jQV+4wS6j5LrZe+k7ezALxE1sIWxIfmVRzWkDhBuzZ28vjDMBhPZ7G9Y88kFE1ymYgfRFcKbR+5YQOXnXpzu2Neh4Q+FrnEZff0zL6xL4KKItNmhHkB86a4A3i5sfI/ju9cXnTrk5khBzGxCpq0IiCC6DudUSKrtUwPle83piHyNY7yZvLK4ZWVxqxn8rxiU1rXhbpOwH4OFpaqF+1EnIYFIdE22YzSg9TCLL07EO/7BXSZV3nEdXpMFEE3FE115mrmyCPfW7Uz7eD+qH0Xvs24t6m++mvdMV1nov29jewMxDVfs06AmUgynvVPOqZRTNIptlJ5xk2UkoqhsWJQP6e1EJZUx7qP1sAk0vY5cd37QHdgaVcHFvVnWTRGmKISdy66NFc07YHGyw96i3GTbrt4k3yvEEziflglkjrb+2lzRVaO8r1vaXk2NROSNBEZS16HipbXEueFKYr/L1QvZqYUjHb8Tq+wWduQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCOBbIX6Bqw5k/gP10etH8cm+ehX4HI5L6x7o6/Ee3gls2qQYL746FcD4//TmwtxgrUy5Du23UAx4Obnznl5ZUB" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_61 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_61 new file mode 100644 index 0000000000..738ab2e3c6 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_61 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "31", + "previousBlockHash": "636B1606F64EDCC8F7B96C5FFF91ECEEC80BD1CBFA9F9022DFF0F470490FB64A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:vVhQY0Q2ZEYpbUkytwP9UKt5rXKeBfnf9JQZ9RavIyo=" + }, + "size": 33 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063206716, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "6A708096B847B40C241C1B8A566D496E8D5916924936FCB60938A3C69D91E7DF" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////p0ygpVxyaW2syaXR4DVF+8EIQVaJ1xhipOASz4obnUHjkhcKOzZW1UR002IjTzegjt+IeiRynZIT7/1eOL1t5lmXpaFtpcBu3Ph0RS1CFf9jr66IL/BtoT9htnKygx6KAw88od3arB8QnjstusxfFUvM4+LzY53rWKd3hsNzn2ncQHvYR83abotDVyok02a8iDhnbA7rEDqKTBy1q1L1t6YPzuLBxgLww30CMWfU/uA6xlYN0bu+cEUn7vuGMWlRQSNeC+cPLoQ4rRWQRLP2UhGwzg4lnXnUngRelZBy4bUIJJUh5Hd5ipHqZLdhikLltedYPEX08qx3hhwRlLNcHNZiKYlY6RN1LaR9TUAhc2IUxke6XtvCSDULdBo1GnVzrgc7lv/C/QxAx2DiqGeC5au00L2nBVjGToSuMK9cfhOBBwcQCGIEZbwZt+64VrW+aTxyy12bkYiNiaLHwBor08hlvlkTobd0U5el2bWKlCNbMJy9uUZ/r2RrTyNPi0HbarE1QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAUvC3e1oWLuDbVnVOAVvBoJ0ZHSs8Fk5jPZZfHEEd4kDnoOEM1Cj68rQwYUb+5rhagfu6DPc4WwkH/vL26+UwO" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_62 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_62 new file mode 100644 index 0000000000..217650dd70 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_62 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "31", + "previousBlockHash": "506898D90A2C480856AE2DCFE96ED70A50064F5E71849333845255DAEB54D95E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:h8LT6l78b7jCUql2bWQcVoNP050oI35QtePLc2RenD0=" + }, + "size": 33 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063212659, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "4AC053510DF1B2DA7D52DDF33E8547D945FAD91B7A4D8A932381664BAEF90780" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////lmwogBAfnWOZNC1dHH3KvAl1PFXSV4yjAmUof3cs0eYEFuM4KOlF8O3ix0xGq6EosNLZWyP2ZpTDIRZnueWeXazXBlCQ2V4ohg2QIQzFVkGzAXXn/sMgVCbGS6I9xNZ5CZaHFz6rKkm9Zwa/POuw2le7A+CP9goEh0LwKdoDVGHfIIgfJZ2GRjAtNTij99vNhnysfDDgdYNwGKKsjlX1iuEODglNQ6/5bIN/hCkxCr/tTEM8MQU9liMrPNRaZD/CfU7w+CjcH0q5XY7Z9Byw/5C+uzKiJWrM2Sg/aDw353C9Eoosr7E5YuYAOBc6PmNgmmshSTEwVIwKqcLDJm1OaRHzlcqGaO6m5mzhEwJpfpELB7ZNIol7mZP1KsyGKqMNt1kpWEvQmTRZOKfNXjbkAFavNkNOgSLBTXU4DUuRQVUkOV/i9KZ0vnBrEawg+pjmg8dEVeERODTbkJsK+UJD5EBW6281SXX+NMOY7JPnhvmElpzU5VMQ1HEb/WE0q+od16ePQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAUNHSz6LA9wVt2sQ7uk4zN0eLRp8dljspe0I1qh1sHqopv0QSQdpiUjZL+lF2HH8+MFqMe5Ig/cRVQdrS11UUL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_63 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_63 new file mode 100644 index 0000000000..b17677f651 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_63 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "32", + "previousBlockHash": "6A708096B847B40C241C1B8A566D496E8D5916924936FCB60938A3C69D91E7DF", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:0VVS+FEzkdWebh2vfHmx0VCDOCuQgPFIx2u5IaDhAGg=" + }, + "size": 34 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063218727, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "4555DDAD8C155140B8361FB379E9010A32AE952C38B8D78C93BB5259469DD7DF" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////toKi7gCXnYtqG1FJraBzetNmyDaVFxefDA+G6r+C/SsB49foBRohvx7xUn3t7V49poKwjb5rlZmXFcp+Hnxtm/trPWTuj3PZPZaLNbPQxvfYoLXfTtD7oEnd+OdqEeMCCLQ1PApob6Z6s+hyb8luxv+GsCN4YytxmfESJPMB6sz0ofsUXN/GjkAXEP3e69K9oj5jjw8iWIT4UEgPEDkxSTaCNth3aZlhSdWKTJhC/ML7iwc1nD6SCPBkpryI+3PnkeJsLcXnD/nGP/aIKCy5iR43HfZyPVJgZa1e73PiWTZeEer07AKaPGNbKhOG0vXX9svSTacuT22X93bmBumxcMJiEOeTgz1P9pBMEQvkvrYpHYNvAocc2Zr4Lh5XFhBPmrzxrQ6qJwQvS0HKfoW/3NRcLFwAPLw11B4wSxt7yZzsZJf8QVj+O/528AbRG9mC3EjxeJT7on0C6YvEa8CVYvYph69UUKAxNE2hTnUf7U4jAXdf3sg5gK2R3buLo0CgB2q1QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAVgMrjiJv3A3Q9xDT7LAunVPSaGehiFfxpcRt8hvNFEUzh0AZSeAzQ+3gTFHjEyj5Upjn85yEi28xIt3ObXPwF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_64 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_64 new file mode 100644 index 0000000000..4f15373518 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_64 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "32", + "previousBlockHash": "4AC053510DF1B2DA7D52DDF33E8547D945FAD91B7A4D8A932381664BAEF90780", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Dps2624KKDuM4fW6K2DOrF0MKychhBqhZnyQy+g59WE=" + }, + "size": 34 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063224687, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "BA740513862F6D25F252DD34A4591642CC50D1AE0854D3FD16B1E6D219C86494" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qaafF56C3nvOpm4nysCj8Eaa971bnKJlM6aMw4LBK+FyznTSUPOoJljBMMvtonw7pTyviQbyjG2fZQvfxb1rdnlD7mVvZvWmXcIrFKpPrMFklNea9xTHX4EiGB9LXr1eBzYlbALbN08rfWlKGmI3M5/UpQPWg1i/nbZ+TTGCnNol3JeY08sKuLukPvnVvwyBqb0ymCY5+bUtja6td5qv49/J0WOQ0/fjH8cKkjIg3YV79/JWyY/T1BdMrUi9lzOgjQJomSrChk3KKV/T68Fq2xYMxUQ2LuH2m8OMFWWzjzxlBtyikOzPlp4xNBQflzOXCdh2ZDI7tpYHGNOUrK7AKa1ceXuUA6gji957OxCX2Z9y26xNrNq2JpCpJGH3mr/yuSmYRNKaImcazOsfZoVtTPyD4lrjmrkN5JGB/cMmguVEC8MLcDvFhZlIxr8gbKIvvkY/ve1s3h9KYPwdYcSVN2mSowsxFvVe18b1nzD6Ylw5qfUfjYDl/oMPmtrEbtIvh83sQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCO6gxw/I22qmESwL3MHhJJCJFPSsNIZ/vgAOdCiv5l8mHui67RJE3+z1Kfs0cH+3ZLme2kuT6ObwI06NN5aOYL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_65 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_65 new file mode 100644 index 0000000000..362e0cc7f4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_65 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "33", + "previousBlockHash": "4555DDAD8C155140B8361FB379E9010A32AE952C38B8D78C93BB5259469DD7DF", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:vKgS6K8pgFrldVR5ojtfCGrSlT/DOhiF8+1/ywhZVVA=" + }, + "size": 35 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063230523, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "6E2B07FC5E0D04F4CD0ADF2AFFB0CCFEABCAA4455B99036FEF6ED9275A6790FA" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////l/w/bhZevE9Co1W25BhuszZ9ivRfYRInXiAcfQd40FWTj3wHycZQgDKitVcXXMhzuZo6kxx17UI9vGsGG1uMtN92DhosLozaCtSmcM//qaUWzNsXyG08uiaN6O9E0y7ADgHYV6Ki1y8NLfZxgqgIWxB96QBIRodTHvs0UVHs/kfXX7djB1xO49NKqS+H9WEpshY71qTO/gjQzRmx1Ccq65Bc70gKCpllUcoSbw74m3P0RY1GcQSSaMMqXwZvr32Yn6e5heiOpRoBb/Kt6QeYyKEPtfyWg1guOdg75atWEAbWGNw4AlqndXihL+Bv4PyPZoQPOxB/bNhdEUeO7QEiLK+Cpz5gtZiBJL9wwISYpZg0WW5GHTCNcPTMBTrAzKA1z/uUGQaPi6j3iwA9/jgXq9IRJi8M/85qlh0G+jWhcPfLqlkEYRTh4fVQ8UQyEZjrUOeYE53Bgdt9NI234BtmHZ3d+PXFGWg/6ixO6fdLj/5jlcMznXwk6hM3FFzETvlh1trvQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBSXpTNccWgb6ycAMwfhxM/akT0SyR7zeEoKz/7r3NAnSfRPyGAQlUhJbEunxadSN4GMapsHhsfg4MBuLHkuCMN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_66 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_66 new file mode 100644 index 0000000000..7e272acd87 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_66 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "33", + "previousBlockHash": "BA740513862F6D25F252DD34A4591642CC50D1AE0854D3FD16B1E6D219C86494", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:rULamHM7nJfIZLBxfBHqVF5PO7ZXz+6rqU5EZfbMNgQ=" + }, + "size": 35 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063236143, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "0DC3C574B161FE540659D3FA4E16DD471E4AE448A444F02323A9BB0364FB4117" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kmUg51ixxvsDR2ZnXWrYZ6kEWlDA3Xr5X8eiai0UkBAEbdzGpnDPKl8R9+E4HQ+CoMWBriym2pDXLqGcWDnasx0Pe+ykGBRSlsTfsqpY9CRUTmEWPhm0bLPX2jpxEVGEFHyA7Yyx/To0rdLR9TJFnDinIJXUT2A3pQtnNvb5wnsSYwwXF8x3dQr8u7wUASgkkO5nmAky0CHskFsx9EBZUof05S8LwEEviNuD+o35CDuwn0R7JD3WN/rUYckdkBt/7bWgpfMij6UuPV7JHhfmIscS2ovrRPEOQo2MOi8OwrTQcmDmjEbDBZbOgXhAov2tUTb8H2mwObpcUNyBez7jILt+BLXZ5E39GPRW5sXtUVkp2C7gyXmH9t8v4zIxYHUkY+URZqB79d//giJ86DjvMkgLgX7eRC1hDy+7BwgnYGgzn1SzvHCtwxMPHYJ0Ssm8JJX8pUiQX0KJD+PslsxepVsSINzbHOiE8hfKt4B9+Q/oMxENl+IJhPp56nUW3PbX77nNQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCKOelVv1HI52OaXx4oY2nFiWQd5cNIWTMXOu1jqMKf0Y0XTEbZ+1UxxPiV/9QR0ewrvUEHnCpYOry1uCzWHe0C" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_67 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_67 new file mode 100644 index 0000000000..af1e506b52 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_67 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "34", + "previousBlockHash": "6E2B07FC5E0D04F4CD0ADF2AFFB0CCFEABCAA4455B99036FEF6ED9275A6790FA", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:wOHTwqlafE39sH94sSzExtK61YuIcQMX1WJ18YD+ax8=" + }, + "size": 36 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063241867, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "893BE7F312257D1386C02351B65E3F06029346989C06285DB05C7EA67A1B525A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////j2zJykeNrNPUf5LjwbGmlz09VbV2MgnYxe48zufGCMh0/BITVIMgWURSo5qxxCLyhm1f77u4A0CzASSzKLxsOp8zKDFI4I8D/6sAtfhMVEk303jinM9h4NTrsaCIAWcxFYoh9pLvfSH3Oc7syzs6FkVq4Sc85kYSU7Cw9Ro4E1i3UT1F7+3Z+Y28t38bFPUqkDPBpD4S/M8mpQNZbmtweF/tHsquZeM5afZXlI1OzEeBMrzURc+rITDnH6eGLwDi+nDHKS4rtv/OjWC1r5w9KQh2npia8m8ompJ7a5EXJ96T3RuEQdaIo7Kazl07lpJjGy20F2HJLCUqbjsPgv8CLeKkKP4uDuR5IGppCKlErxKQ6z2aNTyRarhUNVnpa2ohj8RuvgPcSnoGQARTDHuEwjx/ZJk+sFUDvvEYwQ3GS188GZisly76ozv3Az/5AdjBU8QBWKF8ALq4kp39NIdDEqVw7nLS2ogZ2alFK9qkmlGLXYbDV4T/gfiRFQL1f1mPMCoqQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDChgcUwXnbXzllkqv+wifwa/nhr5ZjaMiwvKCsoEVZEOz0PGaM5jxIWZusHrc8lm6gg7ZeyfAOT9NsvdhMenQ8F" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_68 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_68 new file mode 100644 index 0000000000..4a7c168412 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_68 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "34", + "previousBlockHash": "0DC3C574B161FE540659D3FA4E16DD471E4AE448A444F02323A9BB0364FB4117", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Yo1YHrvsWaNFzv6WHYFcMTWP+LzrlQ5EspU1G0XEfiw=" + }, + "size": 36 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063247501, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "699CBF4FA8BDFB3F1CAE6102847C08700DC11A79FC184A61503386A47AD53985" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////jUi/uZr1SPod/EhVY9MFhzk8tc6WAu7u2DYlzxBc+AmeyA/H13rZ4ZAcXpjDFy9yicQOBvM4ZJ+66nXca7Nup2MWNsoAwrL0njw3Mx8221bNmX7bf9TsO477BL5Xn3zkFB8KyF1GF07vilJN0cmAl4qgfDdWW78evrL9a5XNN2hlwBHkAMeMCYBgrlZFMMUdqzrg+bp8poMutRQpT281Reb4ckhmN37qnZvTjNTUEDwOn6cn6ZDD4Wxe9m+taMKJikyRrUKePtpTffK1WgQhABXTVGYBqe/5RP1stUFlCsFWj5ZpbN32mkrDd6Fn1JayG9qcfyoff4hr1OTqNIitFL09Ss6F3udLYElcaEWj63XSYzOzuf6JImq4eGdXjg5QWwvbiXXvs8zhh5T2mi3Wb2Zws4+tUlxT0ywrpF4dARMUvtzdsRLrQzlU2tkzoSbZRvUCIbIrZto/HV7iOJqxbpFsBdXPz+1iBBDB54Myy8SSRNsEQZW18w/5DJo++eOzBhQjQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCOFYm240mFZKqvDt8u+UtCi4mRQWbX1f3NljsdCFAlpEY3DvcQwp2xt5r3bcNxi+9qbiGj5ow+dNkb9LXhp+EF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_69 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_69 new file mode 100644 index 0000000000..3b0d16d4d3 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_69 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "35", + "previousBlockHash": "893BE7F312257D1386C02351B65E3F06029346989C06285DB05C7EA67A1B525A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:/qCmL1+2ze0IMrOJMJsYJxpIcdNRCVr35Ydh5IcwYQw=" + }, + "size": 37 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063253226, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "F0AFC18B21182AF0CB5869CA42608698E2D20BC97A90C62C1BE957FA261A652B" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////sJIYdlC116KMQd3T3JI31IgFYGh3hiHmtkq8kqpf6gDQixKjT70MhqJpsBi6emosspq5uCqTBTHexVULb8U7muMC3xgszb1sIancQ9tSm0CeB4BdE5HcqbXvKA2VjNZrFvI87BB15I22dEibssAYDnfkuFwFISHzjY2oyzHS3VVeEjAEPPl8IT5kTyJ0fPNMi/Qi923RX/4uMvOIUudbOyRq2wZiex+bI4EmL0T3T5Y4FIwVkgpfgBFFV26vjyUU+GcRmZVIg7Rat4zCFXu6KxFPU88XOBSozVq2tT6OYe7EdwNNIVLX5AZqp5cqIREIQVmssYtDc54ceouF39AAKJSkyY7TyVsvsnpI6/xOervf1HEy7n+HXZM4YuvlbzOOedAt/uTkrzKaJGVbvzdgV7Pg3JHGN3nCIGwt6vtfu+1ShiLPGa5oe4cTFk/+xx970iE6ynpCgdJFE8uLEHL3AI5VUdq0noJhOc5VEI2afVeMOGUDg2fSTJVF9AUDvKBq0dSMQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA1Z5RQDTVjF4uYBdie6isXkcy3RAJlgQZoysSTpiSgp1ZTGXLSBo234rRzg1XvcCUy6znwb2ao8lWp56/1ZCIC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_7 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_7 new file mode 100644 index 0000000000..8a72b04d65 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_7 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "4", + "previousBlockHash": "013169745E2F6730D2017DC2FD6E7F2FD9A3F9099EC5A76E92FBF8F144BE6D64", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:WKwoQ3/0eKGxfx7Iys5U3ZBiNkzWuTQWZV0k0I+q4QA=" + }, + "size": 6 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062241260, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "6C4C96C829413A645B7D646B1468C0E2BA5F4F6EAF0733F63BFB61D612689FB1" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////tM8GyISj3gOyyESnTVpQfyYyr2p+8tagsAJIl0u2KefIOB64G8n3/trLQpyPkFXQhuNjFA6D+IuHpB8UDzU1ZhsGSIAiD7UxruAKoWL2mA6yIS2Fe6lfmbpaF0RiS/zSB9veMqCW89ash4rfWjnErEaaF/rknu9VKpevANGwz2fUx0Bs1BPfjhc0OhL9RVXTke0g1moFUiZZhHCrw03Y98DI1mnoGZUBbz6LVKrHaWAZt+T1V+4KyoqhkFhloX5iGYjEmbHs2Hi+X3L8b3xzwrhADfnMzUBdJk0v7jFgS8VnHoKbqw19Qup/A+42vqF6X+AnsyYJFBfqBnS4B6iSPkzZOmb1CKuI27kS5YToG2MFGPLYTdxGv0LRacQ83LwrvQgg32XuynoxbaqgnNvwcnD8Ql4Z7hy0lG6ueJNFcgs1a8ycUl79evZmUCzvkR9JLDbQ7txhwYNyhDJ4gd51H305yLbLqnfVZpAWRKVvdq397LzoWG+1D3aUmh9ti7cuFlFpQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDC6pYAz5+zjfx+oTOls5CTIOClx4ELKUIxOt11usF4/q7I3DPHdAWfWjkts+Lv7j9OV7gb2Yk7CCPE8DXqeA4sM" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_70 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_70 new file mode 100644 index 0000000000..0996d06f20 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_70 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "35", + "previousBlockHash": "699CBF4FA8BDFB3F1CAE6102847C08700DC11A79FC184A61503386A47AD53985", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:JUaTot0LOXdgdXdQo0dNthpJQQnxq414HJSZl0S6FSQ=" + }, + "size": 37 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063258828, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "5B6A22E5CF5FBC942D94CDDD5DA4FCF0A74733C5A88C5383E0378D0E80F8F6B8" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pwA1fDbrMpzKmtWRcp/XYM+pEXn/AWzdg4cLY9weRk3l5aCW1czL2vDfbehI74PhgpifpQKrpFFuD/lleCdKZcRPOSEny4yDfp/yvobpDqu1//4bvjoMpOwaTnJIqFUPBbuDa8t2MX26jz9GNH3rp6iuraoao+CtRAwHb3Ys70nZG/tCjl/3Ya4Im9kskNsHgIi8nqZNVEjh+TXwLYZHf3bTLZvOdqbiGN1yESlYjubeGSzINU3mgCdZDcQhkAiiCfsRX5E4e/mdD0cDGGRQedcbiDMgNariuN5gDX2pnGzcCo5KRr6ZRM82TEZEuPHvA2SDchBwQRQ9FaD3l0GtOS8PuyFUp9V0jFEjAFZ9RyLGR4D4dCMsOxgNFzP51BnCY8DoMYYy+2MmBTxQepQyCjyE85jkc1jJ/bImaGDl/gq2Y0s4XWtQCwl5mg+bQ02cxr/NcAKaQ3hn8qI2/t7UtY94P/fck6Td0VStodLKYk8JHkW9sxRZ3LC3SXRvy41hPSovQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBNNGfhsy9L9YDyXhEwusaAbfEqaFWqmm40B2HzvEulgwbA9x+wUQRm2XF6Aj8Iwaior96qX5kEhKCWAuiM+/8J" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_71 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_71 new file mode 100644 index 0000000000..ded4c7aa77 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_71 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "36", + "previousBlockHash": "F0AFC18B21182AF0CB5869CA42608698E2D20BC97A90C62C1BE957FA261A652B", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:tRaGea4of8H1MB6GogK6TNRhk6WwFB5EZ9LedILn3h0=" + }, + "size": 38 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063264618, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "66DE47BEC00AA86B189D7F6C84CCD09C3BF5132CE98317575C94250315D4B40F" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////g9LJbX6ZG4DzZaSd7XsHiVMfbQdlN0DRpGRjINWPZXAMv+TB0+YZGIhPCMb48Q+xktzQU/z6Tlrfl7vGT4kNs8K4tSP8Ej4fFy2I5zPMhYWj7wOkeXIwtlYvL6FwQAz6ECUPlF95bwEPkHK7cANEoWcQO2aR01Yxyjfin1fZA8zPnLLDbzoU38/gUZ7LCU8CrsVzf6dC7YRQn8UvdqoYECu9d2vyFHmzCEYKeGo+q91BMffSjWuD0y5XtHOoBX0bt2pqp8AsTu7J8grHD9AFgvCfEwYrQKKbAwQ0mAO5ol05UBLQfedN9jzfGrNJUXNlh4M0JlTHvL/+7aHxBGbHAHwvHPEOTOgpZkAs17Po6FbkQg2QjxqT+JVmfuAadAk9DsdaJW8orZ3N+mcXHbUFPXXqGTlbJZrmnnuTx27bApXiOMPsXhBcj9F7RztkQ+zFghvoctDXaXuYeCAZjNSLA9/pzsAnkTJKoXO1xs5lO7/Qml48629J8gopc8aFKrq3dbLdQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCj4lT3pMVI3xEnPHKilny4/Fgkq3UfLBTTLeup/MdgMIv6DSEiAygomsiiaxgukBFYvtILafQCy3kBtEontf4H" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_72 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_72 new file mode 100644 index 0000000000..4cfa2468d4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_72 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "36", + "previousBlockHash": "5B6A22E5CF5FBC942D94CDDD5DA4FCF0A74733C5A88C5383E0378D0E80F8F6B8", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:B+bowd+6NBlUFWp2NfMfKHnByZ0GcIh3FplhvzGgWh4=" + }, + "size": 38 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063270226, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C38CFE307EA6C404568C82664FF83C9E964BA968DF5F796937911FCCA81AEDBE" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pIy2Hpoc3dnBjhrVo4+ab+LGVmhU41jb+zyv4GXlSDEVX1fKgZyC4EKeJ4GdaORkglP750irwQ9g8S8TQzuKqrTgWubH/6PwSShtsxhAyCRyr7RrRmfIsX0dpzP++qapGIqQcv1cnNA4t4pM793gBFJbniCuB/gXUsV2Ms2c6GzN6rJa1fhTfNBszuaVX2F0p4tq0FGOsEqa3+FCd0YTIw8d1ukIhqL6kYppPTLIIml7+tvWp1XZhgRUyEa4tQQM1nICJ5H3yiFzLfgVAJR6qgIcXJHdr5gE8MZm9hle10/gH2TEDvRrewqP0u/l0wUCT2Z2mdodZwuCBtUyL5U2QP97I7hgB35o/GGMjuoRh3vfBLZZCobkww71x03VjPa/mHMKqet4EmN4hES/l+lXtCCk8ysKh0WOy8hfsLpYSTYnEsD9+I0hfFaAaIsxOxPWi6mphqODEI8qxAys6niLBQBUBgMlYjm0UQ15UYUCYgyaXg0f7IY1lIFLmmADvD3eoHppQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAyMVpP8vfytJIv0aeMcZFDuzbEN/kTlQNHSzOcW+RDIEE1t/70BAAhEeh/eoCObB1H/o7bZaWc4mZJbAgmP3AC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_73 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_73 new file mode 100644 index 0000000000..1b3061d39d --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_73 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "37", + "previousBlockHash": "66DE47BEC00AA86B189D7F6C84CCD09C3BF5132CE98317575C94250315D4B40F", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:TRgSc6UCc2k/1D5XyjUEVdhbqu3CbeY2pyU3swtbImY=" + }, + "size": 39 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063275759, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "1153D366821280122BFAF8A6176A001C573A9699DD05330E421ED4FFF9051C07" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rS/A3WtnPkwJLpJH9sG/Z8mPrju4dS2X3v6ptXJB6iMOYv4tO+g6x3DNCrgJq2iehVmzIcVjCyf979F85F4huvC5l9gK2aO2s59tGF2yXXaV4rHlIhJkFyDFyTkP0IQpFHly/aP5TiyiBVU+DTznQoEDSRLTc2HPzkHqhhh+ZYj3kcfZerO8jVfgxy8K83GHgKlggVS6awVqARp5RqwZAdcGwUInPkY6bmDtP4wyWEgoirZltuOv1x1l2PFD3ibTDnsdqRmLZIJQgiEZ+PBek7a8P+ki+MKEu73aF4gVoxLzYNzWTGBTQ5Oh2y2FV8eMHPsBfNRjRW6G+KPBGiyYYGQW4RtbSlNMc3Mm4VhSp4A0GEDECsuLgMKQF+YtluBSa0tPSkVVuZT1qYlVX9M/N4GIRPicEKZ2f6HrfqtsLbr+s612G9gcN2/D/Jl77IMZVnFg5C5GbzCLx5/0c9cjNlJ1NHQTd5P5U7lw6Wrqtt+9nMMl0RWhynQ5r+cs+1NgO4zNQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDASCiv/V7wThXhZMXR5cRFaMKQe/QN0cSy/+avftIJsLKpx5Vr91uzwwvGZH6a+5LWgyaTQ3Xwwljf0pIPjD7oC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_74 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_74 new file mode 100644 index 0000000000..d8c2f71050 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_74 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "37", + "previousBlockHash": "C38CFE307EA6C404568C82664FF83C9E964BA968DF5F796937911FCCA81AEDBE", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:rTsnbbpa81HPhT6nrAAcBopxg4oklbfRWph/U+I/oyg=" + }, + "size": 39 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063281286, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "5E3948C4825C3E685EDFB21F02AC1AEF0DD633EFF5A514FAF6D2B9FE983ECF0A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////lqWOqzf7t4ZtNqgGwHLxIzU2hR5sM/9iF/yzZyNiJIOOXgZpckS6ralcbP1sN3tPomA8CTIGtItouLinof2FZ2dA2Xxt+hNuYGtYxVBEXiRsv2b5frvz9h0IcSDa1TYjFqSObIrNZJ5f208GGHZiSqapedB9AGZtPYTsal/u9VGIyQYpIE+lc4PUlZ+/c3EXqeMlISkE7Ii3ImmJkxO0GnhBKhG/vSkTvfejAhBF1PtLQYxJw97FrOI1TA050AaNXYkVUvsgirsgVRq+/8XC9huyZe6MtVZLdy0Eq+2T3mlm2WigXOpGls4vndOV8Canhxld40G4jriiRtOo/FFyKagw4N/xXZOTiqXUOVWHF6rXJVLFWRzfDxxCKo6fz4cBF7FNolWpIZalTKrsWju3AUDV5e6qoosgmmCcDBOEs17ysxtTX9l34VjwXF6quLJTYj6mf47mRG8I2jvGjFZjfjJqNTkmYkHAIClXhElaOixlMZYeZ3nCk+BmdPX4znokTf2zQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA9lv7vnczXV74UdKEpW86v4ia5JFGPn2Xyc69biRXqtOz11VcG92fhcUrqk5mG1gTb4YU3XBoaRtokUT8F0ywE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_75 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_75 new file mode 100644 index 0000000000..15d00a4a6e --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_75 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "38", + "previousBlockHash": "1153D366821280122BFAF8A6176A001C573A9699DD05330E421ED4FFF9051C07", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:xahv7y1gS60Eai8hV0dPHNyizRaH7ytqYNLVWfbQUG0=" + }, + "size": 40 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063286857, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "F815C46BE10E65F60F81D72F28EB1F9F34E63A30AF70C14C1BD8EACBFCEADA67" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////reRf25Uyhykc9x3qTzTrufbhUCn9ow7zBCMJVTPG8IzAEuMR3Xle0kNB8wlLnJ4et0LG+dtjP9RUuwz+4iAz9CLD7sdlobG6EFr8CGs9zJjsZH4HFGyf9AHvdsE2Yq+NBVPnDWRclLBKrK5FIbyJAqCMcTeor8wShZokAnF89IGRMWTWwc8Q7CI4KoAbhOXXtx5ccf9LMTjkzSRUIOWIl0jymvmb/U3CihOXC4l07mfli3ydyddSm2EWEfawNA4GOsSLKJFBvOIML8F1fpNadXE0esziiVR/TNIVHYMIlbyVoc6mJHiFVMo6Ax5GmCU9ehUhFT0sIeEc5YIChIWNVeNDDa4FudeRphJRFZxDW3Qfzm/iIEehqwh1oGDlIRVoAyqWAzV3Oe0ECMBKwox4OAPkxmzJSgKsW8mrT0HMikO41kCI2gTIPc4xsjGwChgozHb0EF9+L9pwxoLc94WE5h+48yrTpygeeU/BP1xAmxbFqWDnoFSB3H6O8ZbTKGcw5+M2QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB7W5gg1kn3DuHFb/G03o3t/NxvcQNrtQlBZxv2KCtkmWcQXYC0Dcu+dJ9zJ0e8jVlym9Dvzih49E4TXsVB0BsI" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_76 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_76 new file mode 100644 index 0000000000..37293674f5 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_76 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "38", + "previousBlockHash": "5E3948C4825C3E685EDFB21F02AC1AEF0DD633EFF5A514FAF6D2B9FE983ECF0A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:CkLx7BeBNUlD2BQfd2uAzS1Bf0bVu91wEdQXowrywgo=" + }, + "size": 40 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063292131, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C4D817DE1D902E7257ED168AF88442BEF26149AEAF539D492A1149AAC73CC587" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pYIpAS+eSjwt4aCYjoD0nf5hnc8BlB2CUjKIfcnzzaCG0/nltGe5XjSVX9tlqBjXo3AleOWWfJvLwh9yXQpIXtpFWYTBE8N4MnA5sLwoEFVzko7D5xIT4JRQJO34NwQcCKAzH10xPC84R9l2y+fSW0pWnnyMRFb68fZTgNkNvoV1Cg7ETh7sMz2tgtcudT0UjfiFbBhvN4zEjDMmVr3w2fSYBKpTe6oDS83+d7KpbiDebimaiQi+y9oFrokvStNLojhU9l6XLh0P4RIoMEVbx7VRgne5skKHKbuysEoUV3NA/j3yMKiVcFsn/22FQ8eH0vp+3oSwQiMIQu87xcpJViDUSprqv9/PVy2fOJVi/m6qezZv/m5m4VnRxpGUpFvjyjYgP1+mJ+U1v1R5E5a2KS8UqSu4ljq5yKaRiFXqBcSUwsVIT6EZ+9L1Ph4o13KPdI5NI62uBMnJM9Urc0cww0NzNxD1Qx+CxECxXl1yLHSzdEaB0zohxcQP0SFpD5zV3T4RQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDACNKzzcZITBuzNIEciJ52QdEf+Ua16aK8rKAmOgWwJopNC6xgMMLv7DvlUHg0EnD/ALSd+VxjuYTzn/4SlXuAD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_77 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_77 new file mode 100644 index 0000000000..32f461f2cb --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_77 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "39", + "previousBlockHash": "F815C46BE10E65F60F81D72F28EB1F9F34E63A30AF70C14C1BD8EACBFCEADA67", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:lqOZ3lKbWrRRW5IKNE1t5z8XkiQKJ2jMPnLrRy13Rz8=" + }, + "size": 41 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063297091, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "3E6CEE07FC6C8CF57E35247A438EF58A2E7E62637F1598F551E746BE4BAF8FF9" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////hGLkORfohkOCvPODRG0ZCRlMoGnw9YWwK2qiXKGst3VW29e0oFdwIj0cnpt7kkbcswu9DIHL9GAo2TG06HH9kpixc1tYdQkA2Wk8teYpuV1D0FbxclUiUq2rVHvNN7BAD2bTpTzitArgnuTtIPDGDruO+cgofKjudH7SR9foWDKgS5pzIlUiWqraCauNXNEGo6abbmAgrY4LHH/WNdZ+r9TdE1UyJ+e/Qy9LyKWhN7aZ26o0WTsI8Bq5IXRxWQR6iUsMAE6qe+TsU7kS2Sjc19SFmilPX2Idz9QcDHPsNGvkCW8eX9WngzNHniGpvpqoqjt+NGurFg/22kq/ljs1Lwo1n9vrumv8Anf33YG/RQ/SXIodu1Ofcoy/pllcCVhX39FA27ibttVIqBxGDklZGSd4+mcxYZpYWjBBmYc3VA3VhTKWZ/xOAE/V2pKehA8La25j4BNdtGbiNTodPRgjcfMMf6JEWgySWUBSujs8efW/CIfT5txO5/pt4dSeWSbQqE73QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAj4t34oTPb9MB2OCxRyCXmAySBXAVdtj17d0/k+/A2iOHVsOSrP4/nyhDI5Bp7I8LVjhtTgGkUIsHNJm0stHcJ" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_78 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_78 new file mode 100644 index 0000000000..d3376003fc --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_78 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "39", + "previousBlockHash": "C4D817DE1D902E7257ED168AF88442BEF26149AEAF539D492A1149AAC73CC587", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:FZ7FOlV+xDPyoLJrxWlgJS3kuSP+f/PvTsAPxyHcQRU=" + }, + "size": 41 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063302242, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C89CD2FB029F200337279480CA1F2F96866B5BBD008AD06506EA92E68702D466" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////prrtz/qJNT8sRH1FPMpgQygR0UsKOqf1g+/JF/HyfECkRURa7wuecOkSKEQGThACoaeOzNvMhmDuqjs1F7Cj/jfDbgun0uRbdtWukKGs+tGWZZhEB3rgfEo+vTMKwHQKFVC9BoDwE55HqABa5YsyaoU89k763ZRdhSMgwi30yntrBkM6/lB1aj4nLyT4uwCelG9tJDDMCywQLsTXl/dVjYvwJiV5ewJpd2HtUVepXTqO1u3W3YUaPVW4FvXwS5AwpDlp1C7MruWZ1expLSoCqLcMLav1ZZxpuG2CEgEfPbS1FxJr4bO/Ozy4RXkiur4kxpbY/R8shyeTy7yG8W7wGcCb2iQRp79n0xobWGaZCsRn5SlpI+Gs7TL3zZzYyzmy4nmCUHOFBkfg2BQX5B/OILQUR1CH3c5ICKSTYGmVbJ17hxJ+n2GPnd/FXaQnq0/Iiz1oAMU4MUnzONSgQDNGGOJbdH/iJZIHz5WtcuKNvrXY0WFEYhROaNE8swaJ5t7RITK2QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDFVu4T9meUO0d3hZ2XbqLSeTehgLrXwkb4tm7UOAokTfx9+7kVnkkaXxxJshba0XQYQ8h5Hxjis0pWDDLb+ZUE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_79 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_79 new file mode 100644 index 0000000000..7cc89dd0dd --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_79 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "40", + "previousBlockHash": "3E6CEE07FC6C8CF57E35247A438EF58A2E7E62637F1598F551E746BE4BAF8FF9", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:mD/b9BHJ6ZOJmLjZcAi7V0kEuqa18nBSJd7OhrwOlms=" + }, + "size": 42 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063307438, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C3819734F1EA34FE8B4F216308B5645B881CAB6079B953CB3333F66FF717366F" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////id05W390va+a2pNGe1L8EQ4xi8lbqV3o1TZr1RdULsQX7svY1QhacOw2Z95GkRpUtWYAiD8EauHwvxzTLEmN6uNatEhVHaeeWjxx1GHTNXqQyItMVQms2w8l6B+cKisbFslkheIwE8MqUwt4/tCNOhEo4+uzcF4mgxOTVvthgkKUZg9M9+F+Yq3zCECJwKEbqaz7FzXClUkDOjZFxbunDhoiMbUBEmb5GxYykgmaFO7aoSw7t4bfjyeOJm2IK4ehr0CUwAC/LkmfNlgqQrl36vbJ0I0ycLeTU9UWhyO9F5t8aKageQZeLeh1VZBLh3sg4w8lgOKNZTYQlT6oIA9FJOP4Q2xPVokzZTgq4ffv/r1G95wiLaFM02/8zCr5IdQ3M7cRVIltAr2T3FWOf0pTUhryOKn4BNLuEkGgA5UZYyoFug4ggTJzbcpuwdEoiHLMPxgJntXWhG2rqVnJfluGzcs0aO7ZXdWNpKTr1bpHbuIAqtdes4EgIds3FrqaegANwCi3QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBdIDTjuCxQ55KCgN2vgYPqpzWnNug+QkHI+eg6G3prDY4OAMuS23xxPTv+a+Qis8EMwzpsBl2Zpbg+XvHZXrgE" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_8 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_8 new file mode 100644 index 0000000000..54a34ad7c0 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_8 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "4", + "previousBlockHash": "42AA08DFD4FE8165F38CA99D942CA7265D32D5DE2FD73AD6CDD775CC68216A49", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:BbKe5r0maNiz5n3q8CZh6IDRCupOzkNIx25WPOqNZnM=" + }, + "size": 6 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062246536, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "70F8084A83D6DA9C496C1584E0F35961458F04ADDED67A30F85CEE922D8326F8" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qvyDcSVtphKMKdTkweqBYjXtp+60oCVUKELR+YWGjV6vdRj1zyBYM5ffm7T8xpx3th/dMnr/k0RPYjTTj8d1cPcHbov2JaSLTgLXe4oY4cnDuGM6ODFlp/gE41nB/BWOB0qF+vTiQKd1AfqQoi116/AG0oCvLgsGySxvJjNF600kaxdlEuRY+NHKCLlhryfos14uqt6HjobT2OJMjjrdHmxl6/3ZlRluXbLwnIblcS+LvIvDC3nB1ylxehe1koyfDoUFnX8rEguJbPszQquA6/WwuQUSV2H3aQnO/CvmDchvI9xGOBt+jPWpUuWl8Us75WbExX+1GOg/RYd1KB7CSAUd1BoAg6hS0hXG8JqlmGBONl+pHSt0T+XISs1UmKYDO1zRfaBSSBxFvNgHSbElzVzWA35HrNbDUqSs25jP65RMjtJ96CLNpCqSZx0ZeMw5UUe8s8EeVH1JJd/T26ZqdgFbRrcmYOd8zWB1VEb3yW0sGD6915Bkb0im0cXA+H21RkIsQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCeV8pMRlsQfbLXzGpAhXWPRVqxNwkG6eYREmYXGjz9IC2HgH5MTerFu2nwqnjmutyKsGZDElpzOV07H3qLTvwL" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_80 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_80 new file mode 100644 index 0000000000..c25719d995 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_80 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "40", + "previousBlockHash": "C89CD2FB029F200337279480CA1F2F96866B5BBD008AD06506EA92E68702D466", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Df3CoXFu4RIGv1/WFtTjQUegDgmOVNRXxo8KFkkDRHE=" + }, + "size": 42 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063312493, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "7D1AAAE2DCF7FCCE0C0ED0AAA2C3E422BB65A1BC875B15DBDCDDDDC4DAD1752F" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qduwcx/0rpYiEfY2t1EekMjKVfX0sxNoFE+7QCokZkyZs8etA9kc6LbVAS7E0qIsp34UW1Hk2ORrrFj+7lHCq53809Cp16b0Fg3ejgDdTBwV18Deh/QBA/qIErZzacYPFJjpAAM9TVoyOry49Ee5U1D4UAFtrLQHBiZ8xtnnICwJRirrc4QFpSk/O1SVfbkjiMjSZY/BlvxZkzxNjlf97iWygftSENtDTc1OlTlvaotoJKhjPDCtXEV88ykxwE82ItC0yQnF7R5IMMD9XlErT4ovZZi6uo9Fo4Hh0BA9yWcOIIbmgINA0uDFLdeMzjDsED0LfzhEyCup+GAa/V46NnQbpJxU8ip+g0eaea4SnqN+V4ZVhVPHDwpJNLE24Ra5IN4pdL8OFERtXBybulSxv1JNsvvOelTVMQH5k5d1KyS5n+8ZanoEQlnyE3sEAFLPYTNcdixIjKgnvTpwOL1bYaMHwsIlMq6LOGrChu9fwMPij4VlH8P757W5GgD3FbW+zAy+QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBGDdKyvlT72erwT+bdi3HZc+LVWtmnxHXmG6UELLsWnHNZ7ZLumuCU4HXkYCb3ca3YDcVBhL/QCAHQwvo2NwME" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_81 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_81 new file mode 100644 index 0000000000..1e89fe4883 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_81 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "41", + "previousBlockHash": "C3819734F1EA34FE8B4F216308B5645B881CAB6079B953CB3333F66FF717366F", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:evdN7CZpypGcF7wB16sFgAa9Kwv/zRhRivUEYJajamE=" + }, + "size": 43 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063317591, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "76DB5B32286C7022FC1C2CD6BAD7734CB2A828F5DD23699BABF0151D1982378A" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////uQQG5yrA2AExGGp8igMoLLLx6O1iTssNIAanXhnRdNXkkSvDjo4Li9LIMNCP7g2xr0PgL1qWpc9itVM4jEOnoPuKOLjtE2Xf2HjXN76kLMbPjP2dtul4q38C8vmOHBc/CiJYtv8/t/tZf1owbg6Ho5V/ezMQeXy+AHpUGG0BuQs+nOXy6pAnz4L4/ZIh6uENt6i2gCGfj5RzuCylapU9tlQvRWqB+mO5Z76pKsZDCFu64c7bKcRDy/tfpSaXW7V42GvPfguNpEpAyKxxcPpOp7jPv08r4djc+v8+Vva1DLIOSRUiUfD2H7wiE8WKjTC2j1XLj77nKhOQufflAYN1LtLZhI421UW/e0WTYA0efCRYoaj+sVjoUa508CQb3HVuBeexgYaDPZ1xYqL1pR5LD1LfZbtBofpJ5C8ZNSUV73+dQ6vGbgmTHwJGmq0UsbRpYPPdvXj+kfZ5DoIVk6OPpb2mHi2w3QI+tin5JZd7Yn+Pgi5RZY/xpJBTkJe3SIgMTwn6QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBxAmahKJGmKB4hL+6JSPJIGuJziyNLBmhEqJGIxHP7I0EiVW+u3Cd5A+CDY0njCzyBaeYK/XrhOdyu3lkjyHkD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_82 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_82 new file mode 100644 index 0000000000..3b4aefa43b --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_82 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "41", + "previousBlockHash": "7D1AAAE2DCF7FCCE0C0ED0AAA2C3E422BB65A1BC875B15DBDCDDDDC4DAD1752F", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Z5o05r0C44wM3dKlh9ubv92eUF+LxuLxjt/dF3fUqgc=" + }, + "size": 43 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063322708, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "CB8B64C0709D39BE1F6C87047259BE1294F8615EF96F7F3D427E592D235D6527" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////poS7c3E6zL8vMYMJfnpNuxv5h6HjKSxXfJCCtZwZn0zxIBeqNVZtEQ/+lgietisjpYBxgN17HCXjfgK0tN7x3IExTqr2dmSZRvjSn7I+8HUKrvvBJgn5dYn3ZF/6nbb7CZoZrKUqJex7EmG9uYjgBqTMYhpbwd0oqsUOy9UPhWh1Juu4YgNs2eunKcRKqhlii1d20k1eSR8qpnEzTb0D/rNceqGPc89PJjyVHXpPpVFlD7S+NjeCGbvor9+x0jiZNO+SyW2YnbAY2ByLjjKOmlHJxIW+xuD8NHfNWoM/612XVKTgxq5B7cuSuzfU7Pb+FMCAfdRnYIpn57A6fYSkLEUhG7DdnnbO8+xJDcOYa6wKUWdf99OHWJnEP62eUuqKE/Y7h9BNYs86feFIKyU17/Bj2GpLN4lgNjWbl3XRGZEHIf+R5Q7OB+HeuhOZQeX0Eq0M286XpG/Esln2jWUCD40EqKRcjt8X+6L/OuXdOae8ewKLBnA7i5uvc/pk8le2QTXwQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCf2b4CL/uqZWRTLO+tzX/PSDbqFEEoeopswxOwnJGLISJXivIuHO8svlNPJG/nCH67ekP0/0ZQAlA6WHSoukwJ" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_83 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_83 new file mode 100644 index 0000000000..9942c6da02 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_83 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "42", + "previousBlockHash": "76DB5B32286C7022FC1C2CD6BAD7734CB2A828F5DD23699BABF0151D1982378A", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:iPpRCZPdZfxZWZRmzT4kopBRXEQOEGJVmVK4ah2Woyk=" + }, + "size": 44 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063327856, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "B633858D53FE763C9846C532545C372E15ED7D1FC34172F726D972D4642D9652" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////tRUplsQH/HgSN8QN9tySzPyRX3OrDeK3HEU7kBL3QQaMbis6TMsekRkaal+fRKJBjz490gflgp1c8FnBpXuDEQtRWnKcFSB6psURlxqnMM/ImGF4MdZhxvAVg+uyBn6YFrnjyPjZfxCKOtLIdWgRO7p7P70vNQgrz8ib2U2h15o1iVutUkgIpqCjfJObCVrHjKqgprtdou7TTlQL2WBVSjr6NVSnGtYIwwh924HYaUNx4hNQey6CpByoMxfXSi+0EQqhIXj+OcY2GBe4NHKbI1qy5fMLB2FJnmLReWRjKyWKGBIMJQup9BhF0DsZqcSvBx5aqFdXd3LFHQ5Hv++7Tva7+9sglsCgdYrX5ajrzEhxBP5PJCTRFh3MZj0hZ6NAM4U/XxtlUqTa3o2lagy3P2fZFQ0P9N9sK9viE6BOBYa/NtXJkL+mcDTtTS+F2AB8Rd7pGhKakrn4qnqW/Zpg4bu7VH9z7CLLLFL+1oQXClPGEv0ig+CK+QoQbx0LqR1e/67DQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD3IdNpEsQu0JjxvoD3d1fMnBwATARk+bo7yX35cOcQqEpathfDlk4EbrmE7m25jm/TNqJ7ZUwQG0mPREdfQ8IN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_84 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_84 new file mode 100644 index 0000000000..8144d16940 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_84 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "42", + "previousBlockHash": "CB8B64C0709D39BE1F6C87047259BE1294F8615EF96F7F3D427E592D235D6527", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:+OSRvmGki2srY7tAxZboWaVvwqZTmRQxHwjkVkogtkA=" + }, + "size": 44 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063332729, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "353BAB7CECCD4FF46A380AEADA59A180BDA27BBEE74177DD9B4679643866F66B" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////qc8P6L4bpDH63+VdLWsynKAYDiL+/xFN0bns6LcaZCriZExfB/Zz+q+cO1R84OTbsIyy3sNZDLVnvekMuM7DJ2ZzhOmN+AXnIuOj5HK6ytEHCwnNYWmy1tSAlnSx7IUJDGbcxrKSzMhmkA7obJzqL0Ks8nk5hUjYJYbaMGhLkjIJ6G8qlP9wOzdjIt/96SsSlRQF/5BQk+I9DFe8ZI9+x+inXyK1afCxHFtOw2SL7lmO0WhPbGddvJ+xH8BCbtE+8m5zNUkUFUVxPTnqq/GgD4YfR0TsEKyps3LUz/+cYac1O3avOTzZxRym/DoXFn6XYKJ1k5Og0YY6S3njPXIFCpVRa780Air9WQUu+CC9jkWVfifIgskrrPRHSCTja5NSCwi1X2V8TK0yI5t8e60RWaRtmhsJujYp6/FypEXm1d8nCEV7oxNToeeaxP075Wyt5CbmngVSTmgcGsmm/nXL97sZZeIBs8SqfuLHc0/XbQw9Wo31t7Z3+qjrVk1AFC297xUuQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDARTaW/mktyQxVZnFomSy/zVD2HfLlDwunB675tGr6UGmatqjzbmAh+LtpYwtsaTcn6kjzWxytiRTolAwHYNYEF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_85 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_85 new file mode 100644 index 0000000000..018cf123f3 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_85 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "43", + "previousBlockHash": "B633858D53FE763C9846C532545C372E15ED7D1FC34172F726D972D4642D9652", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:8fIEzIAmjfOpEzMcMZnonhVj6vAGSaupxqzUVyChZxk=" + }, + "size": 45 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063337847, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C0E23D7154C0C76CCAD70DA7C03C24890B03AEDF567F45BD3AE63372FE384AB8" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////j1dhHp/CE7HEu+ELsNTEYxRBE0EjnaI4WPsHf5CnZZnypgGOYseM9fUZSyzOh/VwlhTUfAUrKYr0CRppfzE5QecTu94KubfUITHgLAga1ehwv6a/fE+XakFtluhoSPX0BA0plC03Wx784DWpLp4bQFy3asyjDZ3+YsYETyyxgKb/kW9Hh3vCfY13757qA+wlh+oiBTCDdYQFwp8cD0jUGkj2WeVTJdy4u+NjDN0wxeDNJ7ryIsN/bETsm0b3oPV4u2/V8nryU+k8aEmbf8RT7X3n8M60aTw93p+g3Mps52PeaVC2p5xxAdQ45+CupDUT+tMVLwYHrpe0Zgr8H95kTlVq65Re+SqZxeI7SrRXlOUP2cnhM3TPQBHLzKP1lDsbVbCJv9YhZuWbwwmYk1cfO/4vqgW79Hkow+0tKGLqvaUgKSJiU93ltjunGKmz9K/rueqbnSpgfybUULS4lo4VAZo3+msjnXqvjcpGibALrKYGvTGKMKaNk5rW8cjvTzyEmNh7QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB5kZQW/4bFQXS4WmGPMf96oesgV+ERppvQQJSNp/o/Vb8UoMOTi+2IKcHBEy+F/8j/iAH1/kZ3IOF8pxGDtzMN" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_86 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_86 new file mode 100644 index 0000000000..208fdf3f3c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_86 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "43", + "previousBlockHash": "353BAB7CECCD4FF46A380AEADA59A180BDA27BBEE74177DD9B4679643866F66B", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:TkucnWB4aCEZgFMv50c7JYF52kqarjDn0zinI7xe+3I=" + }, + "size": 45 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063343064, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "564A43022FE901E414F5F851EE344FD4957CEED62C90E211072160D558DA73D1" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////taIYb8qWku9/JwR3ZPZLCk0HUg7ZDc9wKKW3A+esqjBJAX97QRqNR93+WHxXBqMcrDtUi+hNhqJWO4fvRySifUhwrVtF/jhWbkRFvic7qRj/JN8bLroUqOPxYUJReEszAs7dIXUoUJ6jn/eHE+is+YbfRibllvDBa67MWhoX96Ho1nVhlx2ks529ZNQkk2TeljMx6dvnH/u48T0VupHg4Myz5ejhTtfXhVSkiM5+jFUBTbQOl/NqdfDyCMxkizlq4F5gJAcKFM6T1mHsnoXEpLMmClCwkqjS6wqyCL+X+0qwNups4dfqts+5RE7elk7kZ1Qpmws2SLYbuXEz5AtOM/enAZejDE6xD6wvb/X9fO/AyUFzALja3I+Vx8hWMbMRdgfTTNIom0bJ69zijZsP/d6VI+ObPmu9j+GVYkELliN8cQDsbMO9naz17CpQIM6aEGp3rQjDzt5BtBhMScR202KHyGXoBEdcGkisPdkrCsngk1YekOA50Ar4wx6tZ8Hffe2SQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCmckzbTDDPubLpIuDETZ1WIFBrIbjTrhXZjk2InD5U26i3rwBZVKRvp9lKCtExV0REjsQhf2dGAPih2pm7suIH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_87 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_87 new file mode 100644 index 0000000000..b5121f6f3f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_87 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "44", + "previousBlockHash": "C0E23D7154C0C76CCAD70DA7C03C24890B03AEDF567F45BD3AE63372FE384AB8", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:11EB303wzaUAG7IRYYecI08cp/AzOdqH6FC2J1cNCjU=" + }, + "size": 46 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063348250, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "D0731C8F4A5BFE25DE7120C1C57108195BB22567697FFDDCC774591DD66436A9" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////ufcRCQ9os+jlU+rWidf7D8dPMLGIC29ErIy4iw4Ml4sPdQS+5e9wvNC1FMgvoIBOpv51c3Dqt0zEq1vB6a3UzCq8xSRHNhuw8qlDiIdS8QkzwxpcmI8onXlTsg+oQEejDivIkmeAsX8RYa1do5hKyj8kydThn7WNTgQRSfLmvWU34SeuGz6Py56hKsw9AwrOt5iDqMo5zMDqGF3i3ULjMPA++4UBp702YuUUOgkAq2lR7WrE5GkK/DysTiMc8GLvhxOnhl9P33LiAn4acGXfSjmt2/0RpfhZxLkd6ssNFAsaTHh/AB1ij3+m7zaOm3sfGc3ecG9gX1amPXzMr0ACQS7Ez/q+PBnk8LdD2Fbz+UAClCDu9+IqOG4EW8chp1WkNVMZ7jrtr9RVwtM2e8RPCVdKLTzU+HlQZ+CBgeN4Ofk+p8PUlDDKnuPY+HVCnaQKJmJJWe6aFmtkIAULjjxkEHWK2aaERZvn0ada2ZhMQF5eqhJRI0bAKDLWUJsztbbNfVrlQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCN1cxYE1ZvRIvW9oUL8Hg+8UlpPOnpdvqVTZtAXljK2CdEIFSZSmnNu6ISiz1AVd0KDBEDfa3U3eCF71Y/+M0G" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_88 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_88 new file mode 100644 index 0000000000..e5ab187236 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_88 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "44", + "previousBlockHash": "564A43022FE901E414F5F851EE344FD4957CEED62C90E211072160D558DA73D1", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:vpMHq1C6LqsrB3atgGL4qA1Xd0HzAU0AwuoUATQrBXI=" + }, + "size": 46 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063353189, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "FFD55953325DEAE990C7B3EDBB243F95DB6EACC36AD8F09C35C4D9FA1D785B23" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////ksZPgAVueddddy3FCjCpdBEgiGvpYGLs8sKZjpR9Me9G5AVnjwCy3bfy6xnV9tS/iZR7Vz8x483d6x+NXX8RuGJbIX+HrjhGrplyf77JZk9re7hAM0qwjDAwA3mX3s7fEZQAfCVVRbFTCNC6+r7UQtCH19h6X/Ls1a4XOjfSeq/gzkU39Oii9NF+nZo5BwJ2pod64j8GjgVdfATfGDD03lVKLdBIIEK82tg2rxtQybLJQmfAGXMUgLd0PvK6Fup+DY4MCps+VLHqoax5bXmRsARQLc+GLhx+HMierHt1RJh1CxTGKXQXyqMCrx0zyiTv0wRzc1UTE73E2KLMT8pgBQzL+uPAer8ZSso+msUCgOooGEu8i61FSx+j4kagWhEWszC/rQ+2H9bSF46aQwHoq/fXNqjMijUdTIfqHdjIwRv1pg+6bqZO3L8pye1MDFZC13CRFxlIg7hHaZfpLgNbCRi+UmkisrPuGF+ZRexbKLXhAXzhjJlKwxSs9bO4ZV5CbLymQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAH6iTwirFz14YzZGEaJ4hpXXjfsQ1wP4KrZMVgJXSJptqVghvguvLL6+jWxiiouWlpOBACvdv8z4HU6kNdFAsH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_89 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_89 new file mode 100644 index 0000000000..edafa4a421 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_89 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "45", + "previousBlockHash": "D0731C8F4A5BFE25DE7120C1C57108195BB22567697FFDDCC774591DD66436A9", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:xzthNRa90A4Zjx9+oefAXkpn9j3D4xd7eC45Sut1fjE=" + }, + "size": 47 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063358499, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "FD591F6FA45B01C59EC1FEE626292B952CC21DBDD01558D14F88407E31B1FA01" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////kcEF78Fkx8foOxq47Fb67tMbNQYlD16+Q6PnE9pSHK4p49jyW5gy1Udzjv+dMzBDrctVfRS0RFUYvfTzE8OdfDHoPstATlkAxzmDYbWjC+/u2OCkypg4LzJOqVA+reFkFcuIC006AnCqGdm+Gsuwo8llUZ4MvuVqQg4oHfPPhf8wajagVncfNMzzZOTZaVN4j5viyiYKHyuG4m5jgVX1UCwvTM+HR/GfyoDeGfPO3RIUmLqI7dIRZpJ7509fdPJrj5B/uLQV852G6Y7InfiryIEYVxC3L23qekLbeuygHBA1mVw3ePFYIOzg8znI+3oz3mwK+/G1fKSOBYfcJuT3ApBpr2NjS7WfgJ43Dfh48tmHC6IZEv5FlBZYpnr1ZKpnQZVruTic18HcFUbu+mSvECIbUYwQWginDEUhMXmyXmmDk2Bi+hW2cGh38cxDg0y11C3VTm8GCzKVAxCOEEnbReSFYtkkETUZQG6J71BIGSuK+jTurJTC+UB7QPxzvxPi2sMDQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBhNLNEEkstxJCZ32oNcztBy9bszXLg7fQ8YioE3SNE0UD8IO14Vr0SY4iOSbG3u+YfBWFuJknqNzplThAE+3oD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_9 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_9 new file mode 100644 index 0000000000..0bf232f7ba --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_9 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "5", + "previousBlockHash": "6C4C96C829413A645B7D646B1468C0E2BA5F4F6EAF0733F63BFB61D612689FB1", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:1ODhNNtOMgjnlaPquKfHyI/IFMCqC2QdLPaL+/tSVzE=" + }, + "size": 7 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617062251885, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "3D2C0BCE1E669D763BA72284E92D977D6C4486CE3BF43FD0AEEE93A0C4A89B49" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////rhy2ZJoGUdfwPXNKHQ6rCZ/3XBFY1LdQF5cvWLnqCSN8QS+P2TeEW4WhbGSYtwsmgkq83uFOnv6asZdIayzbvc/iY7046jP+zj5t7diLhgFkQpnwZDg3xo1zCEJqDgrxAk4kCkJe98SdaAF+vYbfjbI3oex6pk/jixbIaTv7iZk7ByHRfNedV+gExAGejU+rqLYdHzWL82j9EprZc/SWffe2QD69CDQtghd/shj/q7msJgUlgfTeB61Tx/DeYR1ipwNBGi3XGgCBkWgJImVU6VnCat27udfNrW79yU+2zr+tUhEHqYdZbqion/IEyT35Px+CJcjTuLMvNw5n88ITEFZAsgnr/+4pPqDMtGh3AtI6H9lEsTp9ddr2EEMiznere9hmF519nfoaG2eElOovPSJc5zE10JrB4OW5+113MVY0v+q132MBVhqeh/lGfS6rSvXNNhkaghylEFDqAa8gBnbsaOVGv0NujQPOPQsvqYYIRCPWvNSG2s23cKqvSpYdNnCcQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCUKJovXR6uvcXZoo6nmJKUXlbpq2dXYK9MjWIg3AmOFIEhGokZt0hYV07eazSGu3aIkD4G35+RoxelYPJSMFIF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_90 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_90 new file mode 100644 index 0000000000..673f9eba91 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_90 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "45", + "previousBlockHash": "FFD55953325DEAE990C7B3EDBB243F95DB6EACC36AD8F09C35C4D9FA1D785B23", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:0TzKnwgRkkFIf2gzfW6A+h56kkg6UWbD9CXXxaX6zTM=" + }, + "size": 47 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063363638, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "CDD236319B37B70F383BD766BB8A5CFB47FC355A18B130048E6007BB6CAD4045" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////p/HxfwfkWNWD+b4++NsCGEOegzG551dKPFzbBLRKAEcHFg8QUikWJkXZV0eFSuuwtQG0MTJweR0cBAhis9LVZNyxxjW76zUZ9Dh5uYt75Rt6U2/0IRD2CGP0Rbvzsyh/DB/v1QTvAHaZqloaYvcnmCx7jyqc+cZ/N5CupicsZfMD5NHtMDMWIJ2VlNFpDfcUpWsVYjwtk5GvTMnVxkByyiaYIThk9o6oPwivUohAQM+ctwEb/uPKguqhtG2Wph2h663HPb22LBFawcRcmHETQ2BO3gbbDxwykpEMWyB3ZCyS9ltXwdNSrbBMFkXnxoib0zD/h40Yfzfn+q4g1DsTbA/E+AYvxpOrJ1NXZk9x0B8+WlMlo5jkqAKPwmdBP2PjUsEg9tZSGX6XP4Qrv32Q1/oOHvJ5w2K5kn/iYGYXLSKtDcFv2N4xfN18OomRzHJqyYdSY3j/0gEBXPOAJ+wzdQQ2UNQgg4m7uJe5U4f0mA8cW1BnwOiZeT9WH530HrGZYUroQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAKFPxjWyUMuL0W9VcCpLZlHS5DgHaLy+B5JnwJEfhk0dlgCAQa/8M4MsI8/WvtHYIjwXVLl37f+VfEnJRAE3gA" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_91 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_91 new file mode 100644 index 0000000000..c4dd9c36d5 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_91 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "46", + "previousBlockHash": "FD591F6FA45B01C59EC1FEE626292B952CC21DBDD01558D14F88407E31B1FA01", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:vd2D6S/0EUGlLHsbvHrSTatPByCQWi9NpEw7scPwxWI=" + }, + "size": 48 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063369189, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "CF7FF4252400ADCA26B2093739BA6BE2142434152E3AB4DF776882AB5F9648DD" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////stmSI5hZzbf3ATU/ULlpNA2iWUphqC6nfV0FuHl4ZST8PzEvJptUrmrhU1k8cvp9jUhqaNFGS41uCBLxU2zphTAAVHfvfVfqVrkZ4ipRjfjTRuncXuHfLVq2wixnEhAED9AYqEm+CoRn1VhR9jM9hNfj/FifPfSeWko8oMmkbh82oAQP5+AoWH0Sv71FWl1ujduqbvMwFxFJaIE8CPrYM2DyGPUzKVDfudNwYo18o70F7ZPvFMy6A9IPrvCKvIxyd3w1Cd0EMBnZT2n4EfucJh4QDc59iaTjFLGi/6nOGETcgMEq+RL1oQx7mUZwuXXmHA96scIdvrUsbjBgbendM8ONRVZjuykJdTPFnatE/GnoAK7oejO7yFLoEV4zkAdNmMvk5h7SMKTqRlDugSY5GcpPiHn4uIAUeJhsZt6i7Kg1lpMrFZ1B2+watLdxgKodqgPw9ZsXB0dgCrQLdDf2Es0nWfX4V5VmzIL+QcyS24JuOm+W/drtqRMlqERD5Joj5M/PQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBCYlgoOvU+npmvMlQIzj9i3Z6CJkJCwxn0+Pb1+6ActYhOKKtDe6c/gXcG4UwZxPPDFHB2v6c4cNVr6KcPuZcD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_92 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_92 new file mode 100644 index 0000000000..c9b57bc78b --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_92 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "46", + "previousBlockHash": "CDD236319B37B70F383BD766BB8A5CFB47FC355A18B130048E6007BB6CAD4045", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:ji/9HbTE/SZbsvSzxznMv5A3OcU18rgMAbf4YbyM0jE=" + }, + "size": 48 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063374562, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "C739949F34DA1079264CE10C7A19B6A7C5058CE20A2E2C6B39B22CE08847666E" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////gBniI/n5ZrZmQFGQCuCSiOGbYnNd9SLdgNjEWm/3kPHl4Z/vdq+D7VSVZLVHmwkZotU9qulStwY1TCeN0sQp8eMTeYJdNHnhrN5pUugf4w5KiTQmPM5UZLio8++NoPYpE0m/WUaKPqaXeaFYSEYR3oaV+1M8h7ZTr4oooMDKtBFqrsNOQwOzwLdvey1QgJh4mJE8ajzO6j2uAsZ9n/URhIEdZPfMcRXumJlMRSscFnAmpLZx5zQ7o1dqs744qMi5DONdwWHjDLJxtI0+G4dkg1paY1JImhobG2Lby+mgZQECgHm0F3AK0k4/fehXNyKq+nY+LkvbqQ29NbU1QTveUP8gnxMMHkoet5mB2TgqQYZFxByX3kV4tNawnxs5d6jAQC7MBB9b/3qglIm58Td3oCD6lMxJK7nSRfrqCfQLZYy6jNN1r1C7bOyicK/yLDCOu+odfNatp5ExNEimeCnjOA6eTMTo6G8fALzqFVVfgODbwnEG029ErZYu2mqTkQZjN8VBQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDApmO9IlT4RVvWQH/c5U/MYsumOzpA7phn2sL41X3UlMoT9Byrqci6KJN2p8eNrlgsCdo3dFQho4HMZN/YOG2QJ" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_93 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_93 new file mode 100644 index 0000000000..c108e194f2 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_93 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "47", + "previousBlockHash": "CF7FF4252400ADCA26B2093739BA6BE2142434152E3AB4DF776882AB5F9648DD", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:jljOI6cfTh1KUXSE2yURVgpJZjqt4ERJ0UCxSIRWgCA=" + }, + "size": 49 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063380107, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "611C0C6311A05E8AD8E53FE7A3C7077BA915623EBC747BE272267FD3E2D1E21F" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////o0jc85FAfqFCiKnIUZuzh/NSZe6ZuiRk4be2AGy/mehQ+XoWkFs4msmRKFTlhCvHgDSgSbu0iqs6XLzC8DEvMtwClGjgDJWiJm8dFqBspmXYwWEDuuKWJhgrnHJ8E48OBm870gTxnlPaSv0CYlH6K8ec1M5ICaHLH9HRdG+0TAXDImfZZ76BqGqaF5Dwt/H1pxk/J/3FKUst6DW4xFyskyjmJHv7DUpAwenP0thZQ2vqVN3vEjNJ/rHTMJPtQ4eFfvJtBl9CFTYlAHEXZNM3UWSkzC0GOCRCxn3bXWsOKaXxG6F5gXMokf3FsO98u1mjGTF/gfhnU+i8LjaY86zhYz78lgjNYlIkdR5FONzMK29EHC8wE5GfVxr5BwWIFb/zQfFn6TX4faIfFjc912I76KRXdqz1CytOqD1LDpTQFEx//ZuFr/PTsDfSPjiSIEyLRqs9wzyZNvTamWqeX2TvpGYAXvXzdbIhnY8F4QOM6cdVX4sST8Wsf1ZT8zyFkTRgOAdQQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBPUGb6NKyqXY9h/U1FV4A2vgYGilR+b5wZHEEh6F7mR563gXi6VvIlYIyQEngtKvYNUUmh0MSSN9ThqwPrOcAC" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_94 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_94 new file mode 100644 index 0000000000..971c3afedf --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_94 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "47", + "previousBlockHash": "C739949F34DA1079264CE10C7A19B6A7C5058CE20A2E2C6B39B22CE08847666E", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:5ItQMHQqgvoHNEkokVTBH/A+j/QtzwGcQE8nOzVVyVM=" + }, + "size": 49 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063385484, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "B2E80C129F5E40EA9402D83829558000F9FF30C5BCC4C9A51DDB7FD78374D0E6" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////tjPiR4l99huGvcXnb/UzzvmjBDSY1rTzmoznNihiLQSCzmnk4O8FJWpJ1yS+iKFGqfQQAqijjAQmxyNky4HHAyPigWoA59p6FQMKCbmyAH6cqTOt1ugDVsMQ9RDVk+jIDBWPo/no63Ov9rWGf8VLWmwWNxIMT38+Vp8HoD1i49c2xUa4gq1Qr2bg2WjFBYsehiPV41xxy+33LLwf5sc7tjZURKlqyOUPyR09rAz4hVGGCOwcB2spqKry8+LOze87RcHxfIiZI3yf5hEey/lkNSKJIv0gxnN5CHZvFCvmhM463d2h5HvG4390c++GHklwXuYejkfCTvTVCGy+TX2gPG9KWNn+hVS7yOVj1hwIG6IscsNU6cyFVfsGnceZJQhWDc29M2+JMREhwurMbXb3LLUmkWhF0bnWD3X13qFPGy3WFQS3DOyf+V7b9CeOCm+gnd04PkoHJKsxbLBcdlEZk9FV+86zTC9EUqkdk7BNDb4yKVt45EE2OWc6iyNNg5tFlk1oQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB9CTOzps9MulFRvmfJLqGruRPtPPeshzb/EuD8f2jZBEi9ShxersBR9KZScFmD5SxcWqiII89eyPh+o+aXTYMF" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_95 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_95 new file mode 100644 index 0000000000..116c38c929 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_95 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "48", + "previousBlockHash": "611C0C6311A05E8AD8E53FE7A3C7077BA915623EBC747BE272267FD3E2D1E21F", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:F5zXPhdlfXl9K9xDZSk7+mrW8bI+puVWgmQEAO/WRSo=" + }, + "size": 50 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063390882, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "B3607D2ACA245225FDE3BF00963EC73BEE13BEA73798022719BE4CF6A6C2F662" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////p1qH4sYGJzHNid9CMeZtRDy/QgjGQpfMQAg4XbeWOjXwSHPUoWB5HagjLVfGU0DUhLmGbYkqgwcJQrbODKOP6EHk0bPCyJTGSJLdpCU5bD1Loo0Dwgfif8H9kYk/r5hfGOMOQH05XXzjiXPleJIiDV5hHvf0KEStt/uM2jLLKjgvl0eqoUMmrmtiTD5EdqLDuKFMPrRBLTn9n/KLDM9ZVcalQsf0SZMk9M3KKzuRL8/8gKGvPE7FAXNiR2Lmzu5YEZSPENYGq65/jwt9oTGYc1HfCVSjKxygvVV4NgiBv4imhRwYFy7VXs/WqrKW+1e9tmM4sqaEVepK5TXAd9N6SXaeispC0Fp01HxCd5nHM4S5mtyhyiTglf1T5tgYJT4RNbXXaHx+niEbzwalPMmpVbowrSaiIH4BQ82xGgdYpVrIFCCmu1JZ2f2ync3v96LhyDJEYbJO47IzAqNTRmf0HCu/jHu7Bd9CkBbeXaQ12MmuB8gUcNJdU5UeFQSsP0o6udPcQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD5Zt4pAQuQeKoYamUIsiaxG4vnb3th7iqu1j4QGOCB8uL7aN6DnkUAusuJSLsxPgOA1M6VHPf7v8+PZVi3qmYO" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_96 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_96 new file mode 100644 index 0000000000..844624cb1d --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_96 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "48", + "previousBlockHash": "B2E80C129F5E40EA9402D83829558000F9FF30C5BCC4C9A51DDB7FD78374D0E6", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:r2Sjx7lz57ajE40QYdubKFHz9/B7RWx3l/iPjvnf/S4=" + }, + "size": 50 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063396428, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "EF06E325181C8FCEC566AC03C2B3F6D8FF421903E3C61F3F4330CABA2FE4FC7D" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////sDGdC/V9FiYNheA9OWy4g8Y3DM5W9fsnZ5MS65GxAojvQfPL4tYgxJMX2MON8RACtLG94Tz9ZeDHIpwYbsqSaY1Hl0Zu0ZXPuMPASbbjTBEtVwIzGqjEPafe9nov25KeAryFUQfqo8kNbSk8H4aPpSsMn0XJNR0WlJykif923VbwRzYJrV9CzA6HILiNwbP0uOlWX4ga0R5esOt6BKHx7R7H8+pL8VsJCi/lbuSIPB3liPMSqySo/hgio5ZxQu+iFc+bIWQ7mCebosepKA5PUWg+Zp53T3vydAG/gbpSuSuuqRiY5i4GJpsUjpR1Ps+QiSCNtSf7zTRb+fVXijt3HogrqBnjupqhtUVpQc7DSRau82P6It4g0aY+PG+6p2kyNeGClO9TfRn+3vMEoI15qIqvpsuqhAgy7uKhrbmfSL2uogKSEO+e6BEeDtlb8Xsaetw6svh3H9lMJlLdenbVRTj/w01Q5wHNXPms5ltyVeClOHG5NzP7+KKY9VzuDxGazcrAQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBemvcKrOF2fIv2qaYYR0q06fOZkO1CEQXXGGVLGtrnUkBwi1Ef1guQqibrcLKwrtSL3y/vqoeA/Lqlmlm6rDwD" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_97 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_97 new file mode 100644 index 0000000000..4030e7563f --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_97 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "49", + "previousBlockHash": "B3607D2ACA245225FDE3BF00963EC73BEE13BEA73798022719BE4CF6A6C2F662", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:vUvPBx9jGdX5ytcyWFKzZHxYb/RYiUyW0jCHEgFL0Ws=" + }, + "size": 51 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063401802, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "DF015E5E04EAE7DE14B9CCA86E6BE6A157702167C31453087B0D24D38EB5654C" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////t6To96ecMDh9nqOGdvveNQVD9y50cG1zJQRJyuUKsBwgRwFmEaX9kvMpDZMgibEDqa/1g4BlIaZmEJCU6Pda/LDrCIPr0jUDRXDddVF4K0z8tHjgQBinhS0llK5xEe6JDAwK+pLt6tBPh8bST9KUBEVB0NXRv8qSkgPpfaAsDqK04m1WoysggX6cYloOjUxztqA4jqpKBn2KcK885R281ZJf05nlBmpdhb/f2w4E3JhKZAlp6NQtf6kqneC904sY2F5inJuUh/+rLOQZwAJL8uG3yqMjsA8niHY3Zpx75ws/07Eyk8xyD0ZxPlB1GKjnn4Iok4FhMWzTTqtHFxjFHt8PuSwF2mEU0x4uT5Fd7lZ9hJw6FhLNkf0/PJsDUUsc2fHoxBpE0g3Mg/P2xQOqZqK+hxkSgFIk2kFL257xEEukvUG36pt/fGYWUZY2QPapZBZ8dbLE5HTP0ZvUpAuuGvf5cgREGBPrfSKoOyFUgzRz8AJTtv0Y3AEFD+OWZax7t+LBQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDgjUC3fYREjsiPjIcIDbSOMwtgbOYcaVoVkLaowNPFNPBg70oEGTDFshYgOr1eXTN88DpYXB4r1t5WM5Kw5ZsK" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_98 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_98 new file mode 100644 index 0000000000..304b4a7820 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_98 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "49", + "previousBlockHash": "EF06E325181C8FCEC566AC03C2B3F6D8FF421903E3C61F3F4330CABA2FE4FC7D", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:Z41XPIFV8a6QVdD9afzJinyAbTVxbH1IwqtOo0zw1RE=" + }, + "size": 51 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617063407036, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "DAC0DCA5DC258F064E161B27C298C2FFEC74715F67EF432C01606D76C128E1B5" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pvxQxS3UqNsV1F6UvZomN+Dh9Mddhn8SDwj98KEdiKd+dOujUc3wla/EQ0GDtxytjYSLbNoYBrG8Mf133YFlk5BtA+ks4q+KJPgh4HlnsHBlmqh8s5fUTiBeDJQ+/U8EAkepxKgFtK3NDsL3/Mx8jBaH8jY95iQt4AVt80lssEwq9kg8njlD2ziKY1koyb6UrqSX/AEn8nZtNZ7o5r+pjrH2AcclXGsRk4pqFaMZneVihJdffr270+2KGjPfCTEgsLfEtvpuO2+meFsOfDvRzOscPCUZfgAYm3IIBAbgwTXWymwosDRw0fxLs+XxoSV83SR+OuNX64aDacwg6mCBTQ8dXfBWOG56tvC46zGI8yqaUK2qA3Xx7vrIgIlDxn7QYeIt7ZtU7Ep7CDWbIlBdrtW8LXqe2i9qKHsbqxcAE8QgW7jfeGm9hC1marOSzGSXtQMcg3gxDGxHrUIK/pQpNhOJpWTfS4VC8E7zp9pMwf6tw1xp3zRFQ6MZJ9g+CSfWWJtqQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAlyGrlWWed1UOEIh5/02mAtq4l6Jp3+tJcX9hPNtwY7gsKtxCiqZVZUyarsiwglyLLYCxhofywwhGcGx3Dvz4K" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_99 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_99 new file mode 100644 index 0000000000..b6b56c6106 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_Add_Block_with_fork_99 @@ -0,0 +1,32 @@ +{ + "header": { + "sequence": "50", + "previousBlockHash": "DF015E5E04EAE7DE14B9CCA86E6BE6A157702167C31453087B0D24D38EB5654C", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:2f1LpzVtOLq/76VLfPkuuyLU7NKGlLBRtI+A0Y8I5Eo=" + }, + "size": 52 + }, + "nullifierCommitment": { + "commitment": "38D9A6BDEC6F35135626166DF2149CA3E3B61E2D8D62F9226ACAE0FDAD29DE3D", + "size": 1 + }, + "target": "882992383764307249142653314182893391999679604880738805815775866336575232", + "randomness": 0, + "timestamp": 1617063412635, + "minersFee": "-5", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "70E1ADEF509F11D1EA8B23E81C3BE8EA1053F58989F012FC30EAAA49D1DFD74C" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAPv/////////pMVQvVrNFORM0KaCd3L1HyuJLxSa0bDwfCmPUJn8zLoCC8+qi5XGxpI/dkLqvDASr0ve16M4YJeJ9Q4Q+Udupf6Rkr4pFr5OgGWMH7jutz/3euGquUGO6fk+/NpQ4qW8BopjCaKiwLRszYZW753+GXja5RsRrZayAiEb9dCjsrYzW2DZ0ao0Uyz+2WZIa6y1j/lHicw65D3wSl3/oPBnOygXwiYu4Fa1h7MHGexqe7YhChpZlIKHz/JkiWiRD5O06DVnlPWoHsvsYwTe9nj+/8INtptgqfgWAu1jpO7dQbFgNieSVsxSG6iokfaLCah7ZhqMklFYKlE8d7GoI/35QiJTgJZpLgScIt/NxjUs1zDGsLm4CQvy20G1E8Uog82dtFKOFSAc1UU3HjHABswj4/R/jYDQGsIH2FTtfwcRMuLeXP3XgmFHidUugeBuWiHQ6lBOtxHJvcnLjD/pomm1sraj3D3YZeSTlqhwjxg2TrhhNlihDv1msqm8zXqB7Fj5KV5FQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDD/je247Xzvvh8blJvaBjFg8o/f5P6ah8pfWG3eewqLCprzGG1UcYFI1V06h5+2d35nUjezg+f0bcqhJD6vXlUH" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_sanity_check_1 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_sanity_check_1 new file mode 100644 index 0000000000..0c3e2ef644 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_sanity_check_1 @@ -0,0 +1,8 @@ +{ + "name": "testA", + "spendingKey": "650f3bb0ee4d3e38512906b4a204af625eb21d4a4307177289b726c9a91243a1", + "incomingViewKey": "3fbc6da8785093b80a18a2b9b3ff3c86912515b9f1ae75b9c67003f51ed82f03", + "outgoingViewKey": "30b025349939e64fd2c45759a7899e75ee476a86a62381bc5bec0aecbbba0bf3", + "publicAddress": "d970e99f1fe1bf3162203e9f347157ad2ec3b3137e8fd229efdc012610334125c53fb67ac842b45fd7ad87", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_add_notes_to_trees_linearly_1 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_add_notes_to_trees_linearly_1 new file mode 100644 index 0000000000..b50fac3b8c --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_add_notes_to_trees_linearly_1 @@ -0,0 +1,8 @@ +{ + "name": "account", + 
"spendingKey": "0c01db2640122741eeacc1d660f748fd805ee407d1e382483a56435710499e4b", + "incomingViewKey": "9bcfbe972c8946af750761aa9ab55fe9935df21b88fbe07c01427b439fea1b06", + "outgoingViewKey": "f1ae363f64d9ecb6c43cff9ad784429aeb1b1f8c83feb3d747c5595c6222f0e9", + "publicAddress": "dec87bb2362565122dbd4c9d30592c43085a37bbeb375543c0b8caa14702ae5d3efb8c465cbcd01f016df3", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_add_notes_to_trees_linearly_2 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_add_notes_to_trees_linearly_2 new file mode 100644 index 0000000000..c2f56752fe --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_add_notes_to_trees_linearly_2 @@ -0,0 +1,33 @@ +{ + "header": { + "sequence": "2", + "previousBlockHash": "A1BA91BD54FCFE97D0DF8FE1F66B6217197B96CD52DB97FAD58B275E164600AB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:05RSUsGr57GP0XUMR7fTHZRdBs69Uce1iBpiZm23/wU=" + }, + "size": 4 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617568603569, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "FC58840611BC3E5D5961A4498C97833391C80EEE41B286CBFE68DB3F77EEB8DB", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////tCiLEb6eaSXTGxRB65Zlj5qkYJpGDi+gDG4g/9DQFP4B2WjfP4edzlB8a6ag0Jl/szIfG88Rjhi+oE4cJ76ozeBqWAFBM/HdmHj9d5wpU46OOwG642j4cdlghuTS/bQsBokn7D1OaPWYodAyesSccMolhU1Heg7gzv1U+GQAMhFhyQ9nwxvWA3VD9sKGpReAq+8rgMJmv+8TyYmPqTf47HZgsXOKHVxyg5yLybztauVSBLelFH03+8ond8G6WJVY6Ep3PsF6WVggPRXnhdFuu67pdIuVILGblzWe7SuG7pf5heaXkMPt9ahVNs1IdqaHlHJTnRcnFU4CPxd/ft5xCTJtK+20rgeemrkN3Bqbns0Djg+215SNuork0nGiOwZCmRba6XaICKBG9pmBqOHhDDk8/+LIu3QjpuBl7z6oD6CwRyjT/GeFtzn+/ijKoc6DVKfSDzId04OYCzvmbAFGbiUNVJl7pt+Hr4BK28x8UgqXbh54eC5FHoRHoogG/UNS+FFaQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDB4XSBaS7TLbD9qS82qj0YANkUp5fB1fABoEnplfQ41A279q8JOuVcMb7N6XurxgwishY16rz9mJnE61Bx2NR0E" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_linearly_add_to_trees_1 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_linearly_add_to_trees_1 new file mode 100644 index 0000000000..1598fa53e9 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_linearly_add_to_trees_1 @@ -0,0 +1,8 @@ +{ + "name": "account", + "spendingKey": "e242617fc992297e5631cc60c4d1c5fd072acde68432dcaaf32e284f9fabba67", + "incomingViewKey": "557850a24e15a710cfcdbe83c3c8c8bf625828b7eb06841d2a5c5f9694623305", + "outgoingViewKey": "a90c25c595d3234ea48494e988a94f255c26452f5470004fa484cf65e691f28e", + "publicAddress": "0321685b8fb9b590a289435ba62324a12232ec8844991c94dd54fa11497c000f893285aaaaf30ac9624888", + "rescan": null +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_linearly_add_to_trees_2 b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_linearly_add_to_trees_2 new file mode 100644 index 0000000000..1c4722fa54 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/fixtures/Blockchain_should_linearly_add_to_trees_2 @@ -0,0 
+1,33 @@ +{ + "header": { + "sequence": "2", + "previousBlockHash": "A1BA91BD54FCFE97D0DF8FE1F66B6217197B96CD52DB97FAD58B275E164600AB", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:4KcqEDnq6qivusMM/tYnK/kWbII0mIztYIlxAoEMxRY=" + }, + "size": 4 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617558953832, + "minersFee": "-500000000", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "5EA4FE958A4436E854865D25B7A35CBAE534CFD063EB97A20D2B278CDDB98B74", + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAACbMuL/////hb8MTTYK63n+Jcmd/B1TpGTTpkIeOdqXMRgk0XCB+AzyzoJ2CDD4NotMW1ZB8vDllMtamIIOtp+AgFJ54VxeuS0FmnumXk3TKDL3zBc5ILOSQ/i2CwONTndl1aPvky3AC9QeHUFDiLnbmsCaX/8lavLlc1M/MhC5U4rYw6TDL8M3VlZbL8SenKguFRtOYo1lrzfCp9R8EX1BeLoFzQLY3MGFdU0GQjhe8wGB1GSfsZZDLLXEntYwrT6TCD/NheUK24+YRLQTSMJtcxz6cZXzEzkTCpyIm1LG95DwtLRBQFgXxqFmzb0GPsjg6eEOfiPAmzxSuQtCdeWh5k25k/K3AwIcOscWhDQx3L5e+ujqAFFqxFeMX6LhQZ/Il4hFOVgVxHJvML7Vn3zQxJWa3F8SSk6S/wS6iMTMywiJKDGS26hy5rHKLy7RHWCWKegmSx5FR7rKNKRI8iUQMhOuhb4d2YtEOZv3YJw6DuvEzrYeonNCpuEOk1G1IIO1ZSB0acm8YyA3QmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDkgcwFiO3sa1F4NVxqJqWXLdSGI8n3zsJfc3WHNXP4nJzq5VOYB5SVHSZdMz14TJygeh0pvjYVTKZLVvJU5B0I" + } + ] +} \ No newline at end of file diff --git a/ironfish/src/captain/anchorChain/blockchain/index.test.perf.ts b/ironfish/src/captain/anchorChain/blockchain/index.test.perf.ts new file mode 100644 index 0000000000..4f50dcb33e --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/index.test.perf.ts @@ -0,0 +1,188 @@ +/* This Source Code Form is subject to the terms of the 
Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/* eslint-disable no-console */ +import { createNodeTest, useAccountFixture, useBlockFixture } from '../../../testUtilities' +import { IronfishBlock } from '../../../strategy' +import _ from 'lodash' +import { MathUtils, UnwrapPromise } from '../../../utils' +import { Assert } from '../../../assert' + +describe('Blockchain', () => { + const nodeTest = createNodeTest() + + it('Add Block with fork', async () => { + const { node: nodeA } = await nodeTest.createSetup() + const { node: nodeB } = await nodeTest.createSetup() + await Promise.all([nodeA.seed(), nodeB.seed()]) + + const accountA = await useAccountFixture(nodeA.accounts, 'accountA') + const accountB = await useAccountFixture(nodeB.accounts, 'accountB') + + const blocksA = new Array() + const blocksB = new Array() + + // Create 100 blocks each on nodeA and nodeB + for (let i = 0; i < 100; ++i) { + console.log(`Creating Blocks ${i}`) + + const blockA = await useBlockFixture(nodeA.captain, async () => + nodeA.captain.chain.newBlock( + [], + await nodeA.strategy.createMinersFee(BigInt(0), BigInt(2), accountA.spendingKey), + ), + ) + + const blockB = await useBlockFixture(nodeB.captain, async () => + nodeB.captain.chain.newBlock( + [], + await nodeB.strategy.createMinersFee(BigInt(0), BigInt(2), accountB.spendingKey), + ), + ) + + await Promise.all([ + nodeA.captain.chain.addBlock(blockA), + nodeB.captain.chain.addBlock(blockB), + ]) + + blocksA.push(blockA) + blocksB.push(blockB) + } + + async function runTest( + testCount: number, + forkLength: number, + ): Promise<{ + testCount: number + forkLength: number + all: number[] + add: number[] + fork: number[] + rewind: number[] + }> { + forkLength = Math.min(Math.min(forkLength, blocksA.length), blocksB.length) + + const samplesAll = [] + const samplesAdd = [] + const samplesFork = [] + const samplesRewind = [] + 
+ for (let i = 0; i < testCount; i++) { + console.log(`Running Test ${i}`) + + const { node } = await nodeTest.createSetup() + await node.seed() + + const startAll = Date.now() + + // Add 99 blocks from blocksA + for (let i = 0; i < forkLength - 1; ++i) { + const startAdd = Date.now() + await node.captain.chain.addBlock(blocksA[i]) + const endAdd = Date.now() + samplesAdd.push(endAdd - startAdd) + } + + // Add 99 blocks from blocksB + for (let i = 0; i < forkLength - 1; ++i) { + const startFork = Date.now() + await node.captain.chain.addBlock(blocksB[i]) + const endFork = Date.now() + samplesFork.push(endFork - startFork) + } + + // Now add the new heaviest block from blockB which causes + // the blocks from blocksB to be removed from the trees + const startRewind = Date.now() + await node.captain.chain.addBlock(blocksB[forkLength - 1]) + const endRewind = Date.now() + samplesRewind.push(endRewind - startRewind) + + const endAll = Date.now() + samplesAll.push(endAll - startAll) + + // Verify the head is the last block in blocksB + const actualHead = await node.captain.chain.getHeaviestHead() + const expectedHead = blocksB[forkLength - 1] + Assert.isNotNull(actualHead, 'Chain has no head') + expect(actualHead.hash.toString('hex')).toEqual( + expectedHead.header.hash.toString('hex'), + ) + } + + return { + testCount, + forkLength, + all: samplesAll, + add: samplesAdd, + rewind: samplesRewind, + fork: samplesFork, + } + } + + function printResults(result: UnwrapPromise>): void { + console.log( + `[TEST RESULTS: Times Ran: ${result.testCount}, Fork Length: ${result.forkLength}]` + + `\nTotal Test Average: ${MathUtils.arrayAverage(result.all).toFixed(2)}ms` + + `\nInsert ${result.forkLength - 1} blocks linear: ${MathUtils.arrayAverage( + result.add, + ).toFixed(2)}ms` + + `\nInsert ${result.forkLength - 1} blocks on fork: ${MathUtils.arrayAverage( + result.fork, + ).toFixed(2)}ms` + + `\nAdd head rewind fork blocks: ${MathUtils.arrayAverage(result.rewind).toFixed( + 2, 
+ )}ms`, + ) + } + + printResults(await runTest(5, 1)) + printResults(await runTest(5, 3)) + printResults(await runTest(5, 5)) + printResults(await runTest(5, 10)) + printResults(await runTest(5, 50)) + printResults(await runTest(5, 100)) + }, 780000) +}) + +// Last results on Jason Spafford's Machine +// If you decide to change addBlock() consider +// running these tests and updating the results +// here: yarn test test.perf.ts --testPathIgnorePatterns + +// [TEST RESULTS: Times Ran: 5, Fork Length: 1] +// Total Test Average: 36.80ms +// Insert 0 blocks linear: 0.00ms +// Insert 0 blocks on fork: 0.00ms +// Add head rewind fork blocks: 36.80ms + +// [TEST RESULTS: Times Ran: 5, Fork Length: 3] +// Total Test Average: 228.40ms +// Insert 2 blocks linear: 35.00ms +// Insert 2 blocks on fork: 27.10ms +// Add head rewind fork blocks: 104.20ms + +// [TEST RESULTS: Times Ran: 5, Fork Length: 5] +// Total Test Average: 365.40ms +// Insert 4 blocks linear: 36.50ms +// Insert 4 blocks on fork: 47.10ms +// Add head rewind fork blocks: 31.00ms + +// [TEST RESULTS: Times Ran: 5, Fork Length: 10] +// Total Test Average: 711.20ms +// Insert 9 blocks linear: 35.36ms +// Insert 9 blocks on fork: 25.91ms +// Add head rewind fork blocks: 159.80ms + +// [TEST RESULTS: Times Ran: 5, Fork Length: 50] +// Total Test Average: 3651.00ms +// Insert 49 blocks linear: 36.48ms +// Insert 49 blocks on fork: 27.10ms +// Add head rewind fork blocks: 535.60ms + +// [TEST RESULTS: Times Ran: 5, Fork Length: 100] +// Total Test Average: 7323.20ms +// Insert 99 blocks linear: 36.58ms +// Insert 99 blocks on fork: 27.19ms +// Add head rewind fork blocks: 1009.60ms diff --git a/ironfish/src/captain/anchorChain/blockchain/index.test.ts b/ironfish/src/captain/anchorChain/blockchain/index.test.ts new file mode 100644 index 0000000000..f0d29b46a4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/index.test.ts @@ -0,0 +1,318 @@ +/* This Source Code Form is subject to the terms of the Mozilla 
Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Assert } from '../../../assert' +import { AsyncUtils } from '../../../utils' +import { createNodeTest, useAccountFixture, useBlockFixture } from '../../../testUtilities' +import { makeBlockAfter, addBlocksShuffle } from '../../testUtilities' + +describe('Blockchain', () => { + const nodeTest = createNodeTest() + + it('add blocks and build graphs', async () => { + const { strategy, chain } = nodeTest + strategy.disableMiningReward() + + await nodeTest.node.seed() + const genesis = await chain.getGenesisHeader() + Assert.isNotNull(genesis) + + // G -> A1 -> A2 + // -> B2 -> B3 + + const blockA1 = makeBlockAfter(chain, genesis) + const blockA2 = makeBlockAfter(chain, blockA1) + const blockB2 = makeBlockAfter(chain, blockA1) + const blockB3 = makeBlockAfter(chain, blockB2) + + // Added in a specific order for the test below + // so that Genesis, A1, A2, have the same graph, + // and B2 merges into graph [A1-A2], and [A1-A2] merge + // into genesis block graph so [B2-B3] -> [A2,A2,Genesis] + await chain.addBlock(blockA1) + await chain.addBlock(blockA2) + await chain.addBlock(blockB2) + await chain.addBlock(blockB3) + + const headerGenesis = await chain.getBlockHeader(genesis.hash) + const headerA1 = await chain.getBlockHeader(blockA1.header.hash) + const headerA2 = await chain.getBlockHeader(blockA2.header.hash) + const headerB2 = await chain.getBlockHeader(blockB2.header.hash) + const headerB3 = await chain.getBlockHeader(blockB3.header.hash) + + Assert.isNotNull(headerGenesis) + Assert.isNotNull(headerA1) + Assert.isNotNull(headerA2) + Assert.isNotNull(headerB2) + Assert.isNotNull(headerB3) + + const graphGenesis = await chain.getGraph(genesis.graphId) + const graphA1 = await chain.getGraph(headerA1.graphId) + const graphA2 = await chain.getGraph(headerA2.graphId) + const graphB2 = await 
chain.getGraph(headerB2.graphId) + const graphB3 = await chain.getGraph(headerB3.graphId) + + Assert.isNotNull(graphGenesis) + Assert.isNotNull(graphA1) + Assert.isNotNull(graphA2) + Assert.isNotNull(graphB2) + Assert.isNotNull(graphB3) + + expect(headerA1.graphId).toEqual(headerGenesis.graphId) + expect(headerA2.graphId).toEqual(headerA1.graphId) + expect(headerB2.graphId).not.toEqual(headerA1.graphId) + expect(headerB3.graphId).toEqual(headerB2.graphId) + + expect(graphGenesis.mergeId).toEqual(null) + expect(graphA1.mergeId).toEqual(null) + expect(graphA2.mergeId).toEqual(null) + expect(graphB2.mergeId).toEqual(headerA2.graphId) + expect(graphB3.mergeId).toEqual(headerA2.graphId) + + expect(graphGenesis.tailHash?.equals(genesis.hash)).toBe(true) + expect(graphGenesis.latestHash?.equals(headerB3.hash)).toBe(true) + expect(graphGenesis.heaviestHash?.equals(headerB3.hash)).toBe(true) + }) + + it('iterateToBlock', async () => { + const { strategy, chain } = nodeTest + strategy.disableMiningReward() + + await nodeTest.node.seed() + const genesis = await chain.getGenesisHeader() + Assert.isNotNull(genesis) + + // G -> A1 -> A2 + // -> B2 -> B3 + // -> C3 -> C4 + // -> D4 + + const blockA1 = makeBlockAfter(chain, genesis) + const blockA2 = makeBlockAfter(chain, blockA1) + const blockB2 = makeBlockAfter(chain, blockA1) + const blockB3 = makeBlockAfter(chain, blockB2) + const blockC3 = makeBlockAfter(chain, blockB2) + const blockC4 = makeBlockAfter(chain, blockC3) + const blockD4 = makeBlockAfter(chain, blockC3) + + const { isAdded: isAddedB3 } = await chain.addBlock(blockB3) + const { isAdded: isAddedA2 } = await chain.addBlock(blockA2) + const { isAdded: isAddedA1 } = await chain.addBlock(blockA1) + const { isAdded: isAddedC3 } = await chain.addBlock(blockC3) + const { isAdded: isAddedB2 } = await chain.addBlock(blockB2) + const { isAdded: isAddedC4 } = await chain.addBlock(blockC4) + const { isAdded: isAddedD4 } = await chain.addBlock(blockD4) + + 
expect(isAddedA1).toBe(true) + expect(isAddedA2).toBe(true) + expect(isAddedB2).toBe(true) + expect(isAddedB3).toBe(true) + expect(isAddedC3).toBe(true) + expect(isAddedC4).toBe(true) + expect(isAddedD4).toBe(true) + + // should be able to start at the tail + let blocks = await AsyncUtils.materialize(chain.iterateToBlock(genesis, blockD4)) + expect(blocks.length).toBe(5) + expect(blocks[0].hash.equals(genesis.hash)).toBe(true) + expect(blocks[1].hash.equals(blockA1.header.hash)).toBe(true) + expect(blocks[2].hash.equals(blockB2.header.hash)).toBe(true) + expect(blocks[3].hash.equals(blockC3.header.hash)).toBe(true) + expect(blocks[4].hash.equals(blockD4.header.hash)).toBe(true) + + // should be able to start at the head + blocks = await AsyncUtils.materialize(chain.iterateToBlock(blockD4, genesis)) + expect(blocks.length).toBe(5) + expect(blocks[0].hash.equals(blockD4.header.hash)).toBe(true) + expect(blocks[1].hash.equals(blockC3.header.hash)).toBe(true) + expect(blocks[2].hash.equals(blockB2.header.hash)).toBe(true) + expect(blocks[3].hash.equals(blockA1.header.hash)).toBe(true) + expect(blocks[4].hash.equals(genesis.hash)).toBe(true) + + // should be able to start after the tail + blocks = await AsyncUtils.materialize(chain.iterateToBlock(blockA1, blockB3)) + expect(blocks.length).toBe(3) + expect(blocks[0].hash.equals(blockA1.header.hash)).toBe(true) + expect(blocks[1].hash.equals(blockB2.header.hash)).toBe(true) + expect(blocks[2].hash.equals(blockB3.header.hash)).toBe(true) + + // If we iterate the same block, it should be yielded once + blocks = await AsyncUtils.materialize(chain.iterateToBlock(genesis, genesis)) + expect(blocks.length).toBe(1) + expect(blocks[0].hash.equals(genesis.hash)).toBe(true) + }) + + it('iterateToBlock errors', async () => { + const { strategy, chain } = nodeTest + strategy.disableMiningReward() + + await nodeTest.node.seed() + const genesis = await chain.getGenesisHeader() + Assert.isNotNull(genesis) + + // G -> A1 -> A2 + // -> B1 
-> B2 + + const blockA1 = makeBlockAfter(chain, genesis) + const blockA2 = makeBlockAfter(chain, blockA1) + const blockB1 = makeBlockAfter(chain, genesis) + const blockB2 = makeBlockAfter(chain, blockB1) + + const { isAdded: isAddedA1 } = await chain.addBlock(blockA1) + const { isAdded: isAddedA2 } = await chain.addBlock(blockA2) + const { isAdded: isAddedB1 } = await chain.addBlock(blockB1) + const { isAdded: isAddedB2 } = await chain.addBlock(blockB2) + + expect(isAddedA1).toBe(true) + expect(isAddedA2).toBe(true) + expect(isAddedB1).toBe(true) + expect(isAddedB2).toBe(true) + + // Cannot iterate between 2 forks when graph path happen to make it seem like + // it can work, a few wrong blocks are yielded in this case + + // left-to-right + let result = AsyncUtils.materialize(chain.iterateToBlock(blockA1, blockB2)) + await expect(result).rejects.toThrowError( + 'Failed to iterate between blocks on diverging forks', + ) + // right-to-left + result = AsyncUtils.materialize(chain.iterateToBlock(blockB2, blockA1)) + await expect(result).rejects.toThrowError( + 'Failed to iterate between blocks on diverging forks', + ) + + // Cannot iterate between 2 forks when graph path looks immediately wrong + // because the graph path does not merge into the destination + + // left-to-right + result = AsyncUtils.materialize(chain.iterateToBlock(blockB1, blockA2)) + await expect(result).rejects.toThrowError( + 'Start path does not match from block, are they on a fork?', + ) + + // right-to-left + result = AsyncUtils.materialize(chain.iterateToBlock(blockA2, blockB1)) + await expect(result).rejects.toThrowError( + 'Start path does not match from block, are they on a fork?', + ) + }) + + it('iterateToHead', async () => { + const { strategy, chain } = nodeTest + strategy.disableMiningReward() + + // Iterate an empty chain + let blocks = await AsyncUtils.materialize(chain.iterateToHead()) + expect(blocks.length).toBe(0) + + // Add the genesis block + await nodeTest.node.seed() + const 
genesis = await chain.getGenesisHeader() + Assert.isNotNull(genesis) + + // Iterate with genesis block + blocks = await AsyncUtils.materialize(chain.iterateToHead()) + expect(blocks.length).toBe(1) + expect(blocks[0].hash.equals(genesis.hash)).toBe(true) + + // Add another block + const block = makeBlockAfter(chain, genesis) + await chain.addBlock(block) + + // iterate from genesis -> block + blocks = await AsyncUtils.materialize(chain.iterateToHead()) + expect(blocks.length).toBe(2) + expect(blocks[0].hash.equals(genesis.hash)).toBe(true) + expect(blocks[1].hash.equals(block.header.hash)).toBe(true) + }) + + it('findFork', async () => { + const { strategy, chain } = nodeTest + strategy.disableMiningReward() + + await nodeTest.node.seed() + const genesis = await chain.getGenesisHeader() + Assert.isNotNull(genesis) + + // G -> A1 -> A2 + // -> B2 -> B3 + // -> C3 -> C4 + // -> D4 + + const blockA1 = makeBlockAfter(chain, genesis) + const blockA2 = makeBlockAfter(chain, blockA1) + const blockB2 = makeBlockAfter(chain, blockA1) + const blockB3 = makeBlockAfter(chain, blockB2) + const blockC3 = makeBlockAfter(chain, blockB2) + const blockC4 = makeBlockAfter(chain, blockC3) + const blockD4 = makeBlockAfter(chain, blockC3) + + await addBlocksShuffle(chain, [ + blockA1, + blockA2, + blockB2, + blockB3, + blockC3, + blockC4, + blockD4, + ]) + + const { fork: fork1, isLinear: isLinear1 } = await chain.findFork(blockA1, blockA1) + expect(fork1?.hash.equals(blockA1.header.hash)).toBe(true) + expect(isLinear1).toBe(true) + + const { fork: fork2, isLinear: isLinear2 } = await chain.findFork(blockA1, blockA2) + expect(fork2?.hash.equals(blockA1.header.hash)).toBe(true) + expect(isLinear2).toBe(true) + + const { fork: fork3, isLinear: isLinear3 } = await chain.findFork(blockA2, blockB2) + expect(fork3?.hash.equals(blockA1.header.hash)).toBe(true) + expect(isLinear3).toBe(false) + + const { fork: fork4, isLinear: isLinear4 } = await chain.findFork(genesis, blockD4) + 
expect(fork4?.hash.equals(genesis.hash)).toBe(true) + expect(isLinear4).toBe(true) + + const { fork: fork5, isLinear: isLinear5 } = await chain.findFork(blockB3, blockD4) + expect(fork5?.hash.equals(blockB2.header.hash)).toBe(true) + expect(isLinear5).toBe(false) + + const { fork: fork6, isLinear: isLinear6 } = await chain.findFork(blockC4, blockD4) + expect(fork6?.hash.equals(blockC3.header.hash)).toBe(true) + expect(isLinear6).toBe(false) + }) + + it('should add notes to trees linearly', async () => { + const { node, chain } = nodeTest + await nodeTest.node.seed() + + const genesisNotes = await chain.notes.size() + const genesisNullifiers = await chain.nullifiers.size() + + const account = await useAccountFixture(node.accounts, 'account') + + const block = await useBlockFixture(node.captain, async () => + chain.newBlock( + [], + await chain.strategy.createMinersFee(BigInt(0), BigInt(2), account.spendingKey), + ), + ) + + expect(block.transactions.length).toBe(1) + const minersFee = block.transactions[0] + + await chain.addBlock(block) + expect(await chain.notes.size()).toBe(genesisNotes + 1) + expect(await chain.nullifiers.size()).toBe(genesisNullifiers) + + const treeLeaf = await chain.notes.getLeaf(genesisNotes) + const treeNote = treeLeaf.element + + minersFee.withReference(() => { + const minersFeeNote = minersFee.getNote(0) + expect(minersFeeNote.serialize().equals(treeNote.serialize())).toBe(true) + }) + }, 10000) +}) diff --git a/ironfish/src/captain/anchorChain/blockchain/index.ts b/ironfish/src/captain/anchorChain/blockchain/index.ts new file mode 100644 index 0000000000..f1b9378eca --- /dev/null +++ b/ironfish/src/captain/anchorChain/blockchain/index.ts @@ -0,0 +1,1804 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import Strategy from '../strategies' +import Transaction from '../strategies/Transaction' +import Block from './Block' +import { Validity } from './VerificationResult' +import BlockHeader, { BlockHeaderSerde, BlockHash } from './BlockHeader' +import { BufferSerde, JsonSerializable } from '../../../serde' +import Target from './Target' +import { Graph } from './Graph' +import { MetricsMonitor } from '../../../metrics' +import { Nullifier, NullifierHash } from '../nullifiers' +import { Event } from '../../../event' + +export const GRAPH_ID_NULL = 0 + +import { + HeadersSchema, + SCHEMA_VERSION, + SequenceToHashSchema, + TransactionsSchema, + GraphSchema, + HashToNextSchema, +} from './Schema' +import { + BufferArrayEncoding, + BufferEncoding, + IDatabase, + IDatabaseStore, + IDatabaseTransaction, + JsonEncoding, + SchemaValue, + StringEncoding, +} from '../../../storage' +import { Logger } from '../../../logger' +import { Verifier } from '../..' +import Serde from '../../../serde' + +export { default as Block, BlockSerde, SerializedBlock } from './Block' +export { + default as BlockHeader, + BlockHash, + BlockHeaderSerde, + SerializedBlockHeader, +} from './BlockHeader' +export { default as Target } from './Target' +export { Validity, VerificationResult } from './VerificationResult' +import MerkleTree from '../merkleTree' +import { Assert } from '../../../assert' +import { AsyncUtils } from '../../../utils' + +export interface AddBlockResult { + isAdded: boolean + connectedToGenesis?: boolean + isHeadChanged: boolean + resolvedGraph?: Graph +} + +/** + * The hash used in the "previousHash" field on the initial block in the + * chain. The initial block is intentionally invalid, so we need to special + * case it. 
+ */ +export const GENESIS_BLOCK_PREVIOUS = Buffer.alloc(32) +export const GENESIS_BLOCK_SEQUENCE = BigInt(1) + +export default class Blockchain< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> { + blockHeaderSerde: BlockHeaderSerde + blockHashSerde: BufferSerde + noteSerde: Serde + logger: Logger + genesisBlockHash: BlockHash | null + genesisHeader: BlockHeader | null + looseNotes: { [key: number]: E } + looseNullifiers: { [key: number]: Nullifier } + verifier: Verifier + + // Block is a header + transactions + // (both are indexed by block hash) + headers: IDatabaseStore> + transactions: IDatabaseStore> + + // Given a sequence, return an array of blocks with that sequence + sequenceToHash: IDatabaseStore + + // Given a hash, return an array of blocks pointing to it as previous + hashToNext: IDatabaseStore + + graphs: IDatabaseStore + + // Notes & Nullifiers Merkle Trees + notes: MerkleTree + nullifiers: MerkleTree + + // MetricsMonitor used to create and record performance metrics + metrics: MetricsMonitor + + onChainHeadChange = new Event<[hash: BlockHash]>() + + // When ever a block is added to the heaviest chain and the trees have been updated + onConnectBlock = new Event<[block: Block, tx?: IDatabaseTransaction]>() + + // When ever a block is removed from the heaviest chain, trees have not been updated yet + onDisconnectBlock = new Event< + [block: Block, tx?: IDatabaseTransaction] + >() + + /** + * Construct a new Blockchain + */ + private constructor( + readonly db: IDatabase, + readonly strategy: Strategy, + notes: MerkleTree, + nullifiers: MerkleTree, + logger: Logger, + metrics: MetricsMonitor, + ) { + this.blockHeaderSerde = new BlockHeaderSerde(strategy) + this.blockHashSerde = new BufferSerde(32) + this.noteSerde = notes.merkleHasher.elementSerde() + + this.logger = logger.withTag('blockchain') + this.genesisBlockHash = null + this.genesisHeader = null + this.metrics = metrics + this.notes = 
notes + this.nullifiers = nullifiers + this.looseNotes = {} + this.looseNullifiers = {} + + this.verifier = strategy.createVerifier(this) + + this.headers = db.addStore({ + version: SCHEMA_VERSION, + name: 'Headers', + keyEncoding: new BufferEncoding(), // block hash + valueEncoding: new JsonEncoding>>(), + }) + + this.transactions = db.addStore({ + version: SCHEMA_VERSION, + name: 'Transactions', + keyEncoding: new BufferEncoding(), // block hash + valueEncoding: new JsonEncoding(), + }) + + this.sequenceToHash = db.addStore({ + version: SCHEMA_VERSION, + name: 'SequenceToHash', + keyEncoding: new StringEncoding(), // serialized bigint sequence + valueEncoding: new BufferArrayEncoding(), // array of block hashes + }) + + this.hashToNext = db.addStore({ + version: SCHEMA_VERSION, + name: 'HashToNextHash', + keyEncoding: new BufferEncoding(), // serialized bigint sequence + valueEncoding: new BufferArrayEncoding(), // array of block hashes + }) + + this.graphs = db.addStore({ + version: SCHEMA_VERSION, + name: 'Graphs', + keyEncoding: new StringEncoding(), // graph id + valueEncoding: new JsonEncoding(), + }) + } + + /** + * Construct a new Blockchain backed by the given database + */ + static async new< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST + >( + db: IDatabase, + strategy: Strategy, + logger: Logger, + metrics?: MetricsMonitor, + ): Promise> { + metrics = metrics || new MetricsMonitor(logger) + const notes = await MerkleTree.new(strategy.noteHasher(), db, 'anchorchain notes', 32) + const nullifiers = await MerkleTree.new( + strategy.nullifierHasher(), + db, + 'anchorchain nullifiers', + 32, + ) + return Promise.resolve(new Blockchain(db, strategy, notes, nullifiers, logger, metrics)) + } + + async getBlockToNext(hash: BlockHash, tx?: IDatabaseTransaction): Promise { + return (await this.hashToNext.get(hash, tx)) || [] + } + + async setBlockToNext( + hash: BlockHash, + hashes: BlockHash[], + tx?: 
IDatabaseTransaction, + ): Promise { + await this.hashToNext.put(hash, hashes, tx) + } + + async setGraph(graph: Graph, tx: IDatabaseTransaction): Promise { + await this.graphs.put(graph.id.toString(), graph, tx) + } + + async getGraph(graphId: number, tx?: IDatabaseTransaction): Promise { + const graph = await this.graphs.get(graphId.toString(), tx) + if (!graph) { + this.logger.debug(`Could not find requested graph with id ${graphId}`) + return null + } + return graph + } + + async resolveBlockGraph(hash: BlockHash, tx?: IDatabaseTransaction): Promise { + const header = await this.headers.get(hash, tx) + + if (!header) { + this.logger.debug(`Couldn't get header ${hash.toString('hex')} when resolving graph`) + return null + } + + return await this.resolveGraph(header.graphId, tx) + } + + async resolveGraph(graphId: number, tx?: IDatabaseTransaction): Promise { + let graph = await this.getGraph(graphId, tx) + if (!graph) { + this.logger.debug(`Could not resolve graph with id ${graphId}`) + return null + } + + while (graph && graph.mergeId) { + graph = await this.getGraph(graph.mergeId, tx) + } + + return graph + } + + async getBlockGraph(hash: BlockHash, tx?: IDatabaseTransaction): Promise { + const header = await this.headers.get(hash, tx) + if (!header) { + this.logger.debug(`Couldn't get header ${hash.toString('hex')} when getting graph`) + return null + } + return this.getGraph(header.graphId, tx) + } + + async getTail( + hash: BlockHash, + tx?: IDatabaseTransaction, + ): Promise | null> { + const graph = await this.resolveBlockGraph(hash, tx) + if (!graph) { + return null + } + + const tailHash = graph.tailHash + const tailHeader = await this.headers.get(tailHash, tx) + + if (!tailHeader) { + this.logger.debug(`No tail for hash ${hash.toString('hex')}`) + return null + } + return this.blockHeaderSerde.deserialize(tailHeader) + } + + async getHead( + hash: BlockHash, + tx?: IDatabaseTransaction, + ): Promise | null> { + const graph = await 
this.resolveBlockGraph(hash, tx) + if (!graph) return null + + const heaviestHash = graph.heaviestHash + + if (!heaviestHash) { + this.logger.debug( + `Couldn't get heaviest hash ${hash.toString('hex')} for graph ${ + graph.id + } when getting head for graph`, + ) + return null + } + + const header = await this.headers.get(heaviestHash, tx) + if (!header) { + this.logger.debug( + `Couldn't get header ${hash.toString('hex')} when getting head for graph`, + ) + return null + } + + return this.blockHeaderSerde.deserialize(header) + } + + async getLatest( + hash: BlockHash, + tx?: IDatabaseTransaction, + ): Promise | null> { + const graph = await this.resolveBlockGraph(hash, tx) + if (!graph) return null + const header = await this.headers.get(graph.latestHash, tx) + if (!header) { + this.logger.debug( + `Couldn't get header ${hash.toString('hex')} when getting head for graph`, + ) + return null + } + + return this.blockHeaderSerde.deserialize(header) + } + + async getBlockHeader( + hash: BlockHash, + tx?: IDatabaseTransaction, + ): Promise | null> { + const header = await this.headers.get(hash, tx) + return header ? this.blockHeaderSerde.deserialize(header) : null + } + + /** + * Saves block header, transaction and updates sequenceToHash + * without updating the chain (e.g. 
trees, graph, heaviest head, and so on) + */ + private async setBlock( + block: Block, + tx?: IDatabaseTransaction, + ): Promise { + return this.db.withTransaction( + tx, + [this.headers, this.transactions, this.sequenceToHash], + 'readwrite', + async (tx) => { + const hash = block.header.hash + Assert.isNotNull(hash, 'Header hash should be set before header is saved') + this.logger.debug(`Setting block ${hash.toString('hex')} ${block.header.sequence}`) + + await this.headers.put(hash, this.blockHeaderSerde.serialize(block.header), tx) + + await Promise.all([ + this.transactions.add( + hash, + block.transactions.map((t) => this.strategy.transactionSerde().serialize(t)), + tx, + ), + this.sequenceToHash + .get(block.header.sequence.toString(), tx) + .then((sequences: BlockHash[] = []) => { + sequences.push(hash) + return this.sequenceToHash.put(block.header.sequence.toString(), sequences, tx) + }), + ]) + }, + ) + } + + /** + * This function produces a graph path for a block, which is an array of graph ids + * going left-to-right, starting at `toGraphId`. A graph is a compressed version of + * the block chain that records merge points and forks. Consider this graph + * + * A1 -> A2 -> A3 + * -> B2 -> B2 + * -> C3 + * + * A graph path from C3 -> A1 would be [A, B, C]. Using this we can make decisions about forks + * and specfically allows us to iterate from left to right. See `iterateToBlock` for more information. 
+ */ + protected async getBlockGraphPath( + blockOrHash: BlockHash | BlockHeader, + toGraphId: number | null = null, + tx?: IDatabaseTransaction, + ): Promise { + // If we are a blockHash + if (blockOrHash instanceof Buffer) { + const header = await this.getBlockHeader(blockOrHash, tx) + Assert.isNotNull(header) + blockOrHash = header + } + + if (toGraphId === GRAPH_ID_NULL) toGraphId = null + return await this.getGraphPath(blockOrHash.graphId, toGraphId, tx) + } + + protected async getGraphPath( + graphIdOrGraph: number | Graph, + toGraphId: number | null = null, + tx?: IDatabaseTransaction, + ): Promise { + let graphId: number | null = null + let graph: Graph | null = null + + if (typeof graphIdOrGraph === 'number') { + graphId = graphIdOrGraph + graph = await this.getGraph(graphIdOrGraph, tx) + } else { + graphId = graphIdOrGraph.id + graph = graphIdOrGraph + } + + Assert.isNotNull(graph) + const path = [graphId] + + while (graph.mergeId) { + // Used to get a graph path ending at a certain blocks graph + if (toGraphId !== null && graph.id === toGraphId) break + + graph = await this.getGraph(graph.mergeId, tx) + Assert.isNotNull(graph) + path.push(graph.id) + } + + path.reverse() + return path + } + + /** + * Yields all block between 2 blocks including the two blocks + * The blocks must have a fast forward linear path between them. + * If the same block is passed in, then the block will be yielded + * once. It supports both left-to-right and right-to-left iteration. + * + * If the two blocks are on diverging forks, blocks will be yielded + * until it realizes it cannot find the target block and then an error + * will be thrown + * + * As an example, take this graph and consider iterateToBlock(A1, B2) + * A1 -> A2 -> A3 + * -> B2 -> B2 + * -> C3 + * + * First, this is left-to-right iteration. The way this is done is + * to first get the graph path of C3, which results in Array + * which is [A, B, C]. 
Then start at the beginning, and each time + * there are more than 1 block, look to see which of the blocks is the + * next step in the graph path. Let's see we would move from A1 -> C3 + * in the example above. + * + * 1. Get path to C3: [A, B, C] + * 2. Start at A1 + * 3. Load A2, B2 + * 4. B2 is graph B, the next graph we need so go there + * 5. Load B2, C3 + * 6. C3 is graph C, the next graph we need so go there + * 7. Current block is target block, stop. + * + * iterateToBlock(B2, A1) would be much simpler, and we just use the + * Block.previousBlockHash to go backwards until we find A1. + * + * @param from the block to start iterating from + * @param to the block to start iterating to + * @param tx + * @yields BlockHeaders between from and to + * @throws Error if the blocks are on diverging forks after yielding wrong blocks + * @throws Error if you try to iterate right-to-left + */ + async *iterateToBlock( + from: BlockHeader | Block, + to: BlockHeader | Block, + tx?: IDatabaseTransaction, + ): AsyncGenerator, void, void> { + if (from instanceof Block) from = from.header + if (to instanceof Block) to = to.header + + if (from.graphId === GRAPH_ID_NULL) return + if (to.graphId === GRAPH_ID_NULL) return + + // right-to-left iteration + if (from.sequence >= to.sequence) { + const path = await this.getBlockGraphPath(from.hash, to.graphId, tx) + + if (path[0] !== to.graphId) { + throw new Error('Start path does not match from block, are they on a fork?') + } + + let current = from + yield current + + while ( + current.sequence >= to.sequence && + current.sequence >= GENESIS_BLOCK_SEQUENCE && + !current.hash.equals(to.hash) + ) { + const header = await this.getBlockHeader(current.previousBlockHash, tx) + Assert.isNotNull(header) + yield header + current = header + } + + if (!current.hash.equals(to.hash)) { + throw new Error(`Failed to iterate between blocks on diverging forks`) + } + } + // left-to-right iteration + else { + const path = await 
this.getBlockGraphPath(to.hash, from.graphId, tx) + let pathIndex = 0 + + if (path[pathIndex] !== from.graphId) { + throw new Error('Start path does not match from block, are they on a fork?') + } + + let current = from + yield current + + // left-to-right iterate the number of sequences there are between from -> to + for (let i = current.sequence; i < to.sequence; ++i) { + const nextBlockHashes = await this.getBlockToNext(current.hash, tx) + + let nextGraphHeader: BlockHeader | null = null + let currentGraphHeader: BlockHeader | null = null + + for (const nextBlockHash of nextBlockHashes) { + const nextBlockHeader = await this.getBlockHeader(nextBlockHash) + Assert.isNotNull(nextBlockHeader) + + // We found a block on the current graph + if (nextBlockHeader.graphId === path[pathIndex]) { + currentGraphHeader = nextBlockHeader + } + + // We found a block on the next graph + if (pathIndex < path.length - 1 && nextBlockHeader.graphId === path[pathIndex + 1]) { + nextGraphHeader = nextBlockHeader + pathIndex++ + } + } + + if (nextGraphHeader) { + current = nextGraphHeader + yield nextGraphHeader + } else if (currentGraphHeader) { + current = currentGraphHeader + yield currentGraphHeader + } else { + throw new Error('No next block was found in our current or next graph') + } + } + + if (!current.hash.equals(to.hash)) { + throw new Error(`Failed to iterate between blocks on diverging forks`) + } + } + } + + /** + * Like iterateToBlock except it always iterates between the genesis block + * and the heaviest head + */ + async *iterateToHead( + tx?: IDatabaseTransaction, + ): AsyncGenerator, void, void> { + const head = await this.getHeaviestHead() + if (!head) return + + for await (const block of this.iterateFromGenesis(head, tx)) { + yield block + } + } + + /** + * This function will find the forking point of two blocks if it exists, or return null + * If the same hash is specified, the same block will be returned. 
If one block is a linear + * fast forward to the other with no forks, then the earlier block will be returned. + * + * @param fromHash the hash of the first block to find the fork for + * @param toHash the hash of the second block to find the fork for + * @param tx + * @returns a BlockHeader if the fork point was found, or null if it was not + */ + async findFork( + fromHash: BlockHash | BlockHeader | Block, + toHash: BlockHash | BlockHeader | Block, + tx?: IDatabaseTransaction, + ): Promise<{ fork: BlockHeader | null; isLinear: boolean | null }> { + // Gets a graph tails previous block header + const getGraphTailPrev = async ( + graph: Graph, + tx?: IDatabaseTransaction, + ): Promise> => { + const tailHeader = await this.getBlockHeader(graph.tailHash, tx) + Assert.isNotNull(tailHeader) + + const prevTailHeader = await this.getBlockHeader(tailHeader.previousBlockHash, tx) + Assert.isNotNull(prevTailHeader) + + return prevTailHeader + } + + let [fromHeader, toHeader] = await this.getHeadersFromInput([fromHash, toHash], tx) + + // Checking the same block + if (fromHeader.hash.equals(toHeader.hash)) { + return { fork: fromHeader, isLinear: true } + } + + let fromGraph = await this.getGraph(fromHeader.graphId, tx) + let toGraph = await this.getGraph(toHeader.graphId, tx) + + Assert.isNotNull(fromGraph) + Assert.isNotNull(toGraph) + + let fromMoved = false + let toMoved = false + + // eslint-disable-next-line no-constant-condition + while (true) { + // If both blocks are on the same chain, return the one with the lower sequence + if (toGraph.id === fromGraph.id) { + const fork = fromHeader.sequence < toHeader.sequence ? 
fromHeader : toHeader + const isLinear = !fromMoved || !toMoved + return { fork, isLinear } + } + + // If one graph merges into the other, the fork point is the previous block of the tail of the + // merging graph if there is an actual fork point, like in the example of A3 -> B3, the fork point is A2 + // because graph B merges into A, and graph B's tail is B3, so the previous block of the tail B3 is A2 + // A1 -> A2 -> A3 + // -> B3 + // + // Even though we found the merge point of the chains, our block could be further back along the merged + // into chain in some cases. Consider finding A1 -> B4 the merge point of graphs A and B is A2, but the + // fork point is actually A1 + // A1 -> A2 -> A3 + // -> B3 -> B4 + if (toGraph.mergeId === fromGraph.id) { + const mergeHeader = await getGraphTailPrev(toGraph, tx) + const isLinear = mergeHeader.sequence >= fromHeader.sequence + const fork = isLinear ? fromHeader : mergeHeader + return { fork, isLinear } + } + + if (fromGraph.mergeId === toGraph.id) { + const mergeHeader = await getGraphTailPrev(fromGraph, tx) + const isLinear = mergeHeader.sequence >= toHeader.sequence + const fork = isLinear ? 
toHeader : mergeHeader + return { fork, isLinear } + } + + const fromTailHeader: BlockHeader | null = await this.getBlockHeader( + fromGraph.tailHash, + tx, + ) + const toTailHeader: BlockHeader | null = await this.getBlockHeader( + toGraph.tailHash, + tx, + ) + + Assert.isNotNull(fromTailHeader) + Assert.isNotNull(toTailHeader) + + if (fromTailHeader.sequence >= toTailHeader.sequence) { + if (fromGraph.mergeId === null) break + fromHeader = await getGraphTailPrev(fromGraph, tx) + fromGraph = await this.getGraph(fromGraph.mergeId, tx) + Assert.isNotNull(fromHeader) + Assert.isNotNull(fromGraph) + fromMoved = true + } + + if (toTailHeader.sequence >= fromTailHeader.sequence) { + if (toGraph.mergeId === null) break + toHeader = await getGraphTailPrev(toGraph, tx) + toGraph = await this.getGraph(toGraph.mergeId, tx) + Assert.isNotNull(toHeader) + Assert.isNotNull(toGraph) + toMoved = true + } + } + + return { fork: null, isLinear: null } + } + + /** + * Like iterateToBlock except it always iterates between the genesis block + * and `to` + */ + async *iterateFromGenesis( + to: BlockHeader | Block, + tx?: IDatabaseTransaction, + ): AsyncGenerator, void, void> { + const genesis = await this.getGenesisHeader() + if (!genesis) return + + for await (const block of this.iterateToBlock(genesis, to, tx)) { + yield block + } + } + + /** + * This is the main and only method to use for adding new blocks. + * This updates the trees, the graphs, the heaviest head (and latest head) + * and updates the blockchain accordingly. + * @returns true if the block has been added or if it already exists. Returns false + * if it was invalid. 
+ */ + async addBlock( + networkBlockToAdd: Block, + tx?: IDatabaseTransaction, + ): Promise { + const addBlockResult: AddBlockResult = await this.db.withTransaction( + tx, + [ + this.notes.counter, + this.notes.leaves, + this.notes.nodes, + this.nullifiers.counter, + this.nullifiers.leaves, + this.nullifiers.nodes, + this.headers, + this.transactions, + this.graphs, + this.hashToNext, + this.sequenceToHash, + ], + 'readwrite', + async (tx) => { + const hash = networkBlockToAdd.header.recomputeHash() + const genesis = await this.getGenesisHash(tx) + + if (await this.getBlockHeader(hash, tx)) { + const resolvedGraph = await this.resolveBlockGraph(hash, tx) + Assert.isNotNull(resolvedGraph) + const connectedToGenesis = + !!genesis && this.blockHashSerde.equals(resolvedGraph.tailHash, genesis) + + return { + isAdded: true, + isHeadChanged: false, + resolvedGraph: resolvedGraph, + connectedToGenesis: connectedToGenesis, + } + } + const block = networkBlockToAdd + block.header.isValid = false + block.header.work = BigInt(0) + block.header.graphId = GRAPH_ID_NULL + block.header.hash = hash + block.header.count = 0 + + // the block this block is pointing to + const previousBlockHeader = await this.getBlockHeader( + block.header.previousBlockHash, + tx, + ) + + const previousHashes = await this.getBlockToNext(block.header.previousBlockHash, tx) + const previousTail = previousBlockHeader + ? await this.getTail(block.header.previousBlockHash, tx) + : null + + // blocks pointing at us + const nextHashes = await this.getBlockToNext(hash, tx) + + // Check that we don't already have a genesis. We pass validation for genesis + // so want to be careful that no malicious blocks set their previousHash to + // GENESIS_BLOCK_PREVIOUS + const addingGenesis = + !genesis && + this.blockHashSerde.equals(block.header.previousBlockHash, GENESIS_BLOCK_PREVIOUS) + + // Adding to a genesis block chain? Or adding the genesis block itself? 
+ const addingToGenesis = + addingGenesis || + (!!previousTail && + !!genesis && + this.blockHashSerde.equals(previousTail.hash, genesis)) + + // Check if we can validate this block (blocks can only be fully validated if + // they are valid *and* connected to genesis, so we check validation in case + // this is the right most block being added to the graph connected to genesis) + const verification = await this.verifier.isAddBlockValid( + previousBlockHeader, + block, + addingGenesis, + addingToGenesis, + tx, + ) + + if (previousBlockHeader && verification.valid == Validity.No) + return { + isHeadChanged: false, + isAdded: false, + } + + // Check if by adding this block we can validate next blocks pointing to it. + // A block is valid if it's internally valid, valid against previous block, + // and in a chain of valid blocks connected to genesis. + // Invalid next blocks are filtered out here so they don't get + // connected to the chain. + let nextBlocks = await Promise.all( + nextHashes.map(async (h) => await this.getBlock(h, tx)), + ) + nextBlocks = nextBlocks.filter((b) => + this.verifier.isAddBlockValid(block.header, b, addingGenesis, addingToGenesis, tx), + ) + + const nextBlockHeaders = nextBlocks + .filter((b: T | null): b is T => b !== null) + .map((b) => b.header) + + // We are not adding to the genesis block graph (adding to an island) + if (!addingToGenesis) { + // get resolved graph, return tail + const [_graph, resolved] = await this.addToGraphs( + previousHashes, + previousBlockHeader, + block.header, + nextBlockHeaders, + tx, + ) + + this.logger.debug( + `Adding a disjoint block ${block.header.hash.toString('hex')} ${ + block.header.sequence + }`, + ) + + await this.setBlock(block, tx) + + return { + isHeadChanged: false, + resolvedGraph: resolved, + connectedToGenesis: false, + isAdded: true, + } + } + + let graph: Graph | null = null + let resolved: Graph | null = null + + // Set to true when we detect a linear fast forward from the genesis block 
+ let isFastForward = false + + if (previousBlockHeader) { + // We are adding to the genesis block chain + const [g, r] = await this.addToGraphs( + previousHashes, + previousBlockHeader, + block.header, + nextBlockHeaders, + tx, + ) + + graph = g + resolved = r + + isFastForward = this.blockHashSerde.equals( + previousBlockHeader.hash, + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + resolved.heaviestHash!, + ) + } else { + // We are adding the genesis block + const [g, r] = await this.addToGraphs([], null, block.header, nextBlockHeaders, tx) + graph = g + resolved = r + + isFastForward = true + } + + // Update the Block.header.work (accumulated work) of all nodes to the right + // Also look for the new heaviest node now that we have new nodes connected + const oldHeaviest = resolved.heaviestHash + ? await this.getBlockHeader(resolved.heaviestHash, tx) + : null + + await this.updateGraph( + resolved, + previousBlockHeader, + block.header, + nextBlockHeaders, + { + heaviest: oldHeaviest, + }, + tx, + ) + + // did the heaviest block connecting to genesis change? + const genesisHeaviestChanged = + genesis && + this.blockHashSerde.equals(resolved.tailHash, genesis) && + oldHeaviest && + resolved && + resolved.heaviestHash && + !this.blockHashSerde.equals(oldHeaviest.hash, resolved.heaviestHash) + + await this.setBlock(block, tx) + await this.setGraph(resolved, tx) + await this.setGraph(graph, tx) + + let headChanged = false + if (genesisHeaviestChanged && resolved.heaviestHash) { + this.logger.debug( + `Heaviest Changed ${oldHeaviest ? oldHeaviest?.hash.toString('hex') : ''} -> ${ + resolved.heaviestHash ? resolved.heaviestHash.toString('hex') : '' + }: ${isFastForward ? 
'LINEAR' : 'FORKED'}`, + ) + headChanged = true + + if (isFastForward) { + await this.updateTreesBlockToHead(block, addingGenesis, tx) + } else { + Assert.isNotNull(oldHeaviest) + await this.updateTreesWithFork(resolved.heaviestHash, oldHeaviest, tx) + } + } + + if (addingGenesis) { + await this.addToTreesFromBlocks([block], 0, 0, tx) + } + + return { + isHeadChanged: headChanged, + resolvedGraph: resolved, + isAdded: true, + connectedToGenesis: true, + } + }, + ) + + if ( + addBlockResult.isHeadChanged && + addBlockResult.resolvedGraph && + addBlockResult.resolvedGraph.heaviestHash + ) { + this.onChainHeadChange.emit(addBlockResult.resolvedGraph.heaviestHash) + } + return addBlockResult + } + + // Sanity check to check that heaviest head exists, and trees match it + // If we just added a block that puts trees in a bad state, abort it + // as its incorrect + async checkTreeMatchesHeaviest( + // block: Block, + tx?: IDatabaseTransaction, + ): Promise { + const noteRoot = await this.notes.rootHash(tx) + const nullifierRoot = await this.nullifiers.rootHash(tx) + + const heaviestHead = await this.getHeaviestHead(tx) + if (!heaviestHead) { + this.logger.error(`No heaviest head — should never happen`) + return false + } + + const heaviestBlock = await this.getBlock(heaviestHead.hash, tx) + if (!heaviestBlock) { + this.logger.error(`No heaviest block — should never happen`) + return false + } + + if ( + !this.strategy + .noteHasher() + .hashSerde() + .equals(noteRoot, heaviestBlock.header.noteCommitment.commitment) + ) { + const blockNoteSize = heaviestBlock.header.noteCommitment.size + const noteSize = await this.notes.size(tx) + + const noteRootSerialized = this.strategy.noteHasher().hashSerde().serialize(noteRoot) + const blockRootSerialized = this.strategy + .noteHasher() + .hashSerde() + .serialize(heaviestBlock.header.noteCommitment.commitment) + + this.logger.error( + `Note Merkle Tree is in a BAD STATE: \n + Heviest head is 
${heaviestBlock.header.hash.toString('hex')} seq ${ + heaviestBlock.header.sequence + } + Note tree size: ${noteSize} \n + Note root: ${ + noteRootSerialized ? (noteRootSerialized as Buffer).toString('hex') : '???' + } \n + Block commitment tree size: ${blockNoteSize}\n + Block commitment: ${ + blockRootSerialized ? (blockRootSerialized as Buffer).toString('hex') : '???' + }\n`, + ) + + this.logger.debug(`TREES IN BAD STATE`) + return false + } + + if ( + !this.strategy + .nullifierHasher() + .hashSerde() + .equals(nullifierRoot, heaviestBlock.header.nullifierCommitment.commitment) + ) { + const nullifierSize = await this.nullifiers.size(tx) + const blockNullifierSize = heaviestBlock.header.nullifierCommitment.size + this.logger.error( + `After adding block ${heaviestBlock.header.hash.toString('hex')} seq ${ + heaviestBlock.header.sequence + } Nullifier Merkle Tree is in a BAD STATE: \n + Nullifier tree size: ${nullifierSize} \n + Block commitment tree size: ${blockNullifierSize}`, + ) + this.logger.debug(`TREES IN BAD STATE`) + return false + } + + return true + } + + private async updateTreesWithFork( + newHeaviestHead: BlockHash, + oldHeaviestHead: BlockHeader, + tx: IDatabaseTransaction, + ): Promise { + const newHeaviestHeadHeader = await this.getBlockHeader(newHeaviestHead, tx) + Assert.isNotNull(newHeaviestHeadHeader) + + // Step 0: remove loost notes and loose nullifiers from queue as they are stale + this.looseNotes = {} + this.looseNullifiers = {} + + // Step 1: Find the fork between the two heads + const { fork } = await this.findFork(oldHeaviestHead, newHeaviestHead, tx) + Assert.isNotNull(fork, `No fork found in updateTreesWithFork`) + + // Step 2: Collect all the blocks from the old head to the fork + const removedIter = this.iterateToBlock(oldHeaviestHead, fork, tx) + const removedHeaders = await AsyncUtils.materialize(removedIter) + const removedBlocks = await Promise.all( + removedHeaders.reverse().map((h) => this.getBlock(h, tx)), + ) + + for 
(const block of removedBlocks) { + Assert.isNotNull(block) + this.onDisconnectBlock.emit(block, tx) + } + + // Step 3. Truncate trees to the fork + await Promise.all([ + this.notes.truncate(fork.noteCommitment.size, tx), + this.nullifiers.truncate(fork.nullifierCommitment.size, tx), + ]) + + // Step 3. Collect all the blocks from the fork to the new head + const addedIter = this.iterateToBlock(newHeaviestHeadHeader, fork, tx) + const addedHeaders = await AsyncUtils.materialize(addedIter) + const addedBlocks = await Promise.all( + addedHeaders.reverse().map(async (h) => { + const block = await this.getBlock(h, tx) + Assert.isNotNull(block) + return block + }), + ) + + // the forking point block is already in the chain (no need to re-add it) + addedBlocks.shift() + + // Step 4. Add the new blocks to the trees + await this.addToTreesFromBlocks( + addedBlocks, + fork.noteCommitment.size, + fork.nullifierCommitment.size, + tx, + ) + } + + private async updateTreesBlockToHead( + block: Block, + addingGenesis: boolean, + tx: IDatabaseTransaction, + ): Promise { + const blocks: Block[] = [] + + const heaviestHead = await this.getHeaviestHead(tx) + if (!heaviestHead) { + this.logger.error( + `While updateTreesBlockToHead heaviestHead was null — should never happen`, + ) + return + } + + const heaviestBlock = await this.getBlock(heaviestHead.hash, tx) + if (!heaviestBlock) { + this.logger.error( + `While updateTreesBlockToHead heaviestHead was null — should never happen`, + ) + return + } + + // we'll walk from heaviest to given block as we'll need + // to update trees with all those blocks + let currentBlock: Block | null = heaviestBlock + while ( + currentBlock && + !this.blockHashSerde.equals(currentBlock.header.hash, block.header.hash) + ) { + blocks.unshift(currentBlock) + currentBlock = await this.getBlock(currentBlock.header.previousBlockHash, tx) + } + + blocks.unshift(block) + + if (addingGenesis && blocks.length > 1) { + throw new Error(`Adding genesis out of order 
is not allowed`) + } + + const previousBlockHeader = await this.getBlockHeader(block.header.previousBlockHash, tx) + + await this.addToTreesFromBlocks( + blocks, + previousBlockHeader ? previousBlockHeader.noteCommitment.size : 0, + previousBlockHeader ? previousBlockHeader.nullifierCommitment.size : 0, + tx, + ) + } + + private async addToTreesFromBlocks( + blocks: Block[], + notesIndex: number, + nullifierIndex: number, + tx: IDatabaseTransaction, + ): Promise { + for (const block of blocks) { + await block.withTransactionReferences(async () => { + for (const note of block.allNotes()) { + await this.addNote(notesIndex, note, tx) + notesIndex++ + } + + for (const spend of block.spends()) { + await this.addNullifier(nullifierIndex, spend.nullifier, tx) + nullifierIndex++ + } + }) + + this.onConnectBlock.emit(block, tx) + } + } + + private async addToGraphs( + previousHashes: BlockHash[], + previous: BlockHeader | null, + current: BlockHeader, + nexts: BlockHeader[], + tx: IDatabaseTransaction, + ): Promise<[Graph, Graph]> { + let graph: Graph | null = null + let resolved: Graph | null = null + let latest: BlockHeader | null = null + + // Connecting block into previous block's graph + if (previous && previous.count === 0) { + current.graphId = previous.graphId + previous.count++ + + graph = await this.getGraph(previous.graphId, tx) + resolved = await this.resolveGraph(previous.graphId, tx) + Assert.isNotNull(resolved) + + latest = await this.getBlockHeader(resolved.latestHash, tx) + Assert.isNotNull(latest) + + if (this.isBlockLater(current, latest)) { + latest = current + resolved.latestHash = current.hash + } + + await this.setHeader(previous, tx) + await this.setHeader(current, tx) + await this.setGraph(resolved, tx) + } + + // Merge all nexts's graphs into block's graph, but choose one for + // block to take if it doesn't have one already + if (nexts.length) { + for (const next of nexts) { + const nextGraph = await this.getGraph(next.graphId, tx) + 
Assert.isNotNull(nextGraph) + + const nextLatest = await this.getBlockHeader(nextGraph.latestHash, tx) + Assert.isNotNull(nextLatest) + + if ((graph === null || resolved === null) && nextGraph) { + // If the block has no graph yet just take it from the right block + graph = nextGraph + resolved = nextGraph + latest = nextLatest + current.graphId = nextGraph.id + + // block is the newest lowest sequence in the adopted graph + graph.tailHash = current.hash + } else { + // merge right graph into the left graph + nextGraph.mergeId = current.graphId + await this.setGraph(nextGraph, tx) + + // when merging a graph, check if we found a new latest from the right graph + Assert.isNotNull(latest, `Latest is not truthy`) + if (resolved && this.isBlockLater(nextLatest, latest)) { + latest = nextLatest + resolved.latestHash = nextLatest.hash + } + } + + current.count++ + } + + Assert.isNotNull(graph) + Assert.isNotNull(resolved) + + if (graph && resolved) { + await this.setGraph(graph, tx) + await this.setGraph(resolved, tx) + } + + await this.setHeader(current, tx) + } + + if (current.graphId == GRAPH_ID_NULL) { + // Create a new graph for this floating block not connected to anything + const graphId = Math.round(Math.random() * 10 ** 16) + current.graphId = graphId + + graph = { + id: graphId, + mergeId: null, + tailHash: current.hash, + heaviestHash: null, + latestHash: current.hash, + } + + latest = current + resolved = graph + + await this.setGraph(graph, tx) + await this.setHeader(current, tx) + } + + // Now merge our current block's graph into the previous block's graph + if (previous && previous.graphId !== graph?.id) { + Assert.isNotNull(latest) + Assert.isNotNull(graph) + + const prevGraph = await this.getGraph(previous.graphId, tx) + Assert.isNotNull(prevGraph) + const prevResolved = await this.resolveGraph(previous.graphId, tx) + Assert.isNotNull(prevResolved) + const prevTail = await this.getBlockHeader(prevResolved.tailHash, tx) + Assert.isNotNull(prevTail) + 
const prevLatest = await this.getBlockHeader(prevResolved.latestHash, tx) + Assert.isNotNull(prevLatest) + + if (this.isBlockLater(latest, prevLatest)) { + latest = current + prevResolved.latestHash = current.hash + } + + previous.count++ + graph.mergeId = prevGraph.id + resolved = prevResolved + + await this.setGraph(graph, tx) + await this.setHeader(previous, tx) + await this.setGraph(prevGraph, tx) + } + + previousHashes.push(current.hash) + await this.hashToNext.put(current.previousBlockHash, previousHashes, tx) + + if (!graph || !resolved) throw new Error('Block should always have a graph') + return [graph, resolved] + } + + private async updateGraph( + resolved: Graph, + previous: BlockHeader | null, + current: BlockHeader, + nexts: (BlockHeader | null)[], + memo: { heaviest: BlockHeader | null }, + tx: IDatabaseTransaction, + ): Promise { + // Update current blocks work from the previous block + current.work = current.target.toDifficulty() + if (previous) current.work = BigInt(current.work) + BigInt(previous.work) + + await this.setHeader(current, tx) + + // Look for a new heaviest head for the graph + if (memo.heaviest === null || this.isBlockHeavier(current, memo.heaviest)) { + memo.heaviest = current + resolved.heaviestHash = current.hash + } + + // Now recurse down to all of the next blocks check those too + for (const next of nexts) { + if (!next) continue + const nextHashes = await this.getBlockToNext(next.hash, tx) + + const nextNexts = await Promise.all( + nextHashes.map(async (h) => await this.getBlockHeader(h), tx), + ) + await this.updateGraph(resolved, current, next, nextNexts, memo, tx) + } + } + + /** + * Get the block with the given hash, if it exists. + */ + async getBlock( + hashOrHeader: BlockHash | BlockHeader, + tx?: IDatabaseTransaction, + ): Promise | null> { + let header = hashOrHeader instanceof BlockHeader ? hashOrHeader : null + const hash = hashOrHeader instanceof BlockHeader ? 
hashOrHeader.hash : hashOrHeader + + return this.db.withTransaction( + tx, + [this.headers, this.transactions], + 'read', + async (tx) => { + const [serializedHeader, transactions] = await Promise.all([ + header ? null : this.headers.get(hash, tx), + this.transactions.get(hash, tx), + ]) + + if (serializedHeader) { + header = this.blockHeaderSerde.deserialize(serializedHeader) + } + + if (header && transactions) { + return new Block( + header, + transactions.map((t) => this.strategy.transactionSerde().deserialize(t)), + ) + } else if (header || transactions) { + throw new Error( + `DB has inconsistent state header/transaction state for ${hash.toString('hex')}`, + ) + } + + return null + }, + ) + } + + /** + * Returns true if the blockchain has a block at the given hash + */ + async hasAtHash(hash: BlockHash, tx?: IDatabaseTransaction): Promise { + const header = await this.headers.get(hash, tx) + return !!header + } + + private async setHeader( + header: BlockHeader, + tx: IDatabaseTransaction, + ): Promise { + const serializedBlockHeader = this.blockHeaderSerde.serialize(header) + const hash = header.hash + await this.headers.put(hash, serializedBlockHeader, tx) + } + + async getHeaviestHead( + tx?: IDatabaseTransaction, + ): Promise | null> { + const genesisHash = await this.getGenesisHash(tx) + if (!genesisHash) return null + return await this.getHead(genesisHash, tx) + } + + async getLatestHead( + tx?: IDatabaseTransaction, + ): Promise | null> { + const genesisHash = await this.getGenesisHash(tx) + if (!genesisHash) return null + return await this.getLatest(genesisHash, tx) + } + + /** + * Returns true if the blockchain has any blocks at the given sequence + */ + async hasAtSequence(sequence: BigInt, tx?: IDatabaseTransaction): Promise { + const hashes = await this.getAtSequence(sequence, tx) + return !!hashes && hashes.length > 0 + } + + /** + * Returns an array of hashes for blocks at the given sequence + */ + async getAtSequence(sequence: BigInt, tx?: 
IDatabaseTransaction): Promise { + return (await this.sequenceToHash.get(sequence.toString(), tx)) || [] + } + + /** + * Create a new block to be mined. Excluding the randomness, the new block is + * guaranteed to be valid with the current state of the chain. + * If the chain's head does not change, then the new block can be added + * to the chain, once its randomness is set to something that meets the + * target of the chain. + * + * If a valid block cannot be constructed, an error is thrown. This should + * only happen if any of the transactions or the miner's fee + * is invalid. + * + * Mining is the process of adjusting the randomness and calculating the hash + * until you find a hash that is lower than the block's target. That does not + * happen in this function. + * + * After calling this function, the chain itself remains unchanged. No notes + * or nullifiers have been added to the tree, and no blocks have been added + * to the chain, including the newly minted one. + */ + async newBlock( + userTransactions: T[], + minersFee: T, + graffiti?: Buffer, + ): Promise> { + const transactions = userTransactions.concat([minersFee]) + return await this.db.transaction( + this.notes.db + .getStores() + .concat(this.nullifiers.db.getStores()) + .concat([ + this.headers, + this.transactions, + this.graphs, + this.hashToNext, + this.sequenceToHash, + ]), + 'readwrite', + async (tx) => { + const originalNoteSize = await this.notes.size(tx) + const originalNullifierSize = await this.nullifiers.size(tx) + + let previousBlockHash + let previousSequence + let target + const timestamp = new Date() + + const heaviestHead = await this.getHeaviestHead(tx) + if (!heaviestHead) { + previousBlockHash = GENESIS_BLOCK_PREVIOUS + previousSequence = BigInt(0) + target = Target.initialTarget() + } else { + if ( + originalNoteSize !== heaviestHead.noteCommitment.size || + originalNullifierSize !== heaviestHead.nullifierCommitment.size + ) { + throw new Error( + `Heaviest head has 
${heaviestHead.noteCommitment.size} notes and ${heaviestHead.nullifierCommitment.size} nullifiers but tree has ${originalNoteSize} and ${originalNullifierSize} nullifiers`, + ) + } + previousBlockHash = heaviestHead.hash + previousSequence = heaviestHead.sequence + const previousHeader = await this.getBlockHeader(heaviestHead.previousBlockHash, tx) + if (!previousHeader && previousSequence !== BigInt(1)) { + throw new Error('There is no previous block to calculate a target') + } + target = Target.calculateTarget( + timestamp, + heaviestHead.timestamp, + heaviestHead.target, + ) + } + + for (const transaction of transactions) { + for (const note of transaction.notes()) { + await this.notes.add(note, tx) + } + for (const spend of transaction.spends()) { + await this.nullifiers.add(spend.nullifier, tx) + } + } + + const noteCommitment = { + commitment: await this.notes.rootHash(tx), + size: await this.notes.size(tx), + } + const nullifierCommitment = { + commitment: await this.nullifiers.rootHash(tx), + size: await this.nullifiers.size(tx), + } + + graffiti = graffiti ? 
graffiti : Buffer.alloc(32) + + const header = new BlockHeader( + this.strategy, + previousSequence + BigInt(1), + previousBlockHash, + noteCommitment, + nullifierCommitment, + target, + 0, + timestamp, + minersFee.transactionFee(), + graffiti, + ) + + const block = new Block(header, transactions) + if (!previousBlockHash.equals(GENESIS_BLOCK_PREVIOUS)) { + // since we're creating a block that hasn't been mined yet, don't + // verify target because it'll always fail target check here + const verification = this.verifier.verifyBlock(block, { verifyTarget: false }) + + if (verification.valid !== Validity.Yes) { + throw new Error(verification.reason) + } + } + + // abort this transaction as we've modified the trees just to get new + // merkle roots, but this block isn't mined or accepted yet + await tx.abort() + + return block + }, + ) + } + + /** + * Notes may come in any order, so its possible a given note is not + * eligible to be added to the merkle tree yet. In this case, the note is + * stored in self.looseNotes until the missing note arrives. + */ + async addNote(index: number, note: E, tx?: IDatabaseTransaction): Promise { + return this.db.withTransaction( + tx, + [this.notes.counter, this.notes.leaves, this.notes.nodes], + 'readwrite', + async (tx) => { + let noteCount = await this.notes.size(tx) + // do we have a note at this index already? + if (index < noteCount) { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const oldNote = (await this.notes.get(index, tx))! + if (!this.noteSerde.equals(note, oldNote)) { + this.logger.warn( + `Tried to insert a note, but a different note already there for position ${index}`, + ) + } + return + } + this.looseNotes[index] = note + for (;;) { + const note = this.looseNotes[noteCount] + if (note) { + await this.notes.add(note, tx) + noteCount++ + } else { + break + } + } + // Garbage collecting. 
We keep notes in looseNotes after they are added + // to deal with adding them back after truncation events, + // but once the chain is large enough, the oldest notes are not likely to + // be truncated. (Truncations happen at forks, which are typically near the head) + // TODO replace with LRU cache + const indexesToPrune = noteCount - 1000 + for (const index in this.looseNotes) { + if (parseInt(index) < indexesToPrune) { + delete this.looseNotes[index] + } + } + }, + ) + } + + /** + * Notes may come in any order, so its possible a given note is not + * eligible to be added to the merkle tree yet. In this case, the note is + * stored in self.looseNotes until the missing note arrives. + */ + async addNullifier( + index: number, + nullifier: Nullifier, + tx?: IDatabaseTransaction, + ): Promise { + return this.db.withTransaction( + tx, + + [this.nullifiers.counter, this.nullifiers.leaves, this.nullifiers.nodes], + + 'readwrite', + async (tx) => { + let nullifierCount = await this.nullifiers.size(tx) + // do we have a nullifier at this index already? + if (index < nullifierCount) { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const oldNullifier = (await this.nullifiers.get(index, tx))! + if (!this.strategy.nullifierHasher().elementSerde().equals(nullifier, oldNullifier)) { + this.logger.warn( + `Tried to insert a nullifier, but a different nullifier already there for position ${index}`, + ) + return + } + } + this.looseNullifiers[index] = nullifier + for (;;) { + const nullifier = this.looseNullifiers[nullifierCount] + if (nullifier) { + await this.nullifiers.add(nullifier, tx) + nullifierCount++ + } else { + break + } + } + // Garbage collecting. We keep nullifiers in looseNullifiers after they are added + // to deal with adding them back after truncation events, + // but once the chain is large enough, the oldest nullifiers are not likely to + // be truncated. 
(Truncations happen at forks, which are typically near the head) + // TODO replace with LRU cache + const indexesToPrune = nullifierCount - 1000 + for (const index in this.looseNullifiers) { + if (parseInt(index) < indexesToPrune) { + delete this.looseNullifiers[index] + } + } + }, + ) + } + + async getGenesisHash(tx?: IDatabaseTransaction): Promise { + if (this.genesisBlockHash) return this.genesisBlockHash + // first check if we have anything at GENESIS_BLOCK_SEQUENCE + const genesis = await this.getAtSequence(GENESIS_BLOCK_SEQUENCE, tx) + if (!genesis) return null + + this.genesisBlockHash = genesis[0] + + return this.genesisBlockHash + } + + async getGenesisHeader( + tx?: IDatabaseTransaction, + ): Promise | null> { + if (!this.genesisHeader) { + const genesisHash = await this.getGenesisHash() + if (!genesisHash) return null + this.genesisHeader = await this.getBlockHeader(genesisHash, tx) + } + return this.genesisHeader + } + + hasGenesisBlock(tx?: IDatabaseTransaction): Promise { + return this.hasAtSequence(GENESIS_BLOCK_SEQUENCE, tx) + } + + private isBlockLater( + a: BlockHeader, + b: BlockHeader, + ): boolean { + if (a.sequence !== b.sequence) return a.sequence > b.sequence + // tie breaker + return a.hash < b.hash + } + + isBlockHeavier( + a: BlockHeader, + b: BlockHeader, + ): boolean { + if (a.work !== b.work) return a.work > b.work + if (a.sequence !== b.sequence) return a.sequence > b.sequence + if (a.target.toDifficulty() !== b.target.toDifficulty()) + return a.target.toDifficulty() > b.target.toDifficulty() + return a.hash < b.hash + } + + async isEmpty(tx?: IDatabaseTransaction): Promise { + return (await this.notes.size(tx)) === 0 && (await this.nullifiers.size(tx)) === 0 + } + + /** + * Iterates through all transactions, starting from the heaviest head and walking backward. 
+ */ + async *getTransactions( + fromBlockHash: Buffer | null = null, + tx?: IDatabaseTransaction, + ): AsyncGenerator< + { transaction: T; initialNoteIndex: number; sequence: BigInt; blockHash: string }, + void, + unknown + > { + let to: BlockHeader | null + if (fromBlockHash) { + to = await this.getBlockHeader(fromBlockHash, tx) + } else { + to = await this.getHeaviestHead(tx) + } + + if (!to) return + + for await (const header of this.iterateFromGenesis(to, tx)) { + for await (const transaction of this.getTransactionsForBlock(header, tx)) { + yield transaction + } + } + } + + async *getTransactionsForBlock( + blockHeader: BlockHeader, + tx?: IDatabaseTransaction, + ): AsyncGenerator< + { transaction: T; initialNoteIndex: number; sequence: BigInt; blockHash: string }, + void, + unknown + > { + const blockHash = blockHeader.hash + let initialNoteIndex = blockHeader.noteCommitment.size + + if (!blockHeader) { + throw new Error(`No block found with hash ${blockHash.toString('hex')}`) + } + + // Transactions should be handled in reverse order as they're added in anchorChain.newBlock, + // so treeSize gets decremented appropriately + + const serializedTransactions = await this.db.withTransaction( + tx, + [this.transactions], + 'read', + async (dbTransaction) => { + if (blockHash === null) return + return await this.transactions.get(blockHash, dbTransaction) + }, + ) + + if (serializedTransactions) { + for (const serializedTransaction of serializedTransactions.reverse()) { + const transaction = this.strategy.transactionSerde().deserialize(serializedTransaction) + initialNoteIndex -= transaction.notesLength() + + yield { + transaction, + initialNoteIndex, + blockHash: blockHash.toString('hex'), + sequence: blockHeader.sequence, + } + } + } + } + + /** + * This function will take multiple BlockHash | BlockHeader | Block and normalize it to BlockHeader + * performing database loads if it needs to. 
It's useful for operating on blocks with variadic + * inputs for convenience. + * + * @param inputs BlockHash | BlockHeader | Block to turn into BlockHeader + * @param tx + * @returns BlockHeader[] assocaited with the inputs + */ + protected async getHeadersFromInput( + inputs: Array | Block>, + tx?: IDatabaseTransaction, + ): Promise>> { + type LoadResult = [BlockHeader, BlockHash, number] + + const outputs: BlockHeader[] = [] + const promises: Promise[] = [] + + for (let i = 0; i < inputs.length; ++i) { + const input = inputs[i] + + if (input instanceof Block) { + // Transform any blocks to headers + outputs[i] = input.header + } else if (input instanceof Buffer) { + // Load any hashes into headers + const promise = this.getBlockHeader(input, tx).then((r) => [r, input, i] as LoadResult) + promises.push(promise) + } else { + // headers should just get copied over + outputs[i] = input + } + } + + // Wait for all block headers to load + if (promises.length > 0) { + const loaded = await Promise.all(promises) + for (const [header, hash, index] of loaded) { + Assert.isNotNull(header, `Error loading block by header: ${hash.toString('hex')}`) + outputs[index] = header + } + } + + return outputs + } +} diff --git a/ironfish/src/captain/anchorChain/index.ts b/ironfish/src/captain/anchorChain/index.ts new file mode 100644 index 0000000000..daa3e8897c --- /dev/null +++ b/ironfish/src/captain/anchorChain/index.ts @@ -0,0 +1,3 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ diff --git a/ironfish/src/captain/anchorChain/merkleTree/MerkleTree.test.ts b/ironfish/src/captain/anchorChain/merkleTree/MerkleTree.test.ts new file mode 100644 index 0000000000..e5d14afb45 --- /dev/null +++ b/ironfish/src/captain/anchorChain/merkleTree/MerkleTree.test.ts @@ -0,0 +1,605 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import MerkleTree, { depthAtLeafCount, Side } from '.' +import { makeDb, makeDbName, makeFullTree, makeTree } from '../../testUtilities' + +describe('Merkle tree', function () { + it('initializes database', async () => { + const tree = await makeTree() + await expect(tree.size()).resolves.toBe(0) + await expect(tree.counter.get('Nodes')).resolves.toBe(1) + }) + + it("doesn't reset db on second run", async () => { + const name = makeDbName() + + const tree1 = await makeTree({ name }) + await tree1.add('a') + await expect(tree1.size()).resolves.toBe(1) + + await tree1.db.close() + + const tree2 = await makeTree({ name }) + await expect(tree2.size()).resolves.toBe(1) + }) + + it('maintains two separate trees', async () => { + const database = makeDb() + const tree1 = await makeTree({ depth: 4, database: database }) + const tree2 = await makeTree({ depth: 4, database: database }) + await database.open() + + await tree1.add('a') + await tree2.add('A') + await tree2.add('B') + + expect(await tree1.size()).toBe(1) + expect(await tree1.get(0)).toBe('a') + expect(await tree1.rootHash()).toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + + expect(await tree2.size()).toBe(2) + expect(await tree2.get(0)).toBe('A') + expect(await tree2.get(1)).toBe('B') + expect(await tree2.rootHash()).toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + }) + + it('adds nodes correctly', async () => { + const tree = await makeTree() + + await tree.add('a') + await expect(tree).toHaveLeaves('a', 
[0]) + await expect(tree).toHaveNodes([]) + + await tree.add('b') + await expect(tree).toHaveLeaves('ab', [1, 1]) + await expect(tree).toHaveNodes([[1, Side.Left, 0, '']]) + + await tree.add('c') + await expect(tree).toHaveLeaves('abc', [1, 1, 2]) + await expect(tree).toHaveNodes([ + [1, Side.Left, 3, ''], + [2, Side.Right, 1, ''], + [3, Side.Left, 0, '<|-1>'], + ]) + + await tree.add('d') + await expect(tree).toHaveLeaves('abcd', [1, 1, 2, 2]) + await expect(tree).toHaveNodes([ + [1, Side.Left, 3, ''], + [2, Side.Right, 1, ''], + [3, Side.Left, 0, '<|-1>'], + ]) + + await tree.add('e') + await expect(tree).toHaveLeaves('abcde', [1, 1, 2, 2, 4]) + await expect(tree).toHaveNodes([ + [1, Side.Left, 3, ''], + [2, Side.Right, 1, ''], + [3, Side.Left, 6, '<|-1>'], + [4, Side.Left, 5, ''], + [5, Side.Right, 3, '<|-1>'], + [6, Side.Left, 0, '<<|-1>|<|-1>-2>'], + ]) + + await tree.add('f') + await expect(tree).toHaveLeaves('abcdef', [1, 1, 2, 2, 4, 4]) + await expect(tree).toHaveNodes([ + [1, Side.Left, 3, ''], + [2, Side.Right, 1, ''], + [3, Side.Left, 6, '<|-1>'], + [4, Side.Left, 5, ''], + [5, Side.Right, 3, '<|-1>'], + [6, Side.Left, 0, '<<|-1>|<|-1>-2>'], + ]) + + await tree.add('g') + await expect(tree).toHaveLeaves('abcdefg', [1, 1, 2, 2, 4, 4, 7]) + await expect(tree).toHaveNodes([ + [1, Side.Left, 3, ''], + [2, Side.Right, 1, ''], + [3, Side.Left, 6, '<|-1>'], + [4, Side.Left, 5, ''], + [5, Side.Right, 3, '<|-1>'], + [6, Side.Left, 0, '<<|-1>|<|-1>-2>'], + [7, Side.Right, 4, ''], + ]) + + await tree.add('h') + await expect(tree).toHaveLeaves('abcdefgh', [1, 1, 2, 2, 4, 4, 7, 7]) + await expect(tree).toHaveNodes([ + [1, Side.Left, 3, ''], + [2, Side.Right, 1, ''], + [3, Side.Left, 6, '<|-1>'], + [4, Side.Left, 5, ''], + [5, Side.Right, 3, '<|-1>'], + [6, Side.Left, 0, '<<|-1>|<|-1>-2>'], + [7, Side.Right, 4, ''], + ]) + + await tree.add('i') + await expect(tree).toHaveLeaves('abcdefghi', [1, 1, 2, 2, 4, 4, 7, 7, 8]) + await expect(tree).toHaveNodes([ + [1, 
Side.Left, 3, ''], + [2, Side.Right, 1, ''], + [3, Side.Left, 6, '<|-1>'], + [4, Side.Left, 5, ''], + [5, Side.Right, 3, '<|-1>'], + [6, Side.Left, 11, '<<|-1>|<|-1>-2>'], + [7, Side.Right, 4, ''], + [8, Side.Left, 9, ''], + [9, Side.Left, 10, '<|-1>'], + [10, Side.Right, 6, '<<|-1>|<|-1>-2>'], + [ + 11, + Side.Left, + 0, + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ], + ]) + }) + + it('truncates nodes correctly', async () => { + let tree = await makeFullTree() + await tree.truncate(0) + await expect(tree).toMatchTree(await makeTree({ characters: '' })) + + tree = await makeFullTree() + await tree.truncate(1) + await expect(tree).toMatchTree(await makeTree({ characters: 'a' })) + + tree = await makeFullTree() + await tree.truncate(2) + await expect(tree).toMatchTree(await makeTree({ characters: 'ab' })) + + tree = await makeFullTree() + await tree.truncate(3) + await expect(tree).toMatchTree(await makeTree({ characters: 'abc' })) + + tree = await makeFullTree() + await tree.truncate(4) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcd' })) + + tree = await makeFullTree() + await tree.truncate(5) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcde' })) + + tree = await makeFullTree() + await tree.truncate(6) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdef' })) + + tree = await makeFullTree() + await tree.truncate(7) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdefg' })) + + tree = await makeFullTree() + await tree.truncate(8) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdefgh' })) + + tree = await makeFullTree() + await tree.truncate(9) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdefghi' })) + + tree = await makeFullTree() + await tree.truncate(10) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdefghij' })) + + tree = await makeFullTree() + await tree.truncate(11) + await expect(tree).toMatchTree(await makeTree({ characters: 
'abcdefghijk' })) + + tree = await makeFullTree() + await tree.truncate(12) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdefghijkl' })) + + tree = await makeFullTree() + await tree.truncate(13) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdefghijklm' })) + + tree = await makeFullTree() + await tree.truncate(14) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdefghijklmn' })) + + tree = await makeFullTree() + await tree.truncate(15) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdefghijklmno' })) + + tree = await makeFullTree() + await tree.truncate(16) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdefghijklmnop' })) + + tree = await makeFullTree() + await tree.truncate(17) + await expect(tree).toMatchTree(await makeTree({ characters: 'abcdefghijklmnop' })) + }) + + it('adds to tree after truncating', async () => { + const tree = await makeFullTree() + await tree.truncate(1) + + for (const char of 'bcdefghjklmnopqr') { + await tree.add(char) + } + + expect(await tree.size()).toBe(17) + }) + + it('iterates over notes', async () => { + const tree = await makeFullTree() + + let notes = '' + for await (const note of tree.notes()) { + notes += note + } + + expect(notes).toBe('abcdefghijklmnop') + }) + + it('calculates past and current root hashes correctly', async () => { + const tree = await makeTree({ depth: 4 }) + + await expect(tree.rootHash()).rejects.toMatchInlineSnapshot( + `[Error: Unable to get past size 0 for tree with 0 nodes]`, + ) + await expect(tree.pastRoot(0)).rejects.toMatchInlineSnapshot( + `[Error: Unable to get past size 0 for tree with 0 nodes]`, + ) + await expect(tree.pastRoot(1)).rejects.toMatchInlineSnapshot( + `[Error: Unable to get past size 1 for tree with 0 nodes]`, + ) + await tree.add('a') + await expect(tree.rootHash()).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(1)).resolves.toBe( + 
'<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(2)).rejects.toMatchInlineSnapshot( + `[Error: Unable to get past size 2 for tree with 1 nodes]`, + ) + await tree.add('b') + await expect(tree.rootHash()).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(1)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(2)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(3)).rejects.toMatchInlineSnapshot( + `[Error: Unable to get past size 3 for tree with 2 nodes]`, + ) + await tree.add('c') + await expect(tree.rootHash()).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(1)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(2)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(3)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(4)).rejects.toMatchInlineSnapshot( + `[Error: Unable to get past size 4 for tree with 3 nodes]`, + ) + await tree.add('d') + await expect(tree.rootHash()).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(1)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(2)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(3)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(4)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(5)).rejects.toMatchInlineSnapshot( + `[Error: Unable to get past size 5 for tree with 4 nodes]`, + ) + for (let i = 0; i < 12; i++) { + await tree.add(String(i)) + } + await expect(tree.rootHash()).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|3-0>-1>-2>|<<<4|5-0>|<6|7-0>-1>|<<8|9-0>|<10|11-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(1)).resolves.toBe( + 
'<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(2)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(3)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(4)).resolves.toBe( + '<<<|-1>|<|-1>-2>|<<|-1>|<|-1>-2>-3>', + ) + await expect(tree.pastRoot(5)).resolves.toBe( + '<<<|-1>|<<0|0-0>|<0|0-0>-1>-2>|<<|-1>|<<0|0-0>|<0|0-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(6)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<0|1-0>-1>-2>|<<|-1>|<<0|1-0>|<0|1-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(7)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|2-0>-1>-2>|<<|-1>|<<0|1-0>|<2|2-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(8)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|3-0>-1>-2>|<<|-1>|<<0|1-0>|<2|3-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(9)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|3-0>-1>-2>|<<<4|4-0>|<4|4-0>-1>|<<4|4-0>|<4|4-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(10)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|3-0>-1>-2>|<<<4|5-0>|<4|5-0>-1>|<<4|5-0>|<4|5-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(11)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|3-0>-1>-2>|<<<4|5-0>|<6|6-0>-1>|<<4|5-0>|<6|6-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(12)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|3-0>-1>-2>|<<<4|5-0>|<6|7-0>-1>|<<4|5-0>|<6|7-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(13)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|3-0>-1>-2>|<<<4|5-0>|<6|7-0>-1>|<<8|8-0>|<8|8-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(14)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|3-0>-1>-2>|<<<4|5-0>|<6|7-0>-1>|<<8|9-0>|<8|9-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(15)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|3-0>-1>-2>|<<<4|5-0>|<6|7-0>-1>|<<8|9-0>|<10|10-0>-1>-2>-3>', + ) + await expect(tree.pastRoot(16)).resolves.toBe( + '<<<|-1>|<<0|1-0>|<2|3-0>-1>-2>|<<<4|5-0>|<6|7-0>-1>|<<8|9-0>|<10|11-0>-1>-2>-3>', + ) + + await expect(tree.pastRoot(17)).rejects.toMatchInlineSnapshot( + `[Error: Unable to get past size 
17 for tree with 16 nodes]`, + ) + }) + + it('finds contained values', async () => { + const tree = await makeTree() + expect(await tree.contained('1', 0)).toBe(false) + expect(await tree.contained('1', 1)).toBe(false) + for (let i = 1; i < 32; i++) { + await tree.add(String(i)) + for (let j = 1; j < i; j++) { + expect(await tree.contained(String(i), j)).toBe(false) + expect(await tree.contained(String(j), i)).toBe(true) + } + expect(await tree.contained(String(i), i)).toBe(true) + expect(await tree.contained(String(i), i + 1)).toBe(true) + expect(await tree.contains(String(i))).toBe(true) + } + }) + + it('calculates correct witnesses', async () => { + const witnessOrThrowFactory = ( + witnessTree: MerkleTree, + ) => async (index: number) => { + const witness = await witnessTree.witness(index) + if (witness == null) throw new Error(`Witness at ${index} was unexpectedly null`) + return witness + } + + const tree = await makeTree({ depth: 3 }) + const witnessOrThrow = witnessOrThrowFactory(tree) + await expect(tree.witness(0)).resolves.toBe(null) + await tree.add('a') + await expect(tree.witness(1)).resolves.toBe(null) + let witness = await witnessOrThrow(0) + expect(witness.verify('a')).toBe(true) + expect(witness.verify('b')).toBe(false) + let expectedRoot = '<<|-1>|<|-1>-2>' + expect(witness).toMatchWitness(1, expectedRoot, [ + [Side.Left, 'a'], + [Side.Left, ''], + [Side.Left, '<|-1>'], + ]) + + await tree.add('b') + await expect(tree.witness(2)).resolves.toBe(null) + expectedRoot = '<<|-1>|<|-1>-2>' + witness = await witnessOrThrow(0) + expect(witness.verify('a')).toBe(true) + expect(witness.verify('b')).toBe(false) + expect(witness).toMatchWitness(2, expectedRoot, [ + [Side.Left, 'b'], + [Side.Left, ''], + [Side.Left, '<|-1>'], + ]) + witness = await witnessOrThrow(1) + expect(witness.verify('b')).toBe(true) + expect(witness).toMatchWitness(2, expectedRoot, [ + [Side.Right, 'a'], + [Side.Left, ''], + [Side.Left, '<|-1>'], + ]) + + await tree.add('c') + await 
expect(tree.witness(3)).resolves.toBe(null) + expectedRoot = '<<|-1>|<|-1>-2>' + witness = await witnessOrThrow(0) + expect(witness.verify('a')).toBe(true) + expect(witness).toMatchWitness(3, expectedRoot, [ + [Side.Left, 'b'], + [Side.Left, ''], + [Side.Left, '<|-1>'], + ]) + witness = await witnessOrThrow(1) + expect(witness.verify('b')).toBe(true) + expect(witness).toMatchWitness(3, expectedRoot, [ + [Side.Right, 'a'], + [Side.Left, ''], + [Side.Left, '<|-1>'], + ]) + witness = await witnessOrThrow(2) + expect(witness.verify('c')).toBe(true) + expect(witness).toMatchWitness(3, expectedRoot, [ + [Side.Left, 'c'], + [Side.Right, ''], + [Side.Left, '<|-1>'], + ]) + await tree.add('d') + await expect(tree.witness(4)).resolves.toBe(null) + expectedRoot = '<<|-1>|<|-1>-2>' + witness = await witnessOrThrow(0) + expect(witness.verify('a')).toBe(true) + expect(witness).toMatchWitness(4, expectedRoot, [ + [Side.Left, 'b'], + [Side.Left, ''], + [Side.Left, '<|-1>'], + ]) + witness = await witnessOrThrow(1) + expect(witness.verify('b')).toBe(true) + expect(witness).toMatchWitness(4, expectedRoot, [ + [Side.Right, 'a'], + [Side.Left, ''], + [Side.Left, '<|-1>'], + ]) + witness = await witnessOrThrow(2) + expect(witness.verify('c')).toBe(true) + expect(witness).toMatchWitness(4, expectedRoot, [ + [Side.Left, 'd'], + [Side.Right, ''], + [Side.Left, '<|-1>'], + ]) + witness = await witnessOrThrow(3) + expect(witness.verify('d')).toBe(true) + expect(witness).toMatchWitness(4, expectedRoot, [ + [Side.Right, 'c'], + [Side.Right, ''], + [Side.Left, '<|-1>'], + ]) + + await tree.add('e') + await tree.add('f') + await tree.add('g') + await tree.add('h') + await expect(tree.witness(8)).resolves.toBe(null) + expectedRoot = '<<|-1>|<|-1>-2>' + witness = await witnessOrThrow(0) + expect(witness.verify('a')).toBe(true) + expect(witness).toMatchWitness(8, expectedRoot, [ + [Side.Left, 'b'], + [Side.Left, ''], + [Side.Left, '<|-1>'], + ]) + witness = await witnessOrThrow(1) + 
expect(witness.verify('b')).toBe(true) + expect(witness).toMatchWitness(8, expectedRoot, [ + [Side.Right, 'a'], + [Side.Left, ''], + [Side.Left, '<|-1>'], + ]) + witness = await witnessOrThrow(2) + expect(witness.verify('c')).toBe(true) + expect(witness).toMatchWitness(8, expectedRoot, [ + [Side.Left, 'd'], + [Side.Right, ''], + [Side.Left, '<|-1>'], + ]) + witness = await witnessOrThrow(3) + expect(witness.verify('d')).toBe(true) + expect(witness).toMatchWitness(8, expectedRoot, [ + [Side.Right, 'c'], + [Side.Right, ''], + [Side.Left, '<|-1>'], + ]) + witness = await witnessOrThrow(4) + expect(witness.verify('e')).toBe(true) + expect(witness).toMatchWitness(8, expectedRoot, [ + [Side.Left, 'f'], + [Side.Left, ''], + [Side.Right, '<|-1>'], + ]) + witness = await witnessOrThrow(5) + expect(witness.verify('f')).toBe(true) + expect(witness).toMatchWitness(8, expectedRoot, [ + [Side.Right, 'e'], + [Side.Left, ''], + [Side.Right, '<|-1>'], + ]) + witness = await witnessOrThrow(6) + expect(witness.verify('g')).toBe(true) + expect(witness).toMatchWitness(8, expectedRoot, [ + [Side.Left, 'h'], + [Side.Right, ''], + [Side.Right, '<|-1>'], + ]) + witness = await witnessOrThrow(7) + expect(witness.verify('h')).toBe(true) + expect(witness).toMatchWitness(8, expectedRoot, [ + [Side.Right, 'g'], + [Side.Right, ''], + [Side.Right, '<|-1>'], + ]) + }) + + it('witness rootHash should equal the tree rootHash', async () => { + const tree = await makeTree({ depth: 3 }) + await tree.add('a') + await tree.add('b') + await tree.add('c') + await tree.add('d') + await tree.add('e') + await tree.add('f') + await tree.add('g') + await tree.add('h') + + const rootHash = await tree.rootHash() + for (let i = 0; i < (await tree.size()); i++) { + const witness = await tree.witness(i) + if (witness == null) throw new Error('Witness should not be null') + expect(witness.rootHash).toEqual(rootHash) + } + }) + + it("throws an error when getting a position that doesn't exist", async () => { + const 
tree = await makeTree() + await expect(() => tree.get(99)).rejects.toThrowError( + `No leaf found in tree ${tree.treeName} at index 99`, + ) + + await tree.add('1') + await expect(() => tree.get(99)).rejects.toThrowError( + `No leaf found in tree ${tree.treeName} at index 99`, + ) + }) + + it('calculates correct depths', () => { + expect(depthAtLeafCount(0)).toBe(0) + expect(depthAtLeafCount(1)).toBe(1) + expect(depthAtLeafCount(2)).toBe(2) + expect(depthAtLeafCount(3)).toBe(3) + expect(depthAtLeafCount(4)).toBe(3) + expect(depthAtLeafCount(5)).toBe(4) + expect(depthAtLeafCount(6)).toBe(4) + expect(depthAtLeafCount(7)).toBe(4) + expect(depthAtLeafCount(8)).toBe(4) + expect(depthAtLeafCount(9)).toBe(5) + expect(depthAtLeafCount(10)).toBe(5) + expect(depthAtLeafCount(11)).toBe(5) + expect(depthAtLeafCount(12)).toBe(5) + expect(depthAtLeafCount(13)).toBe(5) + expect(depthAtLeafCount(14)).toBe(5) + expect(depthAtLeafCount(15)).toBe(5) + expect(depthAtLeafCount(16)).toBe(5) + expect(depthAtLeafCount(17)).toBe(6) + expect(depthAtLeafCount(32)).toBe(6) + expect(depthAtLeafCount(33)).toBe(7) + }) +}) diff --git a/ironfish/src/captain/anchorChain/merkleTree/Schema.ts b/ironfish/src/captain/anchorChain/merkleTree/Schema.ts new file mode 100644 index 0000000000..848a4efabb --- /dev/null +++ b/ironfish/src/captain/anchorChain/merkleTree/Schema.ts @@ -0,0 +1,38 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { DatabaseSchema } from '../../../storage' +import { LeafIndex, NodeIndex, Side } from './index' + +export const SCHEMA_VERSION = 1 + +interface CounterEntry extends DatabaseSchema { + key: T + value: number +} + +export type CounterSchema = CounterEntry<'Leaves'> | CounterEntry<'Nodes'> + +export interface LeavesSchema extends DatabaseSchema { + key: LeafIndex + value: { + index: LeafIndex + element: E + merkleHash: H + parentIndex: NodeIndex + } +} + +export type NodeValue = { + index: NodeIndex + side: Side + hashOfSibling: H + parentIndex?: NodeIndex // left nodes have a parent index + leftIndex?: NodeIndex // right nodes have a left index +} + +export interface NodesSchema extends DatabaseSchema { + key: NodeIndex + value: NodeValue +} diff --git a/ironfish/src/captain/anchorChain/merkleTree/Witness.ts b/ironfish/src/captain/anchorChain/merkleTree/Witness.ts new file mode 100644 index 0000000000..2cf3670d34 --- /dev/null +++ b/ironfish/src/captain/anchorChain/merkleTree/Witness.ts @@ -0,0 +1,64 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Side } from './index' +import { MerkleHasher } from './hashers' +import { JsonSerializable } from '../../../serde' + +export interface WitnessNode { + side: Side + hashOfSibling: H +} + +export class SerializedWitnessNode { + constructor(readonly _side: Side, readonly _hashOfSibling: SH) {} + + side: () => Side = () => this._side + hashOfSibling: () => SH = () => this._hashOfSibling +} + +/** + * Commitment that a leaf node exists in the tree with an authentication path + * and the rootHash of the tree at the time the authentication path was calculated. 
+ */ + +export default class Witness { + constructor( + readonly _treeSize: number, + readonly rootHash: H, + readonly authenticationPath: WitnessNode[], + readonly merkleHasher: MerkleHasher, + ) {} + + verify(myHash: H): boolean { + let currentHash = myHash + for (let i = 0; i < this.authenticationPath.length; i++) { + const node = this.authenticationPath[i] + if (node.side === Side.Left) { + currentHash = this.merkleHasher.combineHash(i, currentHash, node.hashOfSibling) + } else { + currentHash = this.merkleHasher.combineHash(i, node.hashOfSibling, currentHash) + } + } + return this.merkleHasher.hashSerde().equals(currentHash, this.rootHash) + } + + authPath(): SerializedWitnessNode[] { + return this.authenticationPath.map( + (n) => + new SerializedWitnessNode( + n.side, + this.merkleHasher.hashSerde().serialize(n.hashOfSibling), + ), + ) + } + + treeSize(): number { + return this._treeSize + } + + serializeRootHash(): SH { + return this.merkleHasher.hashSerde().serialize(this.rootHash) + } +} diff --git a/ironfish/src/captain/anchorChain/merkleTree/hashers/ConcatHasher.ts b/ironfish/src/captain/anchorChain/merkleTree/hashers/ConcatHasher.ts new file mode 100644 index 0000000000..34e81d2873 --- /dev/null +++ b/ironfish/src/captain/anchorChain/merkleTree/hashers/ConcatHasher.ts @@ -0,0 +1,26 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { MerkleHasher } from '.' +import { StringSerde } from '../../../../serde' + +/** + * Demo merkle hasher implementation that combines hashes via concatenation. + * + * Useful for unit testing or displaying demo trees. 
+ */ +export default class ConcatHasher implements MerkleHasher { + elementSerde(): StringSerde { + return new StringSerde() + } + hashSerde(): StringSerde { + return new StringSerde() + } + combineHash(depth: number, left: string, right: string): string { + return left + right + } + merkleHash(element: string): string { + return element + } +} diff --git a/ironfish/src/captain/anchorChain/merkleTree/hashers/RangeHasher.ts b/ironfish/src/captain/anchorChain/merkleTree/hashers/RangeHasher.ts new file mode 100644 index 0000000000..cca5019445 --- /dev/null +++ b/ironfish/src/captain/anchorChain/merkleTree/hashers/RangeHasher.ts @@ -0,0 +1,31 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { MerkleHasher } from '.' +import { StringSerde } from '../../../../serde' + +/** + * Demo merkle hasher implementation that indicates a range of hashes. + * + * Useful for unit testing or displaying demo trees. Assumes the hashes are + * in ascending order. Takes the left and right side of a hyphen in each hash + * and combines them. 
+ */ +export default class RangeHasher implements MerkleHasher { + elementSerde(): StringSerde { + return new StringSerde() + } + hashSerde(): StringSerde { + return new StringSerde() + } + combineHash(depth: number, left: string, right: string): string { + const leftSplit = left.split('-') + const rightSplit = right.split('-') + return leftSplit[0] + '-' + rightSplit[rightSplit.length - 1] + } + + merkleHash(element: string): string { + return element + } +} diff --git a/ironfish/src/captain/anchorChain/merkleTree/hashers/StructureHasher.ts b/ironfish/src/captain/anchorChain/merkleTree/hashers/StructureHasher.ts new file mode 100644 index 0000000000..8aa4237da2 --- /dev/null +++ b/ironfish/src/captain/anchorChain/merkleTree/hashers/StructureHasher.ts @@ -0,0 +1,27 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { MerkleHasher } from '.' +import { StringSerde } from '../../../../serde' + +/** + * Simple hasher that encodes the tree structure in its hashes so its easy + * to test if said structure is correct. + * + * Only useful for various types of unit testing. + */ +export default class StructureHasher implements MerkleHasher { + elementSerde(): StringSerde { + return new StringSerde() + } + hashSerde(): StringSerde { + return new StringSerde() + } + combineHash(depth: number, left: string, right: string): string { + return `<${left}|${right}-${depth}>` + } + merkleHash(element: string): string { + return element + } +} diff --git a/ironfish/src/captain/anchorChain/merkleTree/hashers/index.ts b/ironfish/src/captain/anchorChain/merkleTree/hashers/index.ts new file mode 100644 index 0000000000..b2e107a140 --- /dev/null +++ b/ironfish/src/captain/anchorChain/merkleTree/hashers/index.ts @@ -0,0 +1,30 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import Serde, { JsonSerializable } from '../../../../serde' +/** + * Interface for objects that can calculate the hashes of elements. + * + */ +export interface MerkleHasher { + /** + * Serializer and equality checker for the notes in the tree + */ + elementSerde: () => Serde + + /** + * Serializer and equality checker for the hashes in the tree + */ + hashSerde: () => Serde + + /** + * Get the hash of a given element + */ + merkleHash: (element: E) => H + + /** + * Combine two hashes to get the parent hash + */ + combineHash: (depth: number, left: H, right: H) => H +} diff --git a/ironfish/src/captain/anchorChain/merkleTree/index.ts b/ironfish/src/captain/anchorChain/merkleTree/index.ts new file mode 100644 index 0000000000..300bba3b42 --- /dev/null +++ b/ironfish/src/captain/anchorChain/merkleTree/index.ts @@ -0,0 +1,790 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { + IDatabase, + IDatabaseEncoding, + IDatabaseStore, + IDatabaseTransaction, + JsonEncoding, + SchemaValue, +} from '../../../storage' +import { CounterSchema, LeavesSchema, NodesSchema, NodeValue, SCHEMA_VERSION } from './Schema' +import Witness, { WitnessNode } from './Witness' +import { MerkleHasher } from './hashers' +import { JsonSerializable, IJSON } from '../../../serde' +import { Assert } from '../../../assert' + +export type { MerkleHasher } from './hashers' +export { default as ConcatHasher } from './hashers/ConcatHasher' +export { default as RangeHasher } from './hashers/RangeHasher' +export { default as StructureHasher } from './hashers/StructureHasher' +export { NodeValue } from './Schema' +export { default as Witness } from './Witness' + +/** + * Represent whether a given node is the left or right subchild in a tree, + * or an empty node with a known hash. + */ +export enum Side { + Left = 'Left', + Right = 'Right', +} + +export type LeafIndex = number +export type NodeIndex = number + +export default class MerkleTree< + E, + H, + SE extends JsonSerializable, + SH extends JsonSerializable +> { + counter: IDatabaseStore + leaves: IDatabaseStore> + nodes: IDatabaseStore> + + private constructor( + readonly merkleHasher: MerkleHasher, + readonly db: IDatabase, + readonly treeName: string, + readonly treeDepth: number, + ) { + class LeafEncoding implements IDatabaseEncoding['value']> { + serialize = (value: LeavesSchema['value']): Buffer => { + const intermediate = { + ...value, + element: merkleHasher.elementSerde().serialize(value.element), + merkleHash: merkleHasher.hashSerde().serialize(value.merkleHash), + } + return Buffer.from(IJSON.stringify(intermediate), 'utf8') + } + deserialize = (buffer: Buffer): LeavesSchema['value'] => { + const intermediate = IJSON.parse(buffer.toString('utf8')) as Omit< + LeavesSchema['value'], + 'element' | 'merkleHash' + > & { element: SE; merkleHash: SH } + return { + ...intermediate, + element: 
merkleHasher.elementSerde().deserialize(intermediate.element), + merkleHash: merkleHasher.hashSerde().deserialize(intermediate.merkleHash), + } + } + + equals(): boolean { + throw new Error('You should never use this') + } + } + + class NodeEncoding implements IDatabaseEncoding> { + serialize = (value: NodeValue): Buffer => { + const intermediate = { + ...value, + hashOfSibling: merkleHasher.hashSerde().serialize(value.hashOfSibling), + } + return Buffer.from(IJSON.stringify(intermediate), 'utf8') + } + deserialize = (buffer: Buffer): NodeValue => { + const intermediate = IJSON.parse(buffer.toString('utf8')) as Omit< + NodeValue, + 'hashOfSibling' + > & { hashOfSibling: SH } + + return { + ...intermediate, + hashOfSibling: merkleHasher.hashSerde().deserialize(intermediate.hashOfSibling), + } + } + + equals(): boolean { + throw new Error('You should never use this') + } + } + + this.counter = db.addStore({ + version: SCHEMA_VERSION, + name: `${treeName}_Counter`, + keyEncoding: new JsonEncoding(), + valueEncoding: new JsonEncoding(), + upgrade: async (db, oldVersion, newVersion, tx): Promise => { + if (oldVersion === 0) { + await this.counter.put('Leaves', 0, tx) + await this.counter.put('Nodes', 1, tx) + } + }, + }) + + this.leaves = db.addStore({ + version: SCHEMA_VERSION, + name: `${treeName}_Leaves`, + keyEncoding: new JsonEncoding['key']>(), + valueEncoding: new LeafEncoding(), + keyPath: 'index', + }) + + this.nodes = db.addStore({ + version: SCHEMA_VERSION, + name: `${treeName}_Nodes`, + keyEncoding: new JsonEncoding['key']>(), + valueEncoding: new NodeEncoding(), + keyPath: 'index', + }) + } + + /** + * Construct a new merkle tree given a concrete merklehasher instance. 
+ */ + static async new( + hasher: MerkleHasher, + db: IDatabase, + treeName: string, + treeDepth?: number, + ): Promise> + static async new( + hasher: MerkleHasher, + db: IDatabase, + treeName: string, + treeDepth = 32, + ): Promise> { + return Promise.resolve(new MerkleTree(hasher, db, treeName, treeDepth)) + } + + /** + * Get the number of leaf nodes (elements) in the tree. + */ + async size(tx?: IDatabaseTransaction): Promise { + return await this.db.withTransaction(tx, [this.counter], 'read', async (tx) => { + const value = await this.counter.get('Leaves', tx) + + if (value === undefined) { + throw new Error(`No counter record found for tree ${this.treeName}`) + } + + return value + }) + } + + /** + * Get the leaf element at the given index. Throws an error if the + * index is not in bounds. + */ + async get(position: LeafIndex, tx?: IDatabaseTransaction): Promise { + return (await this.getLeaf(position, tx)).element + } + + /** + * Get the leaf element at the given index. Throws an error if the + * index is not in bounds. + */ + async getLeaf( + index: LeafIndex, + tx?: IDatabaseTransaction, + ): Promise>> { + const leaf = await this.getLeafOrNull(index, tx) + if (!leaf) throw new Error(`No leaf found in tree ${this.treeName} at index ${index}`) + return leaf + } + + /** + * Get the leaf element at the given index. Returns null if the + * index is not in bounds. + */ + async getLeafOrNull( + index: LeafIndex, + tx?: IDatabaseTransaction, + ): Promise> | null> { + return await this.db.withTransaction(tx, [this.leaves], 'read', async (tx) => { + const leaf = await this.leaves.get(index, tx) + return leaf || null + }) + } + + /** + * Get the node element at the given index. Throws an error if the + * index is not in bounds. 
+ */ + async getNode( + index: NodeIndex, + tx?: IDatabaseTransaction, + ): Promise>> { + const node = await this.getNodeOrNull(index, tx) + if (!node) throw new Error(`No node found in tree ${this.treeName} at index ${index}`) + return node + } + + /** + * Get the node element at the given index. Returns null if the + * index is not in bounds. + */ + async getNodeOrNull( + index: NodeIndex, + tx?: IDatabaseTransaction, + ): Promise> | null> { + const node = await this.nodes.get(index, tx) + return node || null + } + + /** + * Get the count of a given tree. Throws an error if the + * count is not in the store. + */ + async getCount(countType: 'Leaves' | 'Nodes', tx?: IDatabaseTransaction): Promise { + const count = await this.counter.get(countType, tx) + if (count === undefined) + throw new Error(`No counts found in tree ${this.treeName} for type ${countType}`) + return count + } + + /** Iterate over all notes in the tree. This happens asynchronously + * and behaviour is undefined if the tree changes while iterating. + */ + async *notes(tx?: IDatabaseTransaction): AsyncGenerator { + const numLeaves = await this.size(tx) + + for (let index = 0; index < numLeaves; index++) { + const leaf = await this.getLeafOrNull(index, tx) + if (leaf === null) { + return + } + + yield leaf.element + } + } + + /** + * Add the new leaf element into the tree, and update all hashes. 
+ */ + async add(element: E, tx?: IDatabaseTransaction): Promise { + await this.db.withTransaction( + tx, + [this.counter, this.leaves, this.nodes], + 'readwrite', + async (tx) => { + const merkleHash = this.merkleHasher.merkleHash(element) + const indexOfNewLeaf = await this.getCount('Leaves', tx) + + let newParentIndex: NodeIndex + + if (indexOfNewLeaf === 0) { + // Special case where this is the first leaf, with no parent + newParentIndex = 0 + } else if (indexOfNewLeaf === 1) { + // Special case where this is the second leaf, and both leaves need a new parent + newParentIndex = 1 + + const leftLeafIndex = 0 + const leftLeaf = await this.getLeaf(leftLeafIndex, tx) + const hashOfSibling = this.merkleHasher.combineHash( + 0, + leftLeaf.merkleHash, + merkleHash, + ) + + await this.nodes.put( + { + side: Side.Left, + parentIndex: 0, + hashOfSibling, + index: newParentIndex, + }, + tx, + ) + + await this.leaves.put( + { + element: leftLeaf.element, + merkleHash: leftLeaf.merkleHash, + parentIndex: newParentIndex, + index: leftLeafIndex, + }, + tx, + ) + + await this.counter.put('Nodes', 2, tx) + } else if (isRight(indexOfNewLeaf)) { + // Simple case where we are adding a new node to a parent with an empty right child + const leftLeafIndex = indexOfNewLeaf - 1 + const leaf = await this.getLeaf(leftLeafIndex, tx) + newParentIndex = leaf.parentIndex + } else { + // Walk up the path from the previous leaf until finding an empty or right-hand node + // Create a bunch of left-hand nodes for each step up that path + const previousLeafIndex = indexOfNewLeaf - 1 + const previousLeaf = await this.getLeaf(previousLeafIndex, tx) + let previousParentIndex = previousLeaf.parentIndex + + let nextNodeIndex = await this.getCount('Nodes', tx) + let myHash = this.merkleHasher.combineHash(0, merkleHash, merkleHash) + let depth = 1 + let shouldContinue = true + + newParentIndex = nextNodeIndex + + while (shouldContinue) { + const previousParent = await this.getNode(previousParentIndex, 
tx) + + if (previousParent.side === Side.Left) { + // found a node we can attach a child too; hook it up to the new chain of left nodes + const newNode = { + side: Side.Right, + leftIndex: previousParentIndex, + hashOfSibling: previousParent.hashOfSibling, + index: nextNodeIndex, + } + + await this.nodes.put(newNode, tx) + nextNodeIndex += 1 + + await this.counter.put('Nodes', nextNodeIndex, tx) + + if (!previousParent.parentIndex || isEmpty(previousParent.parentIndex)) { + const newParent = { + side: Side.Left, + parentIndex: 0, + hashOfSibling: this.merkleHasher.combineHash( + depth, + previousParent.hashOfSibling, + myHash, + ), + index: nextNodeIndex, + } + + await this.nodes.put(newParent, tx) + + await this.nodes.put( + { + side: Side.Left, + hashOfSibling: previousParent.hashOfSibling, + parentIndex: nextNodeIndex, + index: previousParentIndex, + }, + tx, + ) + + nextNodeIndex += 1 + await this.counter.put('Nodes', nextNodeIndex, tx) + } + + shouldContinue = false + } else { + // previous parent is a right node, gotta go up a step + myHash = this.merkleHasher.combineHash(depth, myHash, myHash) + + if (previousParent.leftIndex === undefined) + throw new UnexpectedDatabaseError(`Parent has no left sibling`) + + const leftSibling = await this.getNode(previousParent.leftIndex, tx) + + if (leftSibling.parentIndex === undefined) + throw new UnexpectedDatabaseError(`Left sibling has no parent`) + const leftSiblingParentIndex = leftSibling.parentIndex + + const newNode = { + side: Side.Left, + parentIndex: nextNodeIndex + 1, // where the next node will be (in the next iteration) + hashOfSibling: myHash, + index: nextNodeIndex, + } + await this.nodes.put(newNode, tx) + + nextNodeIndex += 1 + + await this.counter.put('Nodes', nextNodeIndex, tx) + + previousParentIndex = leftSiblingParentIndex + depth += 1 + } + } + } + + await this.counter.put('Leaves', indexOfNewLeaf + 1, tx) + + await this.leaves.put( + { + element, + merkleHash, + parentIndex: newParentIndex, + 
index: indexOfNewLeaf, + }, + tx, + ) + + await this.rehashRightPath(tx) + }, + ) + } + + /** + * Truncate the tree to the values it contained when it contained pastSize + * elements. + * + * After calling, it will contain at most pastSize elements, but truncating + * to a size that is higher than this.length is a no-op. + * + * This function doesn't do any garbage collection. The old leaves and nodes + * are still in the database, but they will be overwritten as the new tree + * grows. + */ + async truncate(pastSize: number, tx?: IDatabaseTransaction): Promise { + return await this.db.withTransaction( + tx, + [this.counter, this.leaves, this.nodes], + 'readwrite', + async (tx) => { + const oldSize = await this.getCount('Leaves', tx) + if (pastSize >= oldSize) { + return + } + + await this.counter.put('Leaves', pastSize, tx) + + if (pastSize === 0) { + await this.counter.put('Nodes', 1, tx) + return + } + + if (pastSize === 1) { + await this.counter.put('Nodes', 1, tx) + const firstLeaf = await this.getLeaf(0, tx) + firstLeaf.parentIndex = 0 + await this.leaves.put(firstLeaf, tx) + return + } + + const depth = depthAtLeafCount(pastSize) - 2 + const leaf = await this.getLeaf(pastSize - 1, tx) + let parentIndex = leaf.parentIndex + let maxParentIndex = parentIndex + + for (let i = 0; i < depth; i++) { + let parent = await this.getNode(parentIndex, tx) + + if (parent.side === Side.Right) { + Assert.isNotUndefined(parent.leftIndex) + parent = await this.getNode(parent.leftIndex, tx) + } + + Assert.isNotUndefined(parent.parentIndex) + parentIndex = parent.parentIndex + + if (parent.parentIndex > maxParentIndex) { + maxParentIndex = parent.parentIndex + } + } + + const parent = await this.getNode(parentIndex, tx) + + if (parent.side === Side.Right) { + // Indicates error in this method's code + throw new Error('Expected new root node to be a left node') + } + + parent.parentIndex = 0 + await this.nodes.put(parent, tx) + await this.counter.put('Nodes', maxParentIndex + 1, 
tx) + await this.rehashRightPath(tx) + }, + ) + } + + /** + * Calculate what the root hash was at the time the tree contained + * `pastSize` elements. Throws an error if the tree is empty, + * the request size is greater than the size of the tree, or the requested + * size is 0 + */ + async pastRoot(pastSize: number, tx?: IDatabaseTransaction): Promise { + return this.db.withTransaction( + tx, + [this.counter, this.leaves, this.nodes], + 'readwrite', + async (tx) => { + const leafCount = await this.getCount('Leaves', tx) + + if (leafCount === 0 || pastSize > leafCount || pastSize === 0) { + throw new Error( + `Unable to get past size ${pastSize} for tree with ${leafCount} nodes`, + ) + } + + const rootDepth = depthAtLeafCount(pastSize) + const minTreeDepth = Math.min(rootDepth, this.treeDepth) + const leafIndex = pastSize - 1 + const leaf = await this.getLeaf(leafIndex, tx) + + let currentHash = leaf.merkleHash + let currentNodeIndex = leaf.parentIndex + + if (isRight(leafIndex)) { + const sibling = await this.getLeaf(leafIndex - 1, tx) + const siblingHash = sibling.merkleHash + currentHash = this.merkleHasher.combineHash(0, siblingHash, currentHash) + } else { + currentHash = this.merkleHasher.combineHash(0, currentHash, currentHash) + } + + for (let depth = 1; depth < minTreeDepth; depth++) { + const node = await this.getNode(currentNodeIndex, tx) + + switch (node.side) { + case Side.Left: + Assert.isNotUndefined(node.parentIndex) + currentNodeIndex = node.parentIndex + currentHash = this.merkleHasher.combineHash(depth, currentHash, currentHash) + break + + case Side.Right: { + Assert.isNotUndefined(node.leftIndex) + const leftNode = await this.getNode(node.leftIndex, tx) + Assert.isNotUndefined(leftNode.parentIndex) + currentNodeIndex = leftNode.parentIndex + currentHash = this.merkleHasher.combineHash( + depth, + node.hashOfSibling, + currentHash, + ) + break + } + + default: + Assert.isUnreachable(node.side) + } + } + + for (let depth = rootDepth; depth < 
this.treeDepth; depth++) { + currentHash = this.merkleHasher.combineHash(depth, currentHash, currentHash) + } + + return currentHash + }, + ) + } + + /** + * Get the root hash of the tree. Throws an error if the tree is empty. + */ + async rootHash(tx?: IDatabaseTransaction): Promise { + const size = await this.size(tx) + return await this.pastRoot(size, tx) + } + + /** + * Check if the tree contained the given element when it was the given size. + * + * This is an inefficient linear scan. + */ + async contained(value: E, pastSize: number, tx?: IDatabaseTransaction): Promise { + return this.db.withTransaction( + tx, + [this.counter, this.leaves, this.nodes], + 'readwrite', + async (tx) => { + for (let i = 0; i < pastSize; i++) { + const leaf = await this.getLeafOrNull(i, tx) + + if (leaf === null) { + break + } + + if (this.merkleHasher.elementSerde().equals(value, leaf.element)) { + return true + } + } + return false + }, + ) + } + + /** + * Check if the tree currently contains the given element. + */ + async contains(value: E, tx?: IDatabaseTransaction): Promise { + return await this.contained(value, await this.size(), tx) + } + + /** + * Construct the proof that the leaf node at `position` exists. + * + * The length of the returned vector is the depth of the leaf node in the tree + * + * The leftmost value in the vector, the hash at index 0, is the hash of the + * leaf node's sibling. The rightmost value in the vector contains the hash of + * sibling of the child of the root node. + * + * The root hash is not included in the authentication path. + * + * returns null if there are no leaves or the position is not in the list. 
+ */ + async witness( + index: LeafIndex, + tx?: IDatabaseTransaction, + ): Promise | null> { + return this.db.withTransaction( + tx, + [this.counter, this.leaves, this.nodes], + 'readwrite', + async (tx) => { + const authenticationPath: WitnessNode[] = [] + + const leafCount = await this.size() + if (leafCount === 0 || index >= leafCount) { + return null + } + + const leaf = await this.getLeaf(index, tx) + let currentHash = leaf.merkleHash + let currentPosition = leaf.parentIndex as NodeIndex | undefined + + if (isRight(index)) { + const hashOfSibling = (await this.getLeaf(index - 1, tx)).merkleHash + authenticationPath.push({ side: Side.Right, hashOfSibling }) + currentHash = this.merkleHasher.combineHash(0, hashOfSibling, currentHash) + } else if (index < leafCount - 1) { + // Left leaf and have a right sibling + const hashOfSibling = (await this.getLeaf(index + 1, tx)).merkleHash + authenticationPath.push({ side: Side.Left, hashOfSibling }) + currentHash = this.merkleHasher.combineHash(0, currentHash, hashOfSibling) + } else { + // Left leaf and rightmost node + authenticationPath.push({ side: Side.Left, hashOfSibling: currentHash }) + currentHash = this.merkleHasher.combineHash(0, currentHash, currentHash) + } + + for (let depth = 1; depth < this.treeDepth; depth++) { + const node = + currentPosition !== undefined ? 
await this.getNodeOrNull(currentPosition, tx) : null + + if (node === null) { + authenticationPath.push({ side: Side.Left, hashOfSibling: currentHash }) + currentHash = this.merkleHasher.combineHash(depth, currentHash, currentHash) + } else if (node.side === Side.Left) { + authenticationPath.push({ side: Side.Left, hashOfSibling: node.hashOfSibling }) + currentHash = this.merkleHasher.combineHash(depth, currentHash, node.hashOfSibling) + currentPosition = node.parentIndex + } else { + authenticationPath.push({ side: Side.Right, hashOfSibling: node.hashOfSibling }) + currentHash = this.merkleHasher.combineHash(depth, node.hashOfSibling, currentHash) + Assert.isNotUndefined(node.leftIndex) + const leftSibling = await this.getNode(node.leftIndex, tx) + currentPosition = leftSibling.parentIndex + } + } + + return new Witness(leafCount, currentHash, authenticationPath, this.merkleHasher) + }, + ) + } + + /** + * Recalculate all the hashes between the most recently added leaf in the group + * and the root hash. + * + * `transaction` is passed in so that a rollback happens for the entire change + * if a conflict occurs. 
+ */ + private async rehashRightPath(tx: IDatabaseTransaction) { + let depth = 0 + const leafIndex = (await this.getCount('Leaves', tx)) - 1 + const leaf = await this.getLeaf(leafIndex, tx) + let parentIndex = leaf.parentIndex as NodeIndex | undefined + const leafHash = leaf.merkleHash + let parentHash + + if (isRight(leafIndex)) { + const leftSiblingIndex = leafIndex - 1 + const leftSibling = await this.getLeaf(leftSiblingIndex, tx) + const leftSiblingHash = leftSibling.merkleHash + parentHash = this.merkleHasher.combineHash(depth, leftSiblingHash, leafHash) + } else { + parentHash = this.merkleHasher.combineHash(depth, leafHash, leafHash) + } + + while (!isEmpty(parentIndex)) { + const node = await this.getNode(parentIndex, tx) + depth += 1 + + switch (node.side) { + case Side.Left: { + // Since we are walking the rightmost path, left nodes do not + // have right children. Therefore its sibling hash is set to its + // own hash and its parent hash is set to the combination of that hash + // with itself + await this.nodes.put( + { + side: Side.Left, + hashOfSibling: parentHash, + parentIndex: node.parentIndex, + index: parentIndex, + }, + tx, + ) + + parentIndex = node.parentIndex + parentHash = this.merkleHasher.combineHash(depth, parentHash, parentHash) + break + } + + case Side.Right: { + // since this is a new right node, we know that we have the correct + // hash because we set it correctly when we inserted it. But the left + // node needs to have its hashOfSibling set to our current hash. 
+ if (node.leftIndex === undefined) + throw new Error(`Expected node ${node.index} to have left node`) + + const leftNode = await this.getNode(node.leftIndex, tx) + + await this.nodes.put( + { + side: Side.Left, + parentIndex: leftNode.parentIndex, + hashOfSibling: parentHash, + index: node.leftIndex, + }, + tx, + ) + + parentIndex = leftNode.parentIndex + parentHash = this.merkleHasher.combineHash(depth, node.hashOfSibling, parentHash) + break + } + } + } + } +} + +/** + * Is the given leaf a right child or left child of its parent node. + * + * Leaves are added in order, so this is the same as asking if the index + * is an odd number + */ +function isRight(index: LeafIndex) { + return index % 2 === 1 +} + +/** + * Is the given node index the empty node above the root node? + */ +function isEmpty(index: NodeIndex | undefined): index is undefined | 0 { + return index === 0 || index === undefined +} + +/** + * The depth of the tree when it contains a certain number of leaf nodes + */ +export function depthAtLeafCount(size: number): number { + if (size === 0) { + return 0 + } + if (size === 1) { + return 1 + } + return Math.floor(Math.log2(size - 1)) + 2 +} + +export class UnexpectedDatabaseError extends Error { + constructor(message?: string) { + super(message || 'Inconsistent db state detected: Database was in an unexpected state') + } +} diff --git a/ironfish/src/captain/anchorChain/nullifiers.test.ts b/ironfish/src/captain/anchorChain/nullifiers.test.ts new file mode 100644 index 0000000000..f7876023b4 --- /dev/null +++ b/ironfish/src/captain/anchorChain/nullifiers.test.ts @@ -0,0 +1,43 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { NullifierHasher } from './nullifiers' + +describe('NullifierHasher', () => { + it('constructs a nullifier hasher', () => { + expect(new NullifierHasher()).toMatchSnapshot() + }) + + it('calculates some hashes', () => { + // These are arbitrary snapshots, but it tests that they don't change + const nullifier = Buffer.alloc(32) + expect(new NullifierHasher().merkleHash(nullifier)).toMatchSnapshot() + nullifier[0] = 8 + expect(new NullifierHasher().merkleHash(nullifier)).toMatchSnapshot() + nullifier[10] = 125 + expect(new NullifierHasher().merkleHash(nullifier)).toMatchSnapshot() + nullifier[28] = 2 + expect(new NullifierHasher().merkleHash(nullifier)).toMatchSnapshot() + nullifier[31] = 255 + expect(new NullifierHasher().merkleHash(nullifier)).toMatchSnapshot() + expect(nullifier).toMatchSnapshot() + }) + + it('combines hashes', () => { + const nullifier1 = Buffer.alloc(32) + const nullifier2 = Buffer.alloc(32) + expect(new NullifierHasher().combineHash(0, nullifier1, nullifier2)).toMatchSnapshot() + nullifier1[0] = 8 + expect(new NullifierHasher().combineHash(5, nullifier1, nullifier2)).toMatchSnapshot() + nullifier2[10] = 125 + expect(new NullifierHasher().combineHash(17, nullifier1, nullifier2)).toMatchSnapshot() + nullifier1[28] = 2 + expect(new NullifierHasher().combineHash(31, nullifier1, nullifier2)).toMatchSnapshot() + nullifier2[31] = 255 + expect(new NullifierHasher().combineHash(16, nullifier1, nullifier2)).toMatchSnapshot() + expect(new NullifierHasher().combineHash(12, nullifier1, nullifier2)).toMatchSnapshot() + expect(nullifier1).toMatchSnapshot() + expect(nullifier2).toMatchSnapshot() + }) +}) diff --git a/ironfish/src/captain/anchorChain/nullifiers.ts b/ironfish/src/captain/anchorChain/nullifiers.ts new file mode 100644 index 0000000000..a2c9a49866 --- /dev/null +++ b/ironfish/src/captain/anchorChain/nullifiers.ts @@ -0,0 +1,45 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { MerkleHasher } from './merkleTree' +import { BufferSerde } from '../../serde' + +import { createKeyed } from 'blake3-wasm' + +export type Nullifier = Buffer +export type NullifierHash = Buffer + +const NULLIFIER_KEY = Buffer.alloc(32, 'IRONFISH BLAKE3 NULLIFIER PRSNAL') +const COMBINE_KEY = Buffer.alloc(32, 'IRONFISH NULLIFIER COMBINE HASHS') + +export class NullifierHasher implements MerkleHasher { + _elementSerde: BufferSerde + _hashSerde: BufferSerde + + constructor() { + this._elementSerde = new BufferSerde(32) + this._hashSerde = new BufferSerde(32) + } + elementSerde(): BufferSerde { + return this._elementSerde + } + + hashSerde(): BufferSerde { + return this._hashSerde + } + + merkleHash(element: Nullifier): NullifierHash { + const hasher = createKeyed(NULLIFIER_KEY) + hasher.update(element) + return hasher.digest() + } + + combineHash(depth: number, left: NullifierHash, right: NullifierHash): NullifierHash { + const hasher = createKeyed(COMBINE_KEY) + hasher.update([depth]) + hasher.update(left) + hasher.update(right) + return hasher.digest() + } +} diff --git a/ironfish/src/captain/anchorChain/strategies/Transaction.ts b/ironfish/src/captain/anchorChain/strategies/Transaction.ts new file mode 100644 index 0000000000..bbf17e0ce1 --- /dev/null +++ b/ironfish/src/captain/anchorChain/strategies/Transaction.ts @@ -0,0 +1,82 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { Nullifier } from '../nullifiers' +import { VerificationResult } from '../blockchain/VerificationResult' + +export interface Spend { + nullifier: Nullifier + commitment: H + size: number +} + +export default interface Transaction { + /** + * Verify whether or not all the transactions in the list are valid proofs. + */ + verify(): VerificationResult + + /** + * The number of notes in the transaction. + */ + notesLength(): number + + /** + * Iterate over all the notes created by this transaction. + */ + notes(): Iterable + + /** + * The number of spends in the transaction. + */ + spendsLength(): number + + /** + * Iterate over all the spends in the transaction. A spend includes a nullifier, + * indicating that a note was spent, and a commitment committing to + * the root hash and tree size at the time the note was spent. + */ + spends(): Iterable> + + /** + * Preallocate any resources necessary for using the transaction. + */ + takeReference(): unknown + + /** + * Return any resources necessary for using the transaction. + */ + returnReference(): void + + /** + * Wraps the given callback in takeReference and returnReference. + */ + withReference(callback: (transaction: unknown) => R): R + + /** + * Get the transaction fee for this transactions. + * + * In general, each transaction has outputs lower than the amount spent; the + * miner can collect the difference as a transaction fee. + * + * In a block header's minersFee transaction, the opposite happens; + * the miner creates a block with zero spends and output equal to the sum + * of the miner's fee for the block's transaction, plus the block chain's + * mining reward. + * + * The transaction fee is the difference between outputs and spends on the + * transaction. + */ + transactionFee(): bigint + + /** + * Get transaction signature for this transaction. + */ + transactionSignature(): Buffer + + /** + * Get the transaction hash. 
+ */ + transactionHash(): Buffer +} diff --git a/ironfish/src/captain/anchorChain/strategies/index.ts b/ironfish/src/captain/anchorChain/strategies/index.ts new file mode 100644 index 0000000000..dd2b7a60ef --- /dev/null +++ b/ironfish/src/captain/anchorChain/strategies/index.ts @@ -0,0 +1,87 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { MerkleHasher } from '../merkleTree' +import { NullifierHash, Nullifier } from '../nullifiers' +import { BlockHash } from '../blockchain/BlockHeader' +import Transaction from './Transaction' +import Serde, { JsonSerializable } from '../../../serde' +import Verifier from '../../Verifier' +import Blockchain from '../blockchain' + +export { default as Transaction, Spend } from './Transaction' + +/** + * Strategy to allow anchor chain to remain + * generic across computations. + * Methods give access to the hasher and nullifier hasher + * and custom calculations for block hash, target, + * and miner's fee. + */ +export default interface Strategy< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> { + /** + * Create a verifier used to validate consensus + */ + createVerifier(chain: Blockchain): Verifier + + /** + * Get the hasher used to calculate hashes of notes in the tree. + */ + noteHasher(): MerkleHasher + + /** + * Get the hasher used to calculate hashes of nullifiers. Note that you + * probably want to use a NullifierHasher here. + */ + nullifierHasher(): MerkleHasher + + /** + * Get the object that can serialize and deserialize lists of transactions. + */ + transactionSerde(): Serde + + /** + * Given the serialized bytes of a block header, return a 32-byte hash of that block. 
+ * + * Note: in Ironfish, the hashing algorithm is hard-coded into the mining thread, + * and hashBlockHeader must always return the result of miningAlgorithm.hashBlockHeader. + * + * Ideally we could remove this method altogether, but unit tests rely + * on it heavily. + */ + hashBlockHeader(header: Buffer): BlockHash + + /** + * Create the miner's fee transaction for a given block. + * + * The miner's fee is a special transaction with one receipt and + * zero spends. Its receipt value must be the total transaction fees + * in the block plus the mining reward for the block. + * + * The mining reward may change over time, so we accept the block sequence + * to calculate the mining reward from. + * + * @param totalTransactionFees is the sum of the transaction fees intended to go + * in this block. + * @param blockSequence the sequence of the block for which the miner's fee is being created + * @param minerKey the spending key for the miner. + */ + createMinersFee( + totalTransactionFees: bigint, + blockSequence: bigint, + minerKey: string, + ): Promise + + /** + * Calculate the mining reward for a block based on its sequence + */ + miningReward(blockSequence: bigint): number +} diff --git a/ironfish/src/captain/blockSyncer.test.ts b/ironfish/src/captain/blockSyncer.test.ts new file mode 100644 index 0000000000..9cc647df2d --- /dev/null +++ b/ironfish/src/captain/blockSyncer.test.ts @@ -0,0 +1,298 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { BlockSerde } from './anchorChain/blockchain/Block' +import Target from './anchorChain/blockchain/Target' +import { BlockSyncer, Validity } from '.' 
+import { RangeHasher } from './anchorChain/merkleTree' +import { Assert } from '../assert' +import { Direction, IncomingPeerMessage } from '../network' +import { BufferSerde } from '../serde' +import { + makeCaptainSyncable, + makeCaptain, + response, + request, + TestBlockSyncer, + blockHash, + makeFakeBlock, + makeChain, + SerializedTestTransaction, + TestStrategy, + TestCaptain, + TestBlockchain, +} from './testUtilities' +import { StringUtils } from '../utils' + +import { BlockRequest, BlocksResponse, MessageType } from './messages' +import { createRootLogger } from '../logger' +import { NetworkBlockType } from './blockSyncer' + +const serializedBlockHash = (position: number): string => { + const hash = blockHash(position) + return new BufferSerde(32).serialize(hash) +} + +describe('BlockSyncer', () => { + describe('Handlers', () => { + const strategy = new TestStrategy(new RangeHasher()) + let syncer: TestBlockSyncer + let targetSpy: jest.SpyInstance + + beforeEach(async () => { + targetSpy = jest.spyOn(Target, 'minDifficulty').mockImplementation(() => BigInt(1)) + const captain = await makeCaptain(strategy) + syncer = new BlockSyncer(captain, createRootLogger()) + }) + + afterAll(() => { + targetSpy.mockClear() + }) + + it('constructs a block syncer', () => { + expect(syncer).toBeDefined() + }) + + it('handler returns the heaviest block if forwards direction request cannot be fulfilled', async () => { + const request: IncomingPeerMessage = { + peerIdentity: 'somebody', + message: { + type: MessageType.Blocks, + payload: { + hash: Buffer.from(StringUtils.hash('blockyoudonthave')).toString('hex'), + nextBlockDirection: true, + }, + }, + } + const { blocks } = await syncer.handleBlockRequest(request) + // the test blockchain comes with 9 blocks + expect(Number(blocks[0].header.sequence)).toBe(9) + }) + + it('handler returns the requested block with hash only', async () => { + const request: IncomingPeerMessage = { + peerIdentity: 'somebody', + message: { + 
type: MessageType.Blocks, + payload: { + hash: serializedBlockHash(6), + nextBlockDirection: false, + }, + }, + } + const { blocks } = await syncer.handleBlockRequest(request) + expect(blocks.length).toBe(1) + const block = syncer.blockSerde.deserialize(blocks[0]) + expect(block).toBeTruthy() + }) + }) + + describe('RequestOneBlock', () => { + const strategy = new TestStrategy(new RangeHasher()) + const blockSerde = new BlockSerde(strategy) + let onRequestBlockSpy: jest.SpyInstance + let syncer: TestBlockSyncer + let targetSpy: jest.SpyInstance + let spyQueue: jest.SpyInstance + + beforeEach(async () => { + targetSpy = jest.spyOn(Target, 'minDifficulty').mockImplementation(() => BigInt(1)) + const captain = await makeCaptainSyncable(strategy) + syncer = new BlockSyncer(captain, createRootLogger()) + spyQueue = jest.spyOn(syncer, 'addBlockToProcess') + onRequestBlockSpy = jest.spyOn(syncer.captain.onRequestBlocks, 'emit') + spyQueue.mockReset() + onRequestBlockSpy.mockReset() + }) + + afterAll(() => { + targetSpy.mockClear() + }) + + it('successfully requests next block from genesis', async () => { + const block = makeFakeBlock(strategy, blockHash(5), blockHash(6), 6, 6, 9) + block.header.graphId = -1 + const serializedBlock = blockSerde.serialize(block) + + const blocksResponse: IncomingPeerMessage< + BlocksResponse + > = { + peerIdentity: 'somebody', + message: { + type: MessageType.Blocks, + direction: Direction.response, + rpcId: 1, + payload: { blocks: [serializedBlock] }, + }, + } + const heaviestHead = await syncer.chain.getHeaviestHead() + Assert.isNotNull(heaviestHead) + syncer.requestOneBlock({ hash: heaviestHead.hash, nextBlockDirection: true }) + expect(onRequestBlockSpy).toHaveBeenCalledWith(heaviestHead.hash, true) + + const request: BlockRequest = { + type: MessageType.Blocks, + payload: { + hash: heaviestHead.hash.toString('hex'), + nextBlockDirection: true, + }, + } + + syncer.handleBlockResponse(blocksResponse, request) + await 
syncer.blockRequestPromise + + expect(spyQueue).toHaveBeenCalledWith(block, NetworkBlockType.SYNCING) + await syncer.shutdown() + }) + + it('fails if the block cannot be deserialized', async () => { + const blocksResponse: IncomingPeerMessage< + BlocksResponse + > = { + peerIdentity: 'somebody', + message: { + type: MessageType.Blocks, + direction: Direction.response, + rpcId: 1, + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + payload: { blocks: undefined! }, + }, + } + const hash = Buffer.from('') + syncer.requestOneBlock({ hash, nextBlockDirection: false }) + expect(onRequestBlockSpy).toHaveBeenCalledWith(hash, false) + + const request: BlockRequest = { + type: MessageType.Blocks, + payload: { + hash: hash.toString('hex'), + nextBlockDirection: true, + }, + } + + syncer.handleBlockResponse(blocksResponse, request) + await syncer.blockRequestPromise + + expect(spyQueue).toHaveBeenCalledTimes(0) + await syncer.shutdown() + }) + }) + + describe('Requesting blocks until synced', () => { + const strategy = new TestStrategy(new RangeHasher()) + let syncer: TestBlockSyncer + let syncedSyncer: TestBlockSyncer + let dbnum = 0 + let databasePrefix: string + let syncedSyncerDBPrefix: string + let targetSpy: jest.SpyInstance + let captain: TestCaptain + let fullChain: TestBlockchain + let requestBlockSpy: jest.SpyInstance + + const createCaptain = async (synced: 'SYNCED' | 'EMPTY' | 'OUT OF SYNC') => { + databasePrefix = `optimistic_sync_test_db_${dbnum++}` + fullChain = await makeChain(strategy, `${databasePrefix}-fullchain`) + if (synced === 'SYNCED') { + captain = await makeCaptain(strategy, databasePrefix) + } else if (synced === 'EMPTY') { + captain = await makeCaptainSyncable(strategy, databasePrefix, false) + } else if (synced === 'OUT OF SYNC') { + captain = await makeCaptainSyncable(strategy, databasePrefix, true) + } + + captain.onRequestBlocks.on(async (hash, nextBlockDirection) => { + const message: BlockRequest = { + type: 
MessageType.Blocks, + payload: { + hash: hash?.toString('hex'), + nextBlockDirection: nextBlockDirection, + }, + } + + const formattedResponse = await syncedSyncer.handleBlockRequest( + request(message.payload), + ) + syncer.handleBlockResponse(response(formattedResponse), message) + }) + + syncer = new BlockSyncer(captain, createRootLogger()) + jest.spyOn(syncer.chain.verifier, 'isAddBlockValid').mockResolvedValue({ + valid: Validity.Yes, + }) + requestBlockSpy = jest.spyOn(captain, 'requestBlocks') + } + + const areChainHeadsEqual = async ( + chainOriginal: TestBlockchain, + chainToSync: TestBlockchain, + ): Promise => { + const chainedHeaviestHeadHeader = await chainOriginal.getHeaviestHead() + const syncedHeaviestHeadHeader = await chainToSync.getHeaviestHead() + Assert.isNotNull(chainedHeaviestHeadHeader) + Assert.isNotNull(syncedHeaviestHeadHeader) + expect(Number(syncedHeaviestHeadHeader?.sequence)).toEqual( + Number(chainedHeaviestHeadHeader?.sequence), + ) + } + + beforeEach(async () => { + targetSpy = jest.spyOn(Target, 'minDifficulty').mockImplementation(() => BigInt(1)) + + syncedSyncerDBPrefix = `synced_syncer_test_db_${dbnum++}` + + syncedSyncer = new BlockSyncer( + await makeCaptain(strategy, syncedSyncerDBPrefix), + createRootLogger(), + ) + }) + + afterEach(async () => { + await syncer.shutdown() + targetSpy.mockClear() + }) + + it('makes only latest call when run on a fully synced chain', async () => { + await createCaptain('SYNCED') + + await syncer.start() + await syncer.shutdown() + + expect(requestBlockSpy).toBeCalledTimes(1) + }) + + it('fully syncs a chain from scratch when chain is empty', async () => { + await createCaptain('EMPTY') + + await syncer.start() + + // 8 blocks missing in the chain + for (let i = 0; i <= 8; i++) { + await syncer['blockSyncPromise'] + await syncer['blockRequestPromise'] + } + + expect(requestBlockSpy).toBeCalledTimes(9) + + await syncer['blockSyncPromise'] + await areChainHeadsEqual(fullChain, captain.chain) + 
}) + + it('syncs missing blocks when chain is out of sync', async () => { + await createCaptain('OUT OF SYNC') + + await syncer.start() + + // 6 blocks missing in the chain + for (let i = 0; i <= 6; i++) { + await syncer['blockRequestPromise'] + await syncer['blockSyncPromise'] + } + + expect(requestBlockSpy).toBeCalledTimes(7) + + await areChainHeadsEqual(fullChain, captain.chain) + }) + }) +}) diff --git a/ironfish/src/captain/blockSyncer.ts b/ironfish/src/captain/blockSyncer.ts new file mode 100644 index 0000000000..6c37be64fa --- /dev/null +++ b/ironfish/src/captain/blockSyncer.ts @@ -0,0 +1,629 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import Blockchain, { AddBlockResult } from './anchorChain/blockchain' +import Block, { BlockSerde, SerializedBlock } from './anchorChain/blockchain/Block' +import { BlockHash } from './anchorChain/blockchain/BlockHeader' +import Transaction from './anchorChain/strategies/Transaction' +import { BlockRequest } from './messages' +import { + CannotSatisfyRequestError, + IncomingPeerMessage, + MessagePayload, + RPC_TIMEOUT_MILLIS, +} from '../network' +import Serde, { BufferSerde, JsonSerializable } from '../serde' +import { MetricsMonitor, Meter } from '../metrics' +import Captain from '.' +import { BlocksResponse } from '.' 
+import { Logger } from '../logger' +import LeastRecentlyUsed from 'lru-cache' +import { ErrorUtils } from '../utils' +import { Assert } from './../assert' + +export const MAX_MESSAGE_SIZE = 500000 // 0.5 MB +export const MAX_BLOCKS_PER_MESSAGE = 1 + +export const ALLOWED_TRANSITIONS_TO_FROM = { + ['STARTING']: ['STOPPED'], + ['SYNCING']: ['IDLE', 'REQUESTING', 'SYNCING'], + ['IDLE']: ['SYNCING', 'REQUESTING', 'STARTING'], + ['STOPPING']: ['IDLE', 'SYNCING', 'REQUESTING'], + ['STOPPED']: ['STOPPING'], + ['REQUESTING']: ['SYNCING', 'IDLE'], +} + +/** + * Responsible for the metrics used in the status command. + */ +export type BlockSyncerChainStatus = { + blockAddingSpeed: Meter + speed: Meter +} + +export type Request = { + hash: BlockHash + nextBlockDirection?: boolean +} + +export enum NetworkBlockType { + GOSSIP = 'GOSSIP', + SYNCING = 'SYNCING', +} + +export type BlockToProcess< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> = { + block: Block + type: NetworkBlockType +} + +type ExcludeTypeKey = K extends 'type' ? never : K +type ExcludeTypeField = { [K in ExcludeTypeKey]: A[K] } +type ExtractActionParameters = A extends { type: Type } ? 
ExcludeTypeField : never + +type ActionIdle = { type: 'IDLE' } +type ActionStopped = { type: 'STOPPED' } +type ActionStarting = { type: 'STARTING' } +type ActionStopping = { type: 'STOPPING' } +type ActionRequest = { + type: 'REQUESTING' + request: Request +} +type ActionSyncing< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> = { + type: 'SYNCING' + block: BlockToProcess +} +type ActionState< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> = + | ActionRequest + | ActionStopped + | ActionStarting + | ActionStopping + | ActionIdle + | ActionSyncing + +type ActionType< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> = ActionState['type'] + +/** + * Responsible for syncing blocks with the chain. + * + * Blocks can be synced from three different sources + * - from a gossip message in the networking layer + * - from the mining director when a miner successfully added a block + * - from a network response following a request + * + * Blocks are added in the queue blocksForProcessing and missing blocks + * between the heaviest chain and the latest block are requested + * + * @remarks + * Note the *heaviest* chain is the chain of blocks that we currently believe + * has accrued the most work, based on the blocks we have actually received. + * + * The *latest* chain is the one that starts from the block that the network claims + * was mined most recently. + * + * @typeParam E WasmNoteEncrypted + * Note element stored in transactions and the notes Merkle Tree + * @typeParam H WasmNoteEncryptedHash + * the hash of an `E`. Used for the internal nodes and root hash + * of the notes Merkle Tree + * @typeParam T Transaction + * Type of a transaction stored on Captain's chain. 
+ * @typeParam SE SerializedWasmNoteEncrypted + * @typeParam SH SerializedWasmNoteEncryptedHash + * @typeParam ST SerializedTransaction + * The serialized format of a `T`. Conversion between the two happens + * via the `strategy`. + */ +export class BlockSyncer< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> { + hashSerde: Serde + blockSerde: Serde, SerializedBlock> + chain: Blockchain + private metrics: MetricsMonitor + + private _state: Readonly> = { + type: 'STOPPED', + } + + public status: BlockSyncerChainStatus + + private blockSyncPromise: Promise + public blockRequestPromise: Promise + + /** + * Reference blocks that we most recently got a request for. + */ + recentBlocks: LeastRecentlyUsed> + + /** + * Think of this like callbacks for the network bridge to use when we get + * a response to our block request, mapping the request we've made to the response + */ + private blockRequests = new Map< + string, + { + resolve: (message: BlocksResponse) => void + reject: (error?: unknown) => void + } + >() + + blocksForProcessing: BlockToProcess[] + + logger: Logger + + /** + * construct a new BlockSyncer + * + * @param captain Reference to the Captain object, which holds + * the AnchorChain that lets us interact with the local chain. + */ + constructor(readonly captain: Captain, logger: Logger) { + this.hashSerde = new BufferSerde(32) + this.blockSerde = new BlockSerde(captain.strategy) + this.metrics = captain.metrics + this.chain = this.captain.chain + this.blockSyncPromise = Promise.resolve() + this.blockRequestPromise = Promise.resolve() + this.logger = logger + this.recentBlocks = new LeastRecentlyUsed(500) + this.blocksForProcessing = [] + + this.status = { + blockAddingSpeed: this.metrics.addMeter(), + speed: this.metrics.addMeter(), + } + } + + get state(): Readonly> { + return this._state + } + + /** + * Start the tasks for requesting the latest blocks and for optimistic sync. 
+ */ + async start(): Promise { + this.dispatch('STARTING') + this.dispatch('IDLE') + + const heaviestHead = await this.chain.getHeaviestHead() + Assert.isNotNull(heaviestHead) + this.dispatch('REQUESTING', { + request: { hash: heaviestHead.hash, nextBlockDirection: true }, + }) + } + + dispatch>( + type: Type, + args?: ExtractActionParameters, Type>, + ): void { + const { type: fromType } = this.state + + if (!ALLOWED_TRANSITIONS_TO_FROM[type].includes(fromType)) { + return + } + + let action + switch (type) { + case 'IDLE': + action = { type, ...args } as ActionIdle + this._state = action + this.getNextBlockToSync() + break + case 'REQUESTING': + action = { type, ...args } as ActionRequest + this._state = action + this.requestOneBlock(action.request) + break + case 'SYNCING': + action = { type, ...args } as ActionSyncing + this._state = action + this.blockRequester(action.block) + break + default: + action + this._state = { type } as { type: 'STOPPING' | 'STOPPED' | 'STARTING' } + } + } + + addBlockToProcess(block: Block, type: NetworkBlockType): void { + if ( + this.blocksForProcessing && + this.blocksForProcessing[0] && + block.header.sequence <= this.blocksForProcessing[0].block.header.sequence + ) { + this.blocksForProcessing.unshift({ block, type }) + } else { + this.blocksForProcessing.push({ block, type }) + } + + this.getNextBlockToSync() + } + + getNextBlockToSync(): void { + if (this.state.type !== 'IDLE') return + + const nextBlockToProcess = this.blocksForProcessing.shift() + + if (nextBlockToProcess) this.dispatch('SYNCING', { block: nextBlockToProcess }) + } + + /** + * Instruct all requesting tasks to shut down. + * + * Does not resolve until all outstanding promises have terminated. 
+ */ + async shutdown(): Promise { + if (this.state.type === 'STOPPED' || this.state.type === 'STOPPING') return + + this.dispatch('STOPPING') + await this.blockRequestPromise + await this.blockSyncPromise + this.dispatch('STOPPED') + } + + async handleBlockRequestHelper( + message: IncomingPeerMessage, + ): Promise[]> { + const date = Date.now() + const blocks: SerializedBlock[] = [] + const hash = + message.message.payload.hash !== null + ? this.hashSerde.deserialize(message.message.payload.hash) + : null + if (!hash) { + throw new CannotSatisfyRequestError(`Couldn't deserialize request`) + } + + if (message.message.payload.nextBlockDirection) { + const nextBlocks = await this.chain.hashToNext.get(hash) + if (!nextBlocks) return [] + + for (const nextHash of nextBlocks) { + const block = await this.getBlock(null, nextHash) + if (block) { + const serialized = this.blockSerde.serialize(block) + blocks.push(serialized) + } + } + } else { + // request is for a specific block + const block = await this.getBlock(null, hash) + if (!block) { + throw new CannotSatisfyRequestError( + `Don't have requested block ${hash.toString('hex')}`, + ) + } + + const serialized = this.blockSerde.serialize(block) + blocks.push(serialized) + } + + const direction = message.message.payload.nextBlockDirection ? 'FORWARDS' : 'BACKWARDS' + this.logger.debug( + `Responding to ${hash.toString('hex')} in ${direction} direction with ${ + blocks.length + } blocks — ${Date.now() - date} ms`, + ) + + return blocks + } + + /** + * Handle an incoming request for a specific block + * or request for next block given a hash + * + * @returns a promise that resolves to the requested block, or throws a + * CannotSatisfyRequest error if we don't have it. 
+ */ + async handleBlockRequest( + message: IncomingPeerMessage, + ): Promise>> { + const blocks: SerializedBlock[] = await this.handleBlockRequestHelper(message) + + if (blocks.length == 0) { + const heaviestHead = await this.chain.getHeaviestHead() + Assert.isNotNull(heaviestHead) + + const hash = + message.message.payload.hash !== null + ? this.hashSerde.deserialize(message.message.payload.hash) + : null + // if the request is for a block ahead of heaviest, return [] + if (hash && this.hashSerde.equals(hash, heaviestHead.hash)) { + return { blocks: [] } + } + + // then the request was for blocks in the forwards direction that we didn't have + // in this case we'll send them our heaviest head instead + const block = await this.getBlock(null, heaviestHead.hash) + Assert.isNotNull(block) + + const serialized = this.blockSerde.serialize(block) + blocks.push(serialized) + } + + const response = { blocks: blocks } + + return response + } + + /** Called when a BlockResponse has been returned in response to a BlockRequest. + * It resolves any pending requests for this block by sequence or hash. + */ + handleBlockResponse( + message: IncomingPeerMessage>, + originalRequest: BlockRequest, + ): void { + const request = this.blockRequests.get(this.getCacheKey(null, originalRequest.payload.hash)) + + request?.resolve(message.message) + } + + /** Handler for when an error occurs when trying to + * process a pending block request. */ + handleBlockRequestError(originalRequest: BlockRequest, error?: unknown): void { + const request = this.blockRequests.get(this.getCacheKey(null, originalRequest.payload.hash)) + + request?.reject(error) + } + + /** + * Fill in any gaps between the latest block tail and heaviest head, + * and between the heaviest tail and the genesis block. + * + * 1. Get latest block + * 2. If their latest is ahead of our heaviest block, request blocks from latest to heaviest + * 3. Repeat from the tail of the heaviest chain + * 4. 
stop conditions: + * - latest is not ahead of heaviest + */ + blockRequester(blockToProcess: BlockToProcess): void { + // if the latest block we've processed is ahead of the head, ask for head + 1 sequence blocks + this.blockSyncPromise = (async () => { + const time = Date.now() + const latestBlock = blockToProcess.block + const addBlockResult: AddBlockResult = await this.chain.addBlock(latestBlock) + const timeToAddBlock = Date.now() - time + this.logger.debug(`Adding block took ${timeToAddBlock} ms`) + + // Metrics status update + this.status.speed.add(1) + this.status.blockAddingSpeed.add(timeToAddBlock) + + if (!addBlockResult.isAdded || !addBlockResult.resolvedGraph) { + this.logger.debug( + `Block ${latestBlock.header.hash.toString('hex')} ${ + latestBlock.header.sequence + } is either already added, or invalid`, + ) + this.dispatch('IDLE') + return + } + + // if we added a gossip block that is connected to genesis, + // we dont need to request later blocks + if ( + blockToProcess.type === NetworkBlockType.GOSSIP && + addBlockResult.connectedToGenesis + ) { + this.dispatch('IDLE') + return + } + let request: Request + + // is the block we added connected to genesis or is it an island graph? 
+ if (addBlockResult.connectedToGenesis) { + Assert.isNotNull(addBlockResult.resolvedGraph.heaviestHash) + // then we request the next block in the forward direction + request = { + hash: addBlockResult.resolvedGraph.heaviestHash, + nextBlockDirection: true, + } + this.logger.debug( + `Requesting NEXT block from ${addBlockResult.resolvedGraph.heaviestHash.toString( + 'hex', + )}`, + ) + } else { + // we just added an island, so we want to request the previous block of the tail + // for the resolved graph (until it's no longer an island and connects to genesis) + const tailHeader = await this.chain.getBlockHeader( + addBlockResult.resolvedGraph.tailHash, + ) + Assert.isNotNull(tailHeader) + this.logger.debug( + `Requesting BACKWARDS block ${tailHeader.previousBlockHash.toString( + 'hex', + )} from resolved tail of an island block`, + ) + + // this should never happen + if (tailHeader.sequence === BigInt(1)) { + throw new Error(`Chain in bad state - can't request block before genesis`) + } + + request = { + hash: tailHeader.previousBlockHash, + nextBlockDirection: false, + } + } + + this.dispatch('REQUESTING', { request }) + })() + } + + /** Starts a pending request for a block by hash + * + * The returning promise resolves when the block is received + * through handleBlockResponse rejected if the request times + * out, or errors. 
+ */ + async requestBlocks(originalRequest: Request): Promise | null> { + const key = this.getCacheKey(null, originalRequest.hash) + + return new Promise>((resolve, reject) => { + const timeout = setTimeout( + () => reject(`Request block timeout exceeded ${RPC_TIMEOUT_MILLIS}`), + RPC_TIMEOUT_MILLIS, + ) + + const request = { + resolve: (...args: Parameters): void => { + this.blockRequests.delete(key) + clearTimeout(timeout) + resolve(...args) + }, + reject: (...args: Parameters): void => { + this.blockRequests.delete(key) + clearTimeout(timeout) + reject(...args) + }, + } + + this.blockRequests.set(key, request) + this.captain.requestBlocks(originalRequest.hash, !!originalRequest.nextBlockDirection) + }) + } + + /** + * Request a single block. + * + * @remarks This may be used to request a specific block, or the latest block. + * + * @param requestId An identifier for the request. + * @param hash A hash to index a request from. If not defined, the request is + * for the latest hash. + * @param sequence The returned block will be for the block that has the + * given sequence and is before `hash`. + */ + requestOneBlock(request: Request): void { + this.blockRequestPromise = (async () => { + const hash = request.hash + + // Already requesting this block + if (this.blockRequests.has(this.getCacheKey(null, hash))) { + this.dispatch('IDLE') + return + } + + let response + const time = Date.now() + try { + response = await this.requestBlocks(request) + } catch (error: unknown) { + this.logger.debug( + `Request for ${request.hash.toString('hex')} ${ + request.nextBlockDirection ? 
'FORWARDS' : 'BACKWARDS' + } failed: ${ErrorUtils.renderError(error)}`, + ) + + // If a request fails because of a disconnect, we may be stopping because were no longer connected to the network + if (this.state.type === 'STOPPING' || this.state.type === 'STOPPED') { + return + } + + this.dispatch('IDLE') + return + } + + if (!response) { + this.logger.debug( + `Request for ${request.hash.toString('hex')} ${ + request.nextBlockDirection ? 'FORWARDS' : 'BACKWARDS' + } came back with nothing`, + ) + + this.dispatch('IDLE') + return + } + + this.logger.debug( + `Request for ${request.hash.toString('hex')} ${ + request.nextBlockDirection ? 'FORWARDS' : 'BACKWARDS' + } resolved in ${Date.now() - time}ms`, + ) + + let block + try { + const blocks = response.payload.blocks + for (const serializedBlock of blocks) { + block = this.blockSerde.deserialize(serializedBlock) + + // TODO Network serialization for Block would be great here + block.header.isValid = false + block.header.work = BigInt(0) + block.header.graphId = -1 + + this.addBlockToProcess(block, NetworkBlockType.SYNCING) + } + } catch { + this.logger.debug(`Couldn't deserialize incoming block`) + this.dispatch('IDLE') + return + } + + this.dispatch('IDLE') + })() + } + + getCacheKey( + sequence: string | BigInt | undefined | null, + hash: string | Buffer | null, + ): string { + if (Buffer.isBuffer(hash)) { + return `${hash.toString('hex') || ''}-${sequence?.toString() || ''}`.toLowerCase() + } + return `${hash || ''}-${sequence?.toString() || ''}`.toLowerCase() + } + + async getBlock( + sequence: BigInt | null, + hash: Buffer, + ): Promise | null> { + const cacheKey = this.getCacheKey(sequence, hash) + + const cachedBlock = this.recentBlocks.get(cacheKey) + + if (cachedBlock) { + return cachedBlock + } else { + const block = await this.chain.getBlock(hash) + if (block) this.recentBlocks.set(cacheKey, block) + return block + } + } +} diff --git a/ironfish/src/captain/index.test.ts 
b/ironfish/src/captain/index.test.ts new file mode 100644 index 0000000000..53c3dc5d44 --- /dev/null +++ b/ironfish/src/captain/index.test.ts @@ -0,0 +1,35 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { RangeHasher } from './anchorChain/merkleTree' +import { makeCaptain, TestCaptain, TestStrategy } from './testUtilities' +import { MessageType } from './messages' + +describe('Captain', () => { + const strategy = new TestStrategy(new RangeHasher()) + let captain: TestCaptain + + beforeEach(async () => { + captain = await makeCaptain(strategy) + + captain.onRequestBlocks.on((hash, nextBlockDirection) => { + captain.blockSyncer.handleBlockRequestError({ + type: MessageType.Blocks, + payload: { + hash: hash?.toString(), + nextBlockDirection: nextBlockDirection, + }, + }) + }) + }) + + it('constructs a Captain object', () => { + expect(captain).toBeDefined() + }) + + it('starts and stops a Captain object', async () => { + expect(() => captain.start()).not.toThrow() + await expect(captain.shutdown()).resolves.not.toBeDefined() + }) +}) diff --git a/ironfish/src/captain/index.ts b/ironfish/src/captain/index.ts new file mode 100644 index 0000000000..75d37a51c5 --- /dev/null +++ b/ironfish/src/captain/index.ts @@ -0,0 +1,234 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { default as Block, BlockSerde } from './anchorChain/blockchain/Block' +import Strategy from './anchorChain/strategies' +import Transaction from './anchorChain/strategies/Transaction' +import { BlocksResponse } from './messages' +import { Event } from '../event' +import { MetricsMonitor } from '../metrics' +import { createRootLogger, Logger } from '../logger' +import { JsonSerializable } from '../serde' + +import { BlockSyncer, BlockSyncerChainStatus } from './blockSyncer' +import { IDatabase } from '../storage' +import Verifier from './Verifier' +import Blockchain from './anchorChain/blockchain' + +export { Assert } from '../assert' +export { + Block, + BlockHash, + BlockHeader, + BlockHeaderSerde, + BlockSerde, + Target, + Validity, + VerificationResult, + GENESIS_BLOCK_PREVIOUS, + GENESIS_BLOCK_SEQUENCE, + SerializedBlock, + SerializedBlockHeader, +} from './anchorChain/blockchain' +export { Nullifier, NullifierHash, NullifierHasher } from './anchorChain/nullifiers' +export { default as Strategy, Transaction, Spend } from './anchorChain/strategies' +export { + default as MerkleTree, + MerkleHasher, + RangeHasher, + Side as WitnessSide, +} from './anchorChain/merkleTree' +export { + default as Witness, + WitnessNode, + SerializedWitnessNode, +} from './anchorChain/merkleTree/Witness' + +// Exports used in testUtilities +export { BlockSyncer, BlockSyncerChainStatus, BlocksResponse, Verifier } + +/** + * Captain ensures that the chain is kept in sync with the latest version + * of the network. 
+ * + * It does the following tasks: + * * Request the head of the heaviest chain from a randomly chosen peer to make + * sure it's up to date + * * Request notes and nullifiers if the chain is not currently connected + * to the trees + * * Optimistically sync blocks if the chain is connected to the trees, but + * either its head is not its latest or its tail is not the genesis block + * * Verify incoming blocks and transactions before allowing them to be + * gossip'd + * * Respond to requests for notes, nullifiers, or blocks + * + * + * Captain is also responsible for routing miner and client events out to the network. + * These include: + * * Newly mined blocks + * * New transactions to be spent + * + * Finally Captain is responsible for routing network events out to the miner: + * * New transactions that need to be mined + * + * @typeParam E Note element stored in the notes Merkle Tree and emitted in transactions + * @typeParam H the hash of an `E`. Used for the internal nodes and root hash + * of the notes Merkle Tree + * @typeParam T Type of a transaction stored on Captain's chain. + * @typeParam ST The serialized format of a `T`. Conversion between the two happens + * via the `strategy`. + */ +export default class Captain< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> { + /** + * Blockchain strategy that tells us how to hash and serialize stuff. + */ + strategy: Strategy + /** + * The blockchain and two global merkle trees for notes and nullifiers + */ + chain: Blockchain + /** + * Responsible for syncing blocks that we don't have yet. + */ + blockSyncer: BlockSyncer + /** + * Serializer for blocks + */ + blockSerde: BlockSerde + /** + * Logger instance used in place of console logs + */ + logger: Logger + /** + * Metrics monitor to record performance based metrics + */ + metrics: MetricsMonitor + + /** + * Emitted when a new block has been created, such as + * when a new block has been mined. 
+ */ + onNewBlock = new Event<[Block]>() + /** Emitted when a block is being requested by header hash or sequence */ + onRequestBlocks = new Event<[hash: Buffer, nextBlockDirection: boolean]>() + /** Emitted when a note is being requested by index */ + onRequestNote = new Event<[position: number]>() + /** Emitted when a nullifier is being requested by index */ + onRequestNullifier = new Event<[position: number]>() + + /** + * Private constructor for a `Captain`. + * + * @remarks Public code should use {@link Captain.new} instead. + * + * @param chain The storage-connected AnchorChain that manages the merkle trees + * and the blockchain. + */ + private constructor( + chain: Blockchain, + logger: Logger, + metrics: MetricsMonitor, + ) { + this.metrics = metrics + this.strategy = chain.strategy + this.chain = chain + this.blockSyncer = new BlockSyncer(this, logger) + this.blockSerde = new BlockSerde(chain.strategy) + this.logger = logger + } + + /** + * Construct a new `Captain` + * + * @remarks the type parameters are normally inferred from the `strategy`. + * + * @param chain The storage-backed AnchorChain that manages the merkle trees + * and the block chain. + * @typeParam E Note element stored in transactions and the notes Merkle Tree + * @typeParam H the hash of an `E`. Used for the internal nodes and root hash + * of the notes Merkle Tree + * @typeParam T Type of a transaction stored on Captain's chain. + * @typeParam ST The serialized format of a `T`. Conversion between the two happens + * via the `strategy`. 
+ */ + static async new< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST + >( + db: IDatabase, + strategy: Strategy, + chain?: Blockchain, + logger: Logger = createRootLogger(), + metrics?: MetricsMonitor, + ): Promise> { + logger = logger.withTag('captain') + metrics = metrics || new MetricsMonitor(logger) + chain = chain || (await Blockchain.new(db, strategy, logger, metrics)) + return new Captain(chain, logger, metrics) + } + + /** + * Start the various syncing, requesting, and handling tasks. + * + * @remarks don't forget to call shutdown on completion + */ + async start(): Promise { + if ((await this.chain.hasGenesisBlock()) === false) { + throw new Error('Captain cannot start without a genesis block on the chain') + } + } + + onPeerNetworkReady(): void { + void this.blockSyncer.start() + } + + onPeerNetworkNotReady(): void { + void this.blockSyncer.shutdown() + } + + /** + * Instruct the various captain tasks to shut down their loops. + * + * Waits for all in-flight promises to complete before returning. + */ + async shutdown(): Promise { + await Promise.all([this.blockSyncer.shutdown()]) + } + + /** Used to request a nullifier by position */ + requestNullifier(position: number): void { + this.onRequestNullifier.emit(position) + } + + /** Used to request a note by position */ + requestNote(position: number): void { + this.onRequestNote.emit(position) + } + + /** Used to request a block by header hash or sequence */ + requestBlocks(hash: Buffer, nextBlockDirection: boolean): void { + this.onRequestBlocks.emit(hash, nextBlockDirection) + } + + /** + * Submit a freshly mined block to be forwarded to the p2p network + * + * This method would only be used by miners. + * @param block the block that has been mined by an external miner or pool. 
+ */ + emitBlock(block: Block): void { + this.onNewBlock.emit(block) + } +} diff --git a/ironfish/src/captain/messages.ts b/ironfish/src/captain/messages.ts new file mode 100644 index 0000000000..b99186db8b --- /dev/null +++ b/ironfish/src/captain/messages.ts @@ -0,0 +1,190 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { LooseMessage, Message, MessagePayload, PayloadType, Rpc } from '../network' + +import { SerializedBlock } from './anchorChain/blockchain/Block' + +/** + * The type of a Iron Fish message. This is an exhaustive list of + * the messages that are sent from Captain. Other messages may + * be sent by peerNetwork's internal mechanisms (for example, a peer list). + * + * Note: A Response to a Request must have the same MessageType + */ +export enum MessageType { + Note = 'Note', + Nullifier = 'Nullifier', + Blocks = 'Blocks', + NewBlock = 'NewBlock', + NewTransaction = 'NewTransaction', +} + +/** + * A request for a note by its position in the notes merkle tree. + * + * Handler is in `TreeSyncer` + */ +export type NoteRequest = Message + +/** + * Type narrowing to confirm a `NoteRequest` has the requisite type and position field. + */ +export function isNoteRequestPayload(obj: PayloadType): obj is MessagePayload { + return obj != null && 'position' in obj && typeof obj.position === 'number' +} + +/** + * A response to a note request, returned by the handler in TreeSyncer + * + * The note is a serialized note entity. + */ +export type NoteResponse = Rpc + +/** + * Type narrowing to confirm a `NoteResponse` has the requisite type and + * a note payload. Does not try to deserialize the note or verify it in any way. 
+ */ +export function isNoteResponsePayload( + obj: PayloadType, +): obj is MessagePayload> { + return obj != null && 'note' in obj && 'position' in obj && typeof obj.position === 'number' +} + +/** + * Type narrowing to confirm a `NoteResponse` has the requisite type and + * a note payload. Does not try to deserialize the note or verify it in any way. + */ +export function isNoteResponse(obj: LooseMessage): obj is NoteResponse { + return obj.type === MessageType.Note && 'payload' in obj && isNoteResponsePayload(obj.payload) +} + +/** + * A request for a nullifier by its position in the notes merkle tree. + */ +export type NullifierRequest = Message + +/** + * Type narrowing to confirm a `'nullifierRequest` has the requisite type and position + */ +export function isNullifierRequestPayload( + obj: PayloadType, +): obj is MessagePayload { + return obj != null && 'position' in obj && typeof obj.position === 'number' +} + +/** + * A response to a request for a nullifier + */ +export type NullifierResponse = Rpc< + MessageType.Nullifier, + { nullifier: string; position: number } +> + +/** + * Type narrowing to confirm a `NullifierResponse` has the requisite type and + * a nullifier payload. Does not try to deserialize the nullifier or verify it in any way. + */ +export function isNullifierResponse(obj: LooseMessage): obj is NullifierResponse { + return ( + obj.type === MessageType.Nullifier && + 'payload' in obj && + isNullifierRequestPayload(obj.payload) + ) +} + +export function isNullifierResponsePayload( + obj: PayloadType, +): obj is MessagePayload { + return obj != null && 'nullifier' in obj && typeof obj.nullifier === 'string' +} + +/** + * A request for a block. 
+ * + * A response to this request should be for the block before the given hash + * with the given sequence + * + * If the given hash is undefined, return the head block of the heaviest chain + */ +export type BlockRequest = Message< + MessageType.Blocks, + { + /** + * The hash that the block request is relative to. + */ + hash: string + /** + * To either respond with the next block in the forwards direction + * from given hash or not + */ + nextBlockDirection: boolean + } +> + +/** + * Type narrowing to verify that the block has the hash and sequence parameters + * and that they are either undefined or strings. + */ +export function isBlockRequestPayload(obj: PayloadType): obj is BlockRequest['payload'] { + return obj != null && 'hash' in obj && (typeof obj.hash === 'string' || obj.hash === null) +} + +/** + * A response to a request for a block. A valid message contains an array of serialized block. + */ +export type BlocksResponse = Rpc< + MessageType.Blocks, + { blocks: SerializedBlock[] } +> + +/** + * Type narrowing to confirm the message payload contains a `block` object + * that represents a serialized block. + * Does not do anything to confirm whether that object is a legitimate block. + */ +export function isBlocksResponse(obj: LooseMessage): obj is BlocksResponse { + const ret = obj.type === MessageType.Blocks && 'payload' in obj && 'blocks' in obj.payload + return ret +} + +/** + * A newly mined block gossipped on the P2P network + */ +export type NewBlock = Message<'NewBlock', { block: SerializedBlock }> + +/** + * Type narrowing to confirm the message payload contains a `block` object. + * Does not try to confirm whether it is a correct block. 
+ */ +export function isNewBlockPayload( + obj: PayloadType, +): obj is NewBlock['payload'] { + return obj != null && 'block' in obj && typeof obj.block === 'object' && obj.block != null +} + +/** + * A newly spent transaction that a client would like to have mined + */ +export type NewTransaction = Message< + 'NewTransaction', + { + transaction: ST + } +> + +/** + * Type narrowing to confirm the message payload contains a `transaction` + * object. Does not try to validate the transaction. + */ +export function isNewTransactionPayload( + obj: PayloadType, +): obj is NewTransaction['payload'] { + return ( + obj != null && + 'transaction' in obj && + typeof obj.transaction === 'object' && + obj.transaction != null + ) +} diff --git a/ironfish/src/captain/testUtilities/helpers/anchorChain.ts b/ironfish/src/captain/testUtilities/helpers/anchorChain.ts new file mode 100644 index 0000000000..c07550fd0b --- /dev/null +++ b/ironfish/src/captain/testUtilities/helpers/anchorChain.ts @@ -0,0 +1,355 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import Blockchain, { GENESIS_BLOCK_PREVIOUS, Target } from '../../anchorChain/blockchain' +import Block from '../../anchorChain/blockchain/Block' +import BlockHeader from '../../anchorChain/blockchain/BlockHeader' +import Strategy from '../../anchorChain/strategies' +import { Spend } from '../../anchorChain/strategies/Transaction' +import { RangeHasher } from '../../anchorChain/merkleTree' +import { IDatabase } from '../../../storage' +import { makeNullifier } from './blockchain' +import { SerializedTestTransaction, TestStrategy, TestTransaction } from '../strategy' +import { makeDb } from './storage' +import { fakeMaxTarget } from './blockchain' +import { createRootLogger } from '../../../logger' +import { Assert } from '../../../assert' +import { IronfishBlock, IronfishBlockchain, IronfishBlockHeader } from '../../../strategy' + +/** + * Type of a test anchorchain, encompassing the various generic parameters. + */ +export type TestBlockchain = Blockchain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction +> + +/** + * Add the notes directly to the anchorchain's notes merkle tree + * without doing any of the checking or syncing that would happen in + * `anchor.addNote` + */ +export async function addNotes( + anchor: Blockchain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + notes: number[], +): Promise { + for (const note of notes) { + await anchor.notes.add(`${note}`) + } +} + +/** + * Set the note and nullifier commitments of the given block to the size and root + * hash of the notes and nullifiers trees on the given chain. + * + * There is a chance this functionality could be useful for more than testing. + * It could be moved to a method on anchorChain. 
+ */ +export async function syncCommitments( + header: BlockHeader< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + anchor: Blockchain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, +): Promise { + header.noteCommitment.size = await anchor.notes.size() + header.noteCommitment.commitment = await anchor.notes.rootHash() + header.nullifierCommitment.size = await anchor.nullifiers.size() + header.nullifierCommitment.commitment = await anchor.nullifiers.rootHash() +} + +/** + * Make a block that suits the two trees currently on the chain. All notes/nullifiers + * that were added to the anchorchain (using chain.notes.add, not chain.AddNote) + * since the head of the chain are entered as transactions + * into the fake block. The last note in the tree becomes the miner's fee. + * The hash and previous hash are all derived from the sequence. + * + * Warning: This will not work if you don't add at least one note to the anchorchain + * using chain.notes.add. 
+ * + * This is kind of a strange workflow, but it's the easiest way to make a chain + * of consistent blocks: + * * Add several notes and nullifiers directly to the chain (chain.notes.add) + * * Call makeNextBlock to get a block that matches those trees + * * add the new block to the chain + */ +export async function makeNextBlock( + chain: Blockchain, + isGenesis?: boolean, + oldNoteCount?: number, + oldNullifierCount?: number, +): Promise> { + const head = await chain.getHeaviestHead() + const noteCount = await chain.notes.size() + const noteHash = await chain.notes.rootHash() + const nullifierCount = await chain.nullifiers.size() + const nullifierHash = await chain.nullifiers.rootHash() + + let newSequence = 1 + let previousBlockHash + + if (isGenesis) { + newSequence = 1 + oldNoteCount = 0 + oldNullifierCount = 0 + previousBlockHash = GENESIS_BLOCK_PREVIOUS + } else { + if (!head) { + throw new Error('Heaviest head must always exist after adding genesis') + } + newSequence = Number(head.sequence) + 1 + oldNoteCount = oldNoteCount ? oldNoteCount : head.noteCommitment.size + oldNullifierCount = oldNullifierCount ? 
oldNullifierCount : head.nullifierCommitment.size + previousBlockHash = head.hash + } + + const notes: string[] = [] + const spends: Spend[] = [] + for (let i = oldNoteCount; i < noteCount; i++) { + const note = await chain.notes.get(i) + Assert.isNotNull(note, 'makeNextBlock method requires adding notes to tree ahead of time') + notes.push(note) + } + for (let i = oldNullifierCount; i < nullifierCount; i++) { + const nullifier = await chain.nullifiers.get(i) + Assert.isNotNull( + nullifier, + 'makeNextBlock method requires adding nullifier to tree ahead of time', + ) + spends.push({ nullifier, commitment: noteHash, size: noteCount }) + } + + const minersFee = BigInt(-10) + const minerTransaction = new TestTransaction(true, notes, minersFee, spends) + const graffiti = Buffer.alloc(32) + graffiti.write('fake block') + + const newHeader = new BlockHeader( + chain.strategy, + BigInt(newSequence), + previousBlockHash, + { + size: noteCount, + commitment: noteHash, + }, + { + size: nullifierCount, + commitment: nullifierHash, + }, + fakeMaxTarget(), + 0, + new Date(1598970000000 + Number(newSequence)), + minersFee, + graffiti, + ) + + return new Block(newHeader, [minerTransaction]) +} + +/** + * Make an anchorchain with no blocks. + */ +export async function makeChainInitial( + strategy?: Strategy< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + dbPrefix?: string | IDatabase, +): Promise { + const db = + typeof dbPrefix === 'string' || dbPrefix === undefined ? makeDb(dbPrefix) : dbPrefix + const chain = Blockchain.new( + db, + strategy || new TestStrategy(new RangeHasher()), + createRootLogger(), + ) + + await db.open() + return chain +} + +/** + * Make an anchorchain with a genesis block that has one note and one nullifier. 
+ */ +export async function makeChainGenesis( + strategy?: Strategy< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + dbPrefix?: string | IDatabase, +): Promise { + const chain = await makeChainInitial(strategy, dbPrefix) + await chain.notes.add('0') + await chain.nullifiers.add(makeNullifier(0)) + const genesis = await makeNextBlock(chain, true) + await chain.addBlock(genesis) + return chain +} + +/** + * Make an anchorchain with several valid blocks, + * notes, and nullifiers. + * + * The chain has eight blocks. Each block is valid and contains five notes, + * including the miner's fee note. Each block has two spends. + * + * Each block is sequentially one after the other and the chain is complete. + * + * The easiest way to add new blocks to a chain generated this way is to: + * + * * Add at least one note to the chain using `chain.notes.add()` + * (NOT `chain.addNote`) so it doesn't try to sync anything. + * * Optionally add some nullifiers to the chain using `chain.nullifiers.add()` + * * call `makeNextBlock(chain)` on the chain + * * Add the resulting block + * + * Not useful for testing forks or validity, but useful for any tests that + * require a prior existing chain. + * + * Can also be useful to pull valid blocks from if you are constructing a + * second chain. For example, you might want to test optimistic sync by creating + * a chain with only the last block in this test chain. 
+ */ +export async function makeChain( + strategy?: Strategy< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + dbPrefix?: string | IDatabase, +): Promise { + const chain = await makeChainGenesis(strategy, dbPrefix) + + for (let i = 1; i < 8 * 5; i++) { + await chain.notes.add(`${i}`) + + if (i % 5 < 2) { + await chain.nullifiers.add(makeNullifier(i)) + } + + if ((i + 1) % 5 === 0) { + const oldNoteCount = (await chain.notes.size()) - 5 + const oldNullifierCount = (await chain.nullifiers.size()) - 2 + + const nextBlock = await makeNextBlock(chain, false, oldNoteCount, oldNullifierCount) + await chain.addBlock(nextBlock) + } + } + + return chain +} + +/** + * Create a test block with no transactions, that can be after any block either on the chain or not. + * It works by not affecting the merkle trees at all, and requires this block to have no transactions, + * therefore no notes. + * + * @param chain the chain is not used, only the verifier and strategy from the chain + * @param after the block the created block should be after + */ +export function makeBlockAfter( + chain: IronfishBlockchain, + after: IronfishBlockHeader | IronfishBlock, +): IronfishBlock { + if (after instanceof Block) { + after = after.header + } + + const sequence = after.sequence + BigInt(1) + const miningReward = BigInt(chain.strategy.miningReward(sequence)) + + if (miningReward !== BigInt(0)) { + throw new Error(`Must have mining reward disabled but was ${miningReward}`) + } + + const timestamp = new Date() + const target = Target.calculateTarget(timestamp, after.timestamp, after.target) + const randomness = Math.random() + const graffiti = Buffer.alloc(32) + graffiti.write('fake block') + + const header = new BlockHeader( + chain.strategy, + sequence, + after.hash, + after.noteCommitment, + after.nullifierCommitment, + target, + randomness, + timestamp, + miningReward, + graffiti, + true, + BigInt(1), + ) + + const block = new Block(header, []) + 
+ Assert.isTrue(chain.verifier.verifyBlock(block).valid === 1) + return block +} + +/** + * This adds blocks to a chain in random order. It's useful to help root out bugs where insertion order + * can create bugs because someone accidently wrote code that is graph structure dependent. If any block + * fails to be added, the operation will stop and return false + * + * @param chain the chain to insert blocks into + * @param blocks the blocks to insert in random order + * @param randomDrop should it randomly decide drop blocks with a 10% chance + */ +export async function addBlocksShuffle( + chain: IronfishBlockchain, + blocks: IronfishBlock[], + randomDrop = false, +): Promise { + blocks = [...blocks] + + while (blocks.length > 0) { + const index = Math.floor(Math.random() * blocks.length) + const block = blocks.splice(index, 1)[0] + + const shouldDrop = randomDrop && Math.random() > 0.9 + if (shouldDrop) continue + + const { isAdded } = await chain.addBlock(block) + if (!isAdded) return false + } + + return true +} diff --git a/ironfish/src/captain/testUtilities/helpers/blockchain.ts b/ironfish/src/captain/testUtilities/helpers/blockchain.ts new file mode 100644 index 0000000000..f8412a91d9 --- /dev/null +++ b/ironfish/src/captain/testUtilities/helpers/blockchain.ts @@ -0,0 +1,115 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { SerializedTestTransaction, TestStrategy, TestTransaction } from '../strategy' +import Blockchain from '../../anchorChain/blockchain' +import Block from '../../anchorChain/blockchain/Block' +import BlockHeader, { BlockHash } from '../../anchorChain/blockchain/BlockHeader' +import Target from '../../anchorChain/blockchain/Target' +import { makeDb, makeDbName } from './storage' +import { RangeHasher } from '../../anchorChain/merkleTree' +import { createRootLogger } from '../../../logger' + +/** + * Make a block with a hash consisting of the given digit, + * the previous hash consisting of the next digit, and the start and + * end numbers of a sequence of notes in the block. + * + * Note: The resulting block is suitable for use on a blockchain.BlockChain, + * but will fail if you try adding it to an anchorchain without some extra + * massaging of the values. + * + * Specifically, the nullifier commitment does not have a correct value against + * the value in the tree. The note commitment should match up, though it depends + * exactly how the tree was initially produced. + * + * Most notably, a block created with this function will not go onto a chain + * created with makeChain or makeCaptain. You are probably better off using + * makeNextBlock from the anchorChain test utilities instead. 
+ */ +export function makeFakeBlock( + strategy: TestStrategy, + previousHash: BlockHash, + hash: BlockHash, + sequence: number, + start: number, + end: number, + timestamp?: Date, +): Block { + const transactions = [] + for (let i = start; i < end; i++) { + transactions.push(new TestTransaction(true, [String(i)], 1)) + } + + const minersReward = strategy.miningReward(BigInt(sequence)) + const transactionFee = -1 * (end - start + minersReward) + const transactionFeeTransaction = new TestTransaction(true, [String(end)], transactionFee) + transactions.push(transactionFeeTransaction) + + const graffiti = Buffer.alloc(32) + graffiti.write('fake block') + + const header = new BlockHeader( + strategy, + BigInt(sequence), + previousHash, + { + commitment: `1-${end}`, + size: end, + }, + { commitment: Buffer.alloc(32), size: 1 }, + fakeMaxTarget(), + 0, + timestamp ? timestamp : new Date(1598970000000 + hash[0]), + BigInt(transactionFee), + graffiti, + ) + + return new Block(header, transactions) +} + +/** + * Make a block hash with the hash set to the given digit + */ +export function fakeMaxTarget(): Target { + return new Target(BigInt(2) ** BigInt(256) - BigInt(1)) +} + +/** + * Make a block hash with the hash set to the given digit + */ +export function blockHash(digit: number): BlockHash { + const hash = Buffer.alloc(32) + hash[0] = digit + return hash +} + +/** + * Make a nullifier with the hash set to the given digit. 
+ */ +export function makeNullifier(digit: number): BlockHash { + const hash = Buffer.alloc(32) + hash[0] = digit + return hash +} + +export async function makeBlockchain(): Promise< + Blockchain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + > +> { + const name = makeDbName() + const database = makeDb(name) + + const strategy = new TestStrategy(new RangeHasher()) + const chain = await Blockchain.new(database, strategy, createRootLogger()) + + await database.open() + return chain +} diff --git a/ironfish/src/captain/testUtilities/helpers/captain.ts b/ironfish/src/captain/testUtilities/helpers/captain.ts new file mode 100644 index 0000000000..95e4315474 --- /dev/null +++ b/ironfish/src/captain/testUtilities/helpers/captain.ts @@ -0,0 +1,180 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Direction, IncomingPeerMessage, MessagePayload } from '../../../network' +import { default as Block } from '../../anchorChain/blockchain/Block' +import BlockHeader from '../../anchorChain/blockchain/BlockHeader' +import Captain, { BlockSyncer, BlocksResponse } from '../..' +import { BlockRequest, MessageType } from '../../messages' + +import { + blockHash, + makeDb, + makeChainGenesis, + makeChainInitial, + makeChain, + TestBlockchain, +} from '.' 
+import { SerializedTestTransaction, TestStrategy, TestTransaction } from '../strategy' +import { makeDbName } from './storage' +import { MemPool } from '../../../memPool' + +export type TestCaptain = Captain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction +> + +export type TestMemPool = MemPool< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction +> + +export type TestBlockSyncer = BlockSyncer< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction +> + +export type TestBlockHeader = BlockHeader< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction +> + +export type TestBlock = Block< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction +> + +/** + * Make a test captain with a chain that contains only the genesis + * block (one note and nullifier) + */ +export async function makeInitialTestCaptain( + strategy: TestStrategy, + dbPrefix: string, +): Promise { + const db = makeDb(dbPrefix) + const chain = await makeChainInitial(strategy, db) + return await Captain.new(db, strategy, chain) +} + +/** + * Make a test captain with a chain that contains several valid blocks, + * notes, and nullifiers. + */ +export async function makeCaptain( + strategy: TestStrategy, + dbPrefix?: string, +): Promise { + if (!dbPrefix) dbPrefix = makeDbName() + const db = makeDb(dbPrefix) + const chain = await makeChain(strategy, db) + return await Captain.new(db, strategy, chain) +} + +/** + * Make a test captain whose chain has an initial block followed by + * a gap and then two blocks at the head. This is the kind of chain that + * requires syncing. It is designed such that if the chain becomes fully + * synced, it will be the same as that returned by `makeCaptain`. 
+ */ +export async function makeCaptainSyncable( + strategy: TestStrategy, + dbPrefix?: string, + addExtraBlocks = true, +): Promise { + if (!dbPrefix) dbPrefix = makeDbName() + + const db = makeDb(dbPrefix) + const chain = await makeChainGenesis(strategy, db) + + const dbFull = makeDb(dbPrefix + '-full') + + if (addExtraBlocks) { + const chainFull = await makeChain(strategy, dbFull) + await chain.addBlock(await blockBySequence(chainFull, 8)) + await chain.addBlock(await blockBySequence(chainFull, 7)) + } + await dbFull.close() + + return await Captain.new(db, strategy, chain) +} + +/** + * Extract a block from the given chain by its sequence. + * Throw an error if the block is null. + * + * This is just for removing typescript non-null assertions. + */ +export async function blockBySequence( + chain: TestBlockchain, + sequence: number | null, +): Promise> { + let hash: Buffer | null + if (sequence === null) { + const heaviestHead = await chain.getHeaviestHead() + hash = heaviestHead ? heaviestHead.hash : null + } else { + hash = blockHash(sequence) + } + + if (!hash) throw new Error(`No hash for ${sequence || ''}`) + + const block = await chain.getBlock(hash) + if (!block) { + throw new Error(`Block ${sequence || ''} does not exist`) + } + return block +} + +/** + * Format a proper response given a payload for Block Syncer + */ +export function response( + payload: MessagePayload>>, +): IncomingPeerMessage>> { + return { + peerIdentity: 'somebody', + message: { + rpcId: 1, + type: MessageType.Blocks, + direction: Direction.response, + payload: payload, + }, + } +} + +/** + * Format a proper request given a payload for Block Syncer + */ +export function request( + payload: MessagePayload, +): IncomingPeerMessage { + return { + peerIdentity: 'somebody', + message: { + type: MessageType.Blocks, + payload: payload, + }, + } +} diff --git a/ironfish/src/captain/testUtilities/helpers/index.ts b/ironfish/src/captain/testUtilities/helpers/index.ts new file mode 100644 
index 0000000000..3b073ba033 --- /dev/null +++ b/ironfish/src/captain/testUtilities/helpers/index.ts @@ -0,0 +1,10 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './anchorChain' +export * from './blockchain' +export * from './captain' +export * from './jest' +export * from './merkleTree' +export * from './storage' diff --git a/ironfish/src/captain/testUtilities/helpers/jest.ts b/ironfish/src/captain/testUtilities/helpers/jest.ts new file mode 100644 index 0000000000..75fb0d5ae8 --- /dev/null +++ b/ironfish/src/captain/testUtilities/helpers/jest.ts @@ -0,0 +1,12 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/** + * Asserts the type of a given function as a Jest mock. + */ +export function typeMock( + func: (...args: [...T]) => R, +): jest.Mock { + return func as jest.Mock +} diff --git a/ironfish/src/captain/testUtilities/helpers/merkleTree.ts b/ironfish/src/captain/testUtilities/helpers/merkleTree.ts new file mode 100644 index 0000000000..a9112e0838 --- /dev/null +++ b/ironfish/src/captain/testUtilities/helpers/merkleTree.ts @@ -0,0 +1,56 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import MerkleTree, { StructureHasher } from '../../anchorChain/merkleTree' +import { IDatabase } from '../../../storage' +import { makeDb, makeDbName } from './storage' + +/** + * Make a tree with the given elements. 
+ */ +export async function makeTree({ + characters, + depth, + name, + database, +}: { + characters?: string + depth?: number + name?: string + database?: IDatabase +} = {}): Promise> { + const openDb = !database + + if (characters && !openDb) { + throw new Error( + `Cannot create A test merkletree with characters unless you also want to open the DB`, + ) + } + + if (!name) name = makeDbName() + if (!database) database = makeDb(name) + + const tree = await MerkleTree.new(new StructureHasher(), database, name, depth) + + if (openDb) { + await database.open() + } + + if (characters) { + for (const i of characters) { + await tree.add(i) + } + } + + return tree +} + +/** + * Make a tree with 16 elements. Used for testing truncate + */ +export async function makeFullTree( + name?: string, +): Promise> { + return await makeTree({ characters: 'abcdefghijklmnop', name: name }) +} diff --git a/ironfish/src/captain/testUtilities/helpers/storage.ts b/ironfish/src/captain/testUtilities/helpers/storage.ts new file mode 100644 index 0000000000..fcd4edecba --- /dev/null +++ b/ironfish/src/captain/testUtilities/helpers/storage.ts @@ -0,0 +1,18 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { IDatabase, LevelupDatabase } from '../../../storage' +import leveldown from 'leveldown' + +/** Generate a test database name from the given test if not provided*/ +export function makeDbName(): string { + const id = (Math.random() * Number.MAX_SAFE_INTEGER).toFixed(0) + return expect.getState().currentTestName + '-' + id +} + +/**Init a database with the given name, or generate one from the current test */ +export function makeDb(name?: string): IDatabase { + if (!name) name = makeDbName() + return new LevelupDatabase(leveldown(`./testdbs/${name}`)) +} diff --git a/ironfish/src/captain/testUtilities/index.test.ts b/ironfish/src/captain/testUtilities/index.test.ts new file mode 100644 index 0000000000..563923117b --- /dev/null +++ b/ironfish/src/captain/testUtilities/index.test.ts @@ -0,0 +1,8 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +it('should export TestStrategy', async () => { + const { TestStrategy } = await import('.') + expect(TestStrategy).not.toBeFalsy() +}) diff --git a/ironfish/src/captain/testUtilities/index.ts b/ironfish/src/captain/testUtilities/index.ts new file mode 100644 index 0000000000..6a1879c0d2 --- /dev/null +++ b/ironfish/src/captain/testUtilities/index.ts @@ -0,0 +1,10 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +// Load the matchers that extend `expect` +import './matchers/index' + +export * from './helpers' +export * from './mocks' +export * from './strategy' diff --git a/ironfish/src/captain/testUtilities/matchers/blockchain.ts b/ironfish/src/captain/testUtilities/matchers/blockchain.ts new file mode 100644 index 0000000000..d4704387e9 --- /dev/null +++ b/ironfish/src/captain/testUtilities/matchers/blockchain.ts @@ -0,0 +1,198 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import diff from 'jest-diff' +import { zip } from 'lodash' + +import Blockchain from '../../anchorChain/blockchain' +import Block from '../../anchorChain/blockchain/Block' +import { BlockHash, Sequence } from '../../anchorChain/blockchain/BlockHeader' +import { BlockSerde } from '../../anchorChain/blockchain/Block' +import { Nullifier } from '../../anchorChain/nullifiers' +import { SerializedTestTransaction, TestTransaction } from '../strategy' +import makeError from './makeError' +import { BufferSerde } from '../../../serde' + +declare global { + namespace jest { + interface Matchers { + toHaveNoSignOfHash(hash: BlockHash, sequence: number): Promise + toEqualNullifier(other: Nullifier): R + toEqualHash(other: BlockHash): R + toHaveChainLengths(blocks: number, heads: number, sequences: number): Promise + toHaveBlockCounts(hash: BlockHash, notes: number, nullifiers: number): Promise + toHaveChainHeads(heads: [BlockHash, BlockHash][]): Promise + toEqualBlock( + block: Block< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + ): Promise + toHaveSequences(sequences: [number, BlockHash[]][]): Promise + } + } +} + +expect.extend({ + async toHaveNoSignOfHash( + chain: Blockchain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + hash: BlockHash, + 
sequence: Sequence, + ): Promise { + let error: string | null = null + if ((await chain.getBlock(hash)) !== null) { + error = `Expected block ${String(hash)} to be null` + } else if (await chain.headers.get(hash)) { + error = `Expected block header ${String(hash)} not to be in the db` + } else if (await chain.transactions.get(hash)) { + error = `Expected transactions ${String(hash)} not to be in the db` + } else { + const hashesForSequence = await chain.sequenceToHash.get(sequence.toString()) + if (hashesForSequence) { + for (const candidate of hashesForSequence) { + if (error !== null) break + if (chain.blockHashSerde.equals(candidate, hash)) { + error = `Hash ${String(hash)} exists in sequences index for ${sequence}` + } + } + } + } + + return makeError(error, `expect ${String(hash)} and ${sequence} not to be gone`) + }, + toEqualHash(self: BlockHash, other: BlockHash): jest.CustomMatcherResult { + const serde = new BufferSerde(32) + let error: string | null = null + if (!serde.equals(self, other)) { + const diffString = diff(self, other) + error = `Serde results do not match:\n\nDifference:\n\n${String(diffString)}` + } + return makeError(error, `Expected two serde elements to match, but they didn't`) + }, + toEqualNullifier(self: Nullifier, other: Nullifier): jest.CustomMatcherResult { + const serde = new BufferSerde(32) + let error: string | null = null + if (!serde.equals(self, other)) { + const diffString = diff(self, other) + error = `Serde results do not match:\n\nDifference:\n\n${String(diffString)}` + } + return makeError(error, `Expected two serde elements to match, but they didn't`) + }, + toEqualBlock( + self: Block, + other: Block, + ): jest.CustomMatcherResult { + const serde = new BlockSerde(self.header.strategy) + let error: string | null = null + if (!serde.equals(self, other)) { + const diffString = diff(self, other) + error = `Blocks do not match:\n\nDifference:\n\n${String(diffString)}` + } + return makeError(error, `Expected two blocks to 
match, but they didn't`) + }, + + async toHaveChainLengths( + chain: Blockchain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + blocks: number, + heads: number, + sequences: number, + ): Promise { + let error: string | null = null + + const numHeaders = (await chain.headers.getAllKeys()).length + const numTransactions = (await chain.transactions.getAllKeys()).length + const numSequences = (await chain.sequenceToHash.getAllKeys()).length + + if (numHeaders !== blocks) { + error = `Chain has ${numHeaders} headers, but should have ${blocks}` + } else if (numTransactions !== blocks) { + error = `Chain has ${numTransactions} transactions, but should have ${blocks}` + } else if (numSequences !== sequences) { + error = `Chain has ${numSequences} sequences, but should have ${sequences}` + } + + return makeError(error, `Expected chain length not to match`) + }, + async toHaveBlockCounts( + chain: Blockchain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + hash: BlockHash, + notes: number, + nullifiers: number, + ): Promise { + let error: string | null = null + const block = await chain.getBlock(hash) + const counts = block?.counts() + + if (counts === undefined) { + error = `${String(hash)} does not have any Counts` + } else if (counts.notes !== notes) { + error = `${String(hash)} has ${counts.notes} notes, but expected ${notes}` + } else if (counts.nullifiers !== nullifiers) { + error = `${String(hash)} has ${counts.nullifiers} nullifiers, but expected ${nullifiers}` + } + return makeError(error, `Expected counts not to match`) + }, + + async toHaveSequences( + chain: Blockchain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + sequences: [number, BlockHash[]][], + ) { + let error: string | null = null + for (const [sequence, hashes] of sequences) { + if (error !== null) break + const actualHashes = await 
chain.sequenceToHash.get(sequence.toString()) + if (actualHashes === undefined) { + error = `There are no hashes for sequence ${sequence}` + } else if (actualHashes.length !== hashes.length) { + error = `There are ${actualHashes.length} hashes for sequence ${sequence}, but there should be ${hashes.length}` + } else { + actualHashes.sort() + hashes.sort() + for (const [actual, expected] of zip(actualHashes, hashes)) { + if (error !== null) break + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + if (!chain.blockHashSerde.equals(Buffer.from(actual!), Buffer.from(expected!))) { + const diffString = diff(actual, expected) + error = `Hashes for sequence ${sequence} don't match\n\nDifference:\n\n${String( + diffString, + )}` + } + } + } + } + return makeError(error, `Expected chain sequences not to match`) + }, +}) diff --git a/ironfish/src/captain/testUtilities/matchers/index.ts b/ironfish/src/captain/testUtilities/matchers/index.ts new file mode 100644 index 0000000000..b32fce0ae5 --- /dev/null +++ b/ironfish/src/captain/testUtilities/matchers/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import './blockchain' +import './merkleTree' diff --git a/ironfish/src/captain/testUtilities/matchers/makeError.ts b/ironfish/src/captain/testUtilities/matchers/makeError.ts new file mode 100644 index 0000000000..5d2a82d4e1 --- /dev/null +++ b/ironfish/src/captain/testUtilities/matchers/makeError.ts @@ -0,0 +1,20 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export default function makeError( + error: string | null, + success: string, +): { pass: boolean; message: () => string } { + if (error !== null) { + return { + pass: false, + message: () => error, + } + } else { + return { + pass: true, + message: () => success, + } + } +} diff --git a/ironfish/src/captain/testUtilities/matchers/merkleTree.ts b/ironfish/src/captain/testUtilities/matchers/merkleTree.ts new file mode 100644 index 0000000000..d0b5154aa8 --- /dev/null +++ b/ironfish/src/captain/testUtilities/matchers/merkleTree.ts @@ -0,0 +1,191 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import diff from 'jest-diff' + +import MerkleTree, { NodeValue, Side, Witness } from '../../anchorChain/merkleTree' +import makeError from './makeError' + +declare global { + namespace jest { + interface Matchers { + toHaveLeaves(characters: string, parents: number[]): Promise + toHaveNodes(nodeSpecs: [number, Side, number, string][]): Promise + toMatchTree(other: MerkleTree): Promise + toMatchWitness(treeSize: number, rootHash: string, authPath: [Side, string][]): R + } + } +} + +expect.extend({ + async toHaveLeaves( + tree: MerkleTree, + characters: string, + parents: number[], + ): Promise { + let error: string | null = null + const treeSize = await tree.size() + + if (characters.length !== parents.length) { + error = `expected characters to have same length as parents` + } else if (treeSize !== characters.length) { + error = `expected tree size ${treeSize} to be ${characters.length}` + } + + for (let i = 0; i < characters.length; i++) { + if (error !== null) { + break + } + + const leaf = await tree.getLeaf(i) + + if (leaf.element !== characters[i]) { + error = `expected element ${i} to be ${characters[i]}, but it is ${leaf.element}` + } else if (leaf.merkleHash !== characters[i]) { + error = `expected 
element ${i} to have hash ${characters[i]}, but it is ${leaf.merkleHash}` + } else if (leaf.parentIndex !== parents[i]) { + error = `expected element ${i} to have parent ${parents[i]}, but it is ${leaf.parentIndex}` + } + } + return makeError( + error, + `expected tree not to match ${characters} and ${parents.toString()}`, + ) + }, + async toHaveNodes( + tree: MerkleTree, + nodeSpecs: [number, Side, number, string][], + ): Promise { + let error: string | null = null + + const treeNodes = await tree.nodes.getAllValues() + const treeNodeCount = treeNodes.length + const nodeCounter = await tree.getCount('Nodes') + + if (treeNodeCount !== nodeSpecs.length) { + error = `expected tree to have ${nodeSpecs.length} nodes, got ${treeNodeCount}` + } else if (nodeCounter !== nodeSpecs.length + 1) { + error = `expected Node counter to be ${nodeSpecs.length + 1} but it is ${nodeCounter}` + } + + for (const nodeSpec of nodeSpecs) { + const [index, side, otherIndex, hashOfSibling] = nodeSpec + + if (error !== null) { + break + } + + // Sorry; this is a bit convoluted; I'm trying to make the tests as readable as possible. 
+ // it's just building a list of elements + const nodeValue: NodeValue = { + side, + hashOfSibling, + leftIndex: otherIndex, + parentIndex: otherIndex, + index, + } + + let expected + if (side === Side.Left) { + const { leftIndex: _leftIndex, ...expectedValue } = nodeValue + expected = expectedValue + } else { + const { parentIndex: _parentIndex, ...expectedValue } = nodeValue + expected = expectedValue + } + + const node = await tree.getNode(index) + const diffString = diff(expected, node) + + if (diffString && diffString.includes('Expected')) { + error = `node ${index} didn't match: \n\nDifference:\n\n${diffString}` + } + } + + return makeError(error, 'tree should not match given nodes') + }, + async toMatchTree( + tree: MerkleTree, + other: MerkleTree, + ): Promise { + let error: string | null = null + const treeLeafCount = await tree.getCount('Leaves') + const treeNodeCount = await tree.getCount('Nodes') + const otherLeafCount = await other.getCount('Leaves') + const otherNodeCount = await other.getCount('Nodes') + + if (treeLeafCount !== otherLeafCount) { + error = `tree ${tree.treeName} has ${treeLeafCount} leaves, but expected ${otherLeafCount}` + } else if (treeNodeCount !== otherNodeCount) { + error = `tree ${tree.treeName} has ${treeNodeCount} nodes, but expected ${otherNodeCount}` + } + + for (let index = 0; index < treeLeafCount; index++) { + if (error !== null) { + break + } + + const { ...actualLeaf } = await tree.getLeaf(index) + const { ...expectedLeaf } = await other.getLeaf(index) + + const diffString = diff(actualLeaf, expectedLeaf) + if (diffString && diffString.includes('Expected')) { + error = `leaf ${index} didn't match: \n\n Difference: \n\n${diffString}` + } + } + + for (let index = 1; index < treeNodeCount; index++) { + if (error !== null) { + break + } + const { ...expectedNode } = await tree.getNode(index) + const { ...actualNode } = await other.getNode(index) + + const diffString = diff(actualNode, expectedNode) + if (diffString && 
diffString.includes('Expected')) { + error = `node ${index} didn't match: \n\n Difference: \n\n${diffString}` + } + } + + return makeError(error, 'trees should not match') + }, + toMatchWitness( + witness: Witness, + treeSize: number, + rootHash: string, + authenticationPath: [Side, string][], + ): jest.CustomMatcherResult { + let error: string | null = null + + if (witness === undefined) { + error = 'expected witness to be defined' + } else if (witness.rootHash !== rootHash) { + error = `Witness has incorrect root hash:\n\n${ + diff(rootHash, witness.rootHash) || 'null' + }` + } else if (witness.treeSize() !== treeSize) { + error = `Witness has incorrect tree size ${witness.treeSize()}, expected ${treeSize}` + } else if (witness.authenticationPath.length !== authenticationPath.length) { + error = `Witness has incorrect authentication path length ${witness.authenticationPath.length}, expected ${authenticationPath.length}` + } + + for (let index = 0; index < authenticationPath.length; index++) { + if (error !== null) { + break + } + const actual = witness.authenticationPath[index] + const expected = authenticationPath[index] + + if (actual.side !== expected[0]) { + error = `Witness path index ${index} has side ${actual.side}, but expected ${expected[0]}` + } else if (actual.hashOfSibling !== expected[1]) { + error = `Witness path index ${index} has incorrect sibling hash:\n\n${ + diff(actual.hashOfSibling, expected[1]) || 'null' + }` + } + } + + return makeError(error, 'witnesses should not match') + }, +}) diff --git a/ironfish/src/captain/testUtilities/mocks.test.ts b/ironfish/src/captain/testUtilities/mocks.test.ts new file mode 100644 index 0000000000..df657596ff --- /dev/null +++ b/ironfish/src/captain/testUtilities/mocks.test.ts @@ -0,0 +1,51 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { mockImplementationShuffle } from './mocks' + +describe('Mocks', () => { + it('should shuffle mock', async () => { + const mock = jest.fn() + + const results: number[] = [] + + mockImplementationShuffle<[number], void>(mock, (value: number) => { + results.push(value) + return Promise.resolve() + }) + + const promises = [] + for (let i = 0; i < 10; ++i) promises.push(mock(i)) + await Promise.all(promises) + + expect(results).toHaveLength(10) + }) + + it('should allow cancelation', () => { + jest.useFakeTimers() + + const mock = jest.fn() + const results: number[] = [] + + function mockImplementation(value: number) { + results.push(value) + return Promise.resolve(value) + } + + // it should have the result from the shuffled result + mockImplementationShuffle(mock, mockImplementation, 1) + mock(0) + jest.runAllTimers() + expect(results).toHaveLength(1) + + results.length = 0 + + // when we call cancel it should not have the result + const cancelShuffle = mockImplementationShuffle(mock, mockImplementation, 1) + mock(0) + cancelShuffle() + jest.runAllTimers() + expect(results).toHaveLength(0) + }) +}) diff --git a/ironfish/src/captain/testUtilities/mocks.ts b/ironfish/src/captain/testUtilities/mocks.ts new file mode 100644 index 0000000000..d464d1aadf --- /dev/null +++ b/ironfish/src/captain/testUtilities/mocks.ts @@ -0,0 +1,56 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/** + * Used to shuffle the responses from an asynchronous API call using a debounce strategy. 
+ * @param mock The mock to intercept calls for and shuffle + * @param mocked The mock function to replace mock with + * @param time The maximum amount of debounce time to allow before returning shuffled results + */ +export function mockImplementationShuffle( + mock: jest.Mock, TArgs>, + mocked: (...args: TArgs) => Promise, + time = 10, +): () => void { + type PromiseResolve = (result: Promise) => void + const buffer: [TArgs, PromiseResolve][] = [] + let lastTimeout: number | null = null + let lastSend: number | null = null + + mock.mockImplementation( + (...args: TArgs): Promise => { + const promise = new Promise>((resolve) => { + if (lastTimeout) clearTimeout(lastTimeout) + + buffer.push([args, resolve]) + + function send() { + lastSend = Date.now() + + const shuffled = buffer.slice().sort(() => Math.random() - 0.5) + buffer.length = 0 + + for (const [args, resolve] of shuffled) { + resolve(mocked(...args)) + } + } + + // Force a send if the maximum amount of time has elapsed + if (lastSend !== null && Date.now() - lastSend > time) { + send() + return + } + + // Start the debounce timer + lastTimeout = (setTimeout(send, time) as unknown) as number + }) + + return promise.then((r) => r) + }, + ) + + return () => { + if (lastTimeout) clearTimeout(lastTimeout) + } +} diff --git a/ironfish/src/captain/testUtilities/strategy/SerializedTypes.ts b/ironfish/src/captain/testUtilities/strategy/SerializedTypes.ts new file mode 100644 index 0000000000..e3d8428ba2 --- /dev/null +++ b/ironfish/src/captain/testUtilities/strategy/SerializedTypes.ts @@ -0,0 +1,14 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import type { Spend } from '../../anchorChain/strategies/Transaction' + +type SerializedSpend = Omit, 'nullifier'> & { nullifier: string } + +export type SerializedTestTransaction = { + elements: string[] + spends: SerializedSpend[] + totalFees: string + isValid: boolean +} diff --git a/ironfish/src/captain/testUtilities/strategy/TestStrategy.ts b/ironfish/src/captain/testUtilities/strategy/TestStrategy.ts new file mode 100644 index 0000000000..2cead3aa92 --- /dev/null +++ b/ironfish/src/captain/testUtilities/strategy/TestStrategy.ts @@ -0,0 +1,82 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { BlockHash } from '../../anchorChain/blockchain/BlockHeader' +import { NullifierHasher } from '../../anchorChain/nullifiers' +import Strategy from '../../anchorChain/strategies' +import { ConcatHasher } from '../../anchorChain/merkleTree' + +import { TestTransaction } from './TestTransaction' +import { TestTransactionSerde } from './TestTransactionSerde' +import { SerializedTestTransaction } from './SerializedTypes' +import { TestVerifier } from './testVerifier' +import { TestBlockchain } from '../helpers' + +/** + * Very basic strategy for testing blockchain code. 
Models notes and hashes + * as concatenated strings, and uses dumb calculations for hashing and + * target calculations + */ +export class TestStrategy + implements + Strategy { + _noteHasher: ConcatHasher + _nullifierHasher: NullifierHasher + + constructor(noteHasher = new ConcatHasher()) { + this._noteHasher = noteHasher + this._nullifierHasher = new NullifierHasher() + } + + createVerifier(chain: TestBlockchain): TestVerifier { + return new TestVerifier(chain) + } + + noteHasher(): ConcatHasher { + return this._noteHasher + } + + nullifierHasher(): NullifierHasher { + return this._nullifierHasher + } + + transactionSerde(): TestTransactionSerde { + return new TestTransactionSerde() + } + + /** + * Generate a hash from the block's sequence. + */ + hashBlockHeader(serializedHeader: Buffer): BlockHash { + const headerWithoutRandomness = Buffer.from(serializedHeader.slice(8)) + const header = JSON.parse(headerWithoutRandomness.toString()) as Record + + const sequence = BigInt(header['sequence']) + const bigIntArray = BigInt64Array.from([sequence]) + const byteArray = Buffer.from(bigIntArray.buffer) + const result = Buffer.alloc(32) + result.set(byteArray) + return result + } + + createMinersFee( + totalTransactionFees: bigint, + blockSequence: bigint, + _minerKey: string, + ): Promise { + const miningReward = this.miningReward(blockSequence) + return Promise.resolve( + new TestTransaction( + true, + [`miners note ${totalTransactionFees + BigInt(miningReward)}`], + BigInt(-1) * (totalTransactionFees + BigInt(miningReward)), + [], + ), + ) + } + + miningReward(_blockSequence: bigint): number { + return 10 + } +} diff --git a/ironfish/src/captain/testUtilities/strategy/TestTransaction.ts b/ironfish/src/captain/testUtilities/strategy/TestTransaction.ts new file mode 100644 index 0000000000..dc7c705000 --- /dev/null +++ b/ironfish/src/captain/testUtilities/strategy/TestTransaction.ts @@ -0,0 +1,78 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public 
+ * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { + VerificationResult, + VerificationResultReason, +} from '../../anchorChain/blockchain/VerificationResult' +import { Validity } from '../../anchorChain/blockchain/VerificationResult' +import { Spend, default as Transaction } from '../../anchorChain/strategies/Transaction' +import { StringUtils } from '../../../utils' + +export class TestTransaction implements Transaction { + isValid: boolean + elements: string[] + _spends: Spend[] + totalFees: bigint + + constructor( + isValid = true, + elements: string[] = [], + totalFees: number | bigint = 0, + spends: Spend[] = [], + ) { + this.elements = elements + this._spends = spends + this.totalFees = BigInt(totalFees) + this.isValid = isValid + } + + verify(): VerificationResult { + return { + valid: this.isValid ? Validity.Yes : Validity.No, + reason: this.isValid ? undefined : VerificationResultReason.INVALID_TRANSACTION_PROOF, + } + } + + takeReference(): boolean { + return true + } + + returnReference(): void { + return + } + + withReference(callback: (transaction: TestTransaction) => R): R { + return callback(this) + } + + notesLength(): number { + return this.elements.length + } + + *notes(): Iterable { + yield* this.elements + } + + spendsLength(): number { + return this._spends.length + } + + *spends(): Iterable> { + yield* this._spends + } + + transactionFee(): bigint { + return this.totalFees + } + + transactionSignature(): Buffer { + return Buffer.from('sig') + } + + transactionHash(): Buffer { + return StringUtils.hash( + JSON.stringify(this.elements) + String(this.totalFees) + JSON.stringify(this._spends), + ) + } +} diff --git a/ironfish/src/captain/testUtilities/strategy/TestTransactionSerde.ts b/ironfish/src/captain/testUtilities/strategy/TestTransactionSerde.ts new file mode 100644 index 0000000000..1cb962d792 --- /dev/null +++ 
b/ironfish/src/captain/testUtilities/strategy/TestTransactionSerde.ts @@ -0,0 +1,47 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import Serde, { BufferSerde, IJSON } from '../../../serde' +import { TestTransaction } from './TestTransaction' +import type { SerializedTestTransaction } from './SerializedTypes' + +export class TestTransactionSerde implements Serde { + equals(transactions1: TestTransaction, transactions2: TestTransaction): boolean { + return ( + IJSON.stringify(this.serialize(transactions1)) === + IJSON.stringify(this.serialize(transactions2)) + ) + } + + serialize(transaction: TestTransaction): SerializedTestTransaction { + const nullifierSerde = new BufferSerde(32) + + const spends = transaction._spends.map((t) => { + return { ...t, nullifier: nullifierSerde.serialize(t.nullifier) } + }) + return { + elements: transaction.elements, + spends, + totalFees: transaction.totalFees.toString(), + isValid: transaction.isValid, + } + } + + deserialize(data: SerializedTestTransaction): TestTransaction { + const nullifierSerde = new BufferSerde(32) + const spends: TestTransaction['_spends'] = data.spends.map((s) => { + return { + commitment: s.commitment, + size: s.size, + nullifier: nullifierSerde.deserialize(s.nullifier), + } + }) + return new TestTransaction( + data.isValid, + data.elements.map(String), + BigInt(data.totalFees), + spends, + ) + } +} diff --git a/ironfish/src/captain/testUtilities/strategy/index.ts b/ironfish/src/captain/testUtilities/strategy/index.ts new file mode 100644 index 0000000000..9df03ec31d --- /dev/null +++ b/ironfish/src/captain/testUtilities/strategy/index.ts @@ -0,0 +1,8 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './SerializedTypes' +export * from './TestStrategy' +export * from './TestTransaction' +export * from './TestTransactionSerde' diff --git a/ironfish/src/captain/testUtilities/strategy/testVerifier.ts b/ironfish/src/captain/testUtilities/strategy/testVerifier.ts new file mode 100644 index 0000000000..4ab25813cf --- /dev/null +++ b/ironfish/src/captain/testUtilities/strategy/testVerifier.ts @@ -0,0 +1,35 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { TestTransaction } from './TestTransaction' +import { SerializedTestTransaction } from './SerializedTypes' +import Verifier from '../../Verifier' +import { Validity, VerificationResult } from '../../anchorChain/blockchain' +import { VerificationResultReason } from '../../anchorChain/blockchain/VerificationResult' +import { TestBlock, TestBlockHeader } from '../helpers' + +export class TestVerifier extends Verifier< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction +> { + isValidTarget(): boolean { + return true + } + + isValidAgainstPrevious( + current: TestBlock, + previousHeader: TestBlockHeader, + ): VerificationResult { + let result = super.isValidAgainstPrevious(current, previousHeader) + + if (result.reason === VerificationResultReason.INVALID_TARGET) + result = { valid: Validity.Yes } + + return result + } +} diff --git a/ironfish/src/event.test.ts b/ironfish/src/event.test.ts new file mode 100644 index 0000000000..ffee566145 --- /dev/null +++ b/ironfish/src/event.test.ts @@ -0,0 +1,58 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Event, waitForEmit } from './event' + +describe('Event', () => { + it('should emit', () => { + const event = new Event<[number, boolean]>() + + let fired = false + + event.on((a, b) => { + expect(a).toBe(5) + expect(b).toBe(true) + fired = true + }) + + event.emit(5, true) + expect(fired).toBe(true) + }) + + it('should emit async', async () => { + const event = new Event<[]>() + + let fired = false + + event.on( + async (): Promise => { + await new Promise((resolve) => setTimeout(resolve, 10)) + fired = true + }, + ) + + await event.emitAsync() + expect(fired).toBe(true) + }) + + it('should wait for emit', async () => { + const foo = new Event<[number]>() + const promise = waitForEmit(foo) + foo.emit(5) + expect((await promise)[0]).toBe(5) + }) + + it('should remove once', () => { + const event = new Event<[]>() + + const mock = jest.fn() + + event.once(mock) + + event.emit() + event.emit() + expect(mock).toBeCalledTimes(1) + expect(event.isEmpty).toBeTruthy() + }) +}) diff --git a/ironfish/src/event.ts b/ironfish/src/event.ts new file mode 100644 index 0000000000..1dd307241b --- /dev/null +++ b/ironfish/src/event.ts @@ -0,0 +1,95 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export class Event { + private handlers: Set<(...args: A) => void | Promise> = new Set() + + /** + * @returns true if the Event has no listeners + */ + get isEmpty(): boolean { + return this.handlers.size === 0 + } + + /** + * @returns the amount of subscriptions on this event + */ + get subscribers(): number { + return this.handlers.size + } + + /** + * Adds a handler for when the event is emitted + * Make sure you unsubscribe using [[Event.off]] + */ + on(handler: (...args: A) => void | Promise): void { + this.handlers.add(handler) + } + + /** + * Removes an event handler by reference + * + * @returns true if the handler was removed + */ + off(handler: (...args: A) => void | Promise): boolean { + return this.handlers.delete(handler) + } + + /** + * Adds an event handler that's removed after the next event is emitted + */ + once(handler: (...args: A) => void | Promise): void { + const wrapper = (...args: A): void | Promise => { + this.off(wrapper) + return handler(...args) + } + this.handlers.add(wrapper) + } + + /** + * Emits the event, calling all handlers for this event + */ + emit(...args: A): void { + void this.emitAsync(...args) + } + + /** + * Emits the event, calling all handlers, and returns a promise that awaits any async handlers + */ + async emitAsync(...args: A): Promise { + const promises = [] + + for (const handler of Array.from(this.handlers)) { + if (this.handlers.has(handler)) { + promises.push(handler.call(undefined, ...args)) + } + } + + await Promise.all(promises) + } + + /** + * Removes all handlers from the event + */ + clear(): void { + this.handlers.clear() + } +} + +/** + * A utility function that accepts an Event and returns a + * promise that resolves the first time the event emits. 
+ * + * @param event The event to wait for + * @returns a promise that resolves the first time the event emits + */ +export const waitForEmit = (event: Event): Promise => { + return new Promise((resolve) => { + const handler = (...args: T) => { + resolve(args) + event.off(handler) + } + event.on(handler) + }) +} diff --git a/ironfish/src/fileStores/config.ts b/ironfish/src/fileStores/config.ts new file mode 100644 index 0000000000..510ec04db7 --- /dev/null +++ b/ironfish/src/fileStores/config.ts @@ -0,0 +1,135 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { DEFAULT_DATA_DIR } from './fileStore' +import { FileSystem } from '../fileSystems' +import { KeyStore } from './keyStore' +import * as yup from 'yup' + +export const DEFAULT_CONFIG_NAME = 'config.json' +export const DEFAULT_DATABASE_NAME = 'default' +export const DEFAULT_WALLET_NAME = 'default' +export const DEFAULT_WEBSOCKET_PORT = 9033 +export const DEFAULT_GET_FUNDS_API = 'https://api.ironfish.network/api/v1/getFunds' +export const DEFAULT_TELEMETRY_API = 'https://api.ironfish.network/api/v1/writeTelemetry' +export const DEFAULT_BOOTSTRAP_NODE = 'test.bn1.ironfish.network' + +export type ConfigOptions = { + bootstrapNodes: string[] + databaseName: string + editor: string + enableListenP2P: boolean + enableMiningDirector: boolean + enableRpc: boolean + enableRpcIpc: boolean + enableRpcTcp: boolean + enableTelemetry: boolean + enableMetrics: boolean + getFundsApi: string + ipcPath: string + /** + * Worker nodes are nodes that are intended to only connect to one node + * directly and should not be broadcast to other peers. For example, a + * single mining director connected to a public node is a worker node. + * + * Often worker nodes are behind firewalls anyway so they cannot be + * connected to. 
+ * */ + isWorker: boolean + /** + * True if you want to send worker peers out to other clients or not + * */ + broadcastWorkers: boolean + /** + * Log levels are formatted like so: + * `*:warn,tag:info` + * + * ex: `warn` or `*:warn` displays only logs that are warns or errors. + * + * ex: `*:warn,peernetwork:info` displays warns and errors, as well as info + * logs from peernetwork and its submodules. + */ + logLevel: string + /** + * String to be prefixed to all logs. Accepts the following replacements: + * %time% : The time of the log + * %tag% : The tags on the log + * %level% : The log level + * + * ex: `[%time%] [%level%] [%tag%]` + */ + logPrefix: string + /** + * When mining new blocks, blockGraffiti will be set on the `graffiti` field of + * newly created blocks. + * Length is truncated to 32 bytes. + */ + blockGraffiti: string + nodeName: string + p2pSimulateLatency: number + peerPort: number + rpcTcpHost: string + rpcTcpPort: number + rpcRetryConnect: boolean + /** + * The maximum number of peers we can be connected to at a time. Past this number, + * new connections will be rejected. + */ + maxPeers: number + /** + * The ideal number of peers we'd like to be connected to. The node will attempt to + * establish new connections when below this number. 
+ */ + targetPeers: number + telemetryApi: string + accountName: string +} + +export const ConfigOptionsSchema: yup.ObjectSchema> = yup + .object() + .shape({}) + .defined() + +export class Config extends KeyStore { + constructor(files: FileSystem, dataDir?: string, configName?: string) { + super( + files, + configName || DEFAULT_CONFIG_NAME, + Config.GetDefaults(files, dataDir || DEFAULT_DATA_DIR), + dataDir || DEFAULT_DATA_DIR, + ConfigOptionsSchema, + ) + } + + static GetDefaults(files: FileSystem, dataDir: string): ConfigOptions { + return { + broadcastWorkers: true, + bootstrapNodes: [DEFAULT_BOOTSTRAP_NODE], + databaseName: DEFAULT_DATABASE_NAME, + editor: '', + enableListenP2P: true, + enableMiningDirector: false, + enableRpc: true, + enableRpcIpc: true, + enableRpcTcp: false, + enableTelemetry: false, + enableMetrics: true, + getFundsApi: DEFAULT_GET_FUNDS_API, + ipcPath: files.resolve(files.join(dataDir || DEFAULT_DATA_DIR, 'ironfish.ipc')), + isWorker: false, + logLevel: '*:info', + logPrefix: '', + blockGraffiti: '', + nodeName: '', + p2pSimulateLatency: 0, + peerPort: DEFAULT_WEBSOCKET_PORT, + rpcTcpHost: 'localhost', + rpcTcpPort: 8020, + rpcRetryConnect: false, + maxPeers: 10000, + targetPeers: 50, + telemetryApi: DEFAULT_TELEMETRY_API, + accountName: DEFAULT_WALLET_NAME, + } + } +} diff --git a/ironfish/src/fileStores/fileStore.ts b/ironfish/src/fileStores/fileStore.ts new file mode 100644 index 0000000000..634da41347 --- /dev/null +++ b/ironfish/src/fileStores/fileStore.ts @@ -0,0 +1,45 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { JSONUtils, PartialRecursive } from '../utils' +import { promises as fs } from 'fs' +import path from 'path' +import { FileSystem } from '../fileSystems' + +export const DEFAULT_DATA_DIR = '~/.ironfish' + +export class FileStore> { + files: FileSystem + dataDir: string + configPath: string + configName: string + + constructor(files: FileSystem, configName: string, dataDir?: string) { + this.files = files + this.dataDir = files.resolve(dataDir || DEFAULT_DATA_DIR) + this.configName = configName + this.configPath = path.join(this.dataDir, configName) + } + + async load(): Promise | null> { + const configExists = await fs + .access(this.configPath) + .then(() => true) + .catch(() => false) + + let config = null + + if (configExists) { + const data = await fs.readFile(this.configPath, { encoding: 'utf8' }) + config = JSONUtils.parse>(data, this.configName) + } + + return config + } + + async save(data: PartialRecursive): Promise { + const json = JSON.stringify(data, undefined, ' ') + await fs.mkdir(this.dataDir, { recursive: true }) + await fs.writeFile(this.configPath, json) + } +} diff --git a/ironfish/src/fileStores/index.ts b/ironfish/src/fileStores/index.ts new file mode 100644 index 0000000000..fdf5492a8a --- /dev/null +++ b/ironfish/src/fileStores/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './config' +export * from './fileStore' +export * from './internal' diff --git a/ironfish/src/fileStores/internal.ts b/ironfish/src/fileStores/internal.ts new file mode 100644 index 0000000000..6dcb2f855a --- /dev/null +++ b/ironfish/src/fileStores/internal.ts @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { FileSystem } from '../fileSystems' +import { KeyStore } from './keyStore' + +export type InternalOptions = { + isFirstRun: boolean +} + +export const InternalOptionsDefaults: InternalOptions = { + isFirstRun: true, +} + +export class InternalStore extends KeyStore { + constructor(files: FileSystem, dataDir?: string, configName?: string) { + super(files, configName || 'internal.json', InternalOptionsDefaults, dataDir) + } +} diff --git a/ironfish/src/fileStores/keyStore.ts b/ironfish/src/fileStores/keyStore.ts new file mode 100644 index 0000000000..579b1ad0a9 --- /dev/null +++ b/ironfish/src/fileStores/keyStore.ts @@ -0,0 +1,116 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Event } from '../event' +import { FileStore } from './fileStore' +import { FileSystem } from '../fileSystems' +import { PartialRecursive, YupUtils } from '../utils' +import * as yup from 'yup' + +export class KeyStore> { + storage: FileStore + config: Readonly + defaults: TSchema + loaded: Partial + overrides: Partial = {} + keysLoaded = new Set() + schema: yup.ObjectSchema> | undefined + + readonly onConfigChange: Event< + [key: keyof TSchema, value: TSchema[keyof TSchema]] + > = new Event() + + constructor( + files: FileSystem, + configName: string, + defaults: TSchema, + dataDir?: string, + schema?: yup.ObjectSchema>, + ) { + this.storage = new FileStore(files, configName, dataDir) + this.schema = schema + + const loaded = Object.setPrototypeOf({}, defaults) as TSchema + const overrides = Object.setPrototypeOf({}, loaded) as TSchema + const config = Object.setPrototypeOf({}, overrides) as TSchema + + this.defaults = defaults + this.loaded = loaded + this.overrides = overrides + this.config = 
config + } + + async load(): Promise { + const data = await this.storage.load() + + // Validate file store if we have a schema + if (this.schema) { + const { error } = await YupUtils.tryValidate(this.schema, data) + if (error) { + throw new Error(error.message) + } + } + + this.keysLoaded.clear() + + if (data !== null) { + let key: keyof TSchema + + for (key in data) { + this.keysLoaded.add(key) + } + } + + this.loaded = { ...data } as Partial + + // Patch back in inheritence so config is still TSchema + Object.setPrototypeOf(this.loaded, this.defaults) + Object.setPrototypeOf(this.overrides, this.loaded) + + // Write the file out if it doesnt exist + if (data === null) await this.save() + } + + async save(): Promise { + const save: PartialRecursive = {} + + let key: keyof TSchema + for (key in this.loaded) { + const shouldSaveKey = this.keysLoaded.has(key) || this.loaded[key] !== this.defaults[key] + + if (shouldSaveKey) { + Object.assign(save, { [key]: this.config[key] }) + } + } + + await this.storage.save(save) + } + + set(key: T, value: TSchema[T]): void { + const previousValue = this.config[key] + + Object.assign(this.loaded, { [key]: value }) + + if (Object.prototype.hasOwnProperty.call(this.overrides, key)) { + delete this.overrides[key] + } + + if (previousValue !== value) { + this.onConfigChange.emit(key, value) + } + } + + setOverride(key: T, value: TSchema[T]): void { + const previousValue = this.config[key] + + Object.assign(this.overrides, { [key]: value }) + + if (previousValue !== value) { + this.onConfigChange.emit(key, value) + } + } + + get(key: T): TSchema[T] { + return this.config[key] + } +} diff --git a/ironfish/src/fileSystems/fileSystem.ts b/ironfish/src/fileSystems/fileSystem.ts new file mode 100644 index 0000000000..3c3419efcd --- /dev/null +++ b/ironfish/src/fileSystems/fileSystem.ts @@ -0,0 +1,12 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export abstract class FileSystem { + abstract init(): Promise + abstract writeFile(path: string, data: string): Promise + abstract readFile(path: string): Promise + abstract mkdir(path: string, options: { recursive?: boolean }): Promise + abstract resolve(path: string): string + abstract join(...paths: string[]): string +} diff --git a/ironfish/src/fileSystems/index.ts b/ironfish/src/fileSystems/index.ts new file mode 100644 index 0000000000..0681d2d847 --- /dev/null +++ b/ironfish/src/fileSystems/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './fileSystem' +export * from './nodeFileSystem' diff --git a/ironfish/src/fileSystems/nodeFileSystem.ts b/ironfish/src/fileSystems/nodeFileSystem.ts new file mode 100644 index 0000000000..fb88a3774e --- /dev/null +++ b/ironfish/src/fileSystems/nodeFileSystem.ts @@ -0,0 +1,71 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Assert } from '../assert' +import { FileSystem } from './fileSystem' + +export class NodeFileProvider extends FileSystem { + fs: typeof import('fs').promises | null = null + path: typeof import('path') | null = null + os: typeof import('os') | null = null + + async init(): Promise { + this.fs = (await import('fs')).promises + this.path = await import('path') + this.os = await import('os') + return this + } + + async writeFile(path: string, data: string): Promise { + Assert.isNotNull(this.fs, `Must call FileSystem.init()`) + await this.fs.writeFile(path, data) + } + + async readFile(path: string): Promise { + Assert.isNotNull(this.fs, `Must call FileSystem.init()`) + return await this.fs.readFile(path, { encoding: 'utf8' }) + } + + async mkdir(path: string, options: { recursive?: boolean }): Promise { + Assert.isNotNull(this.fs, `Must call FileSystem.init()`) + await this.fs.mkdir(path, options) + } + + resolve(path: string): string { + Assert.isNotNull(this.path, `Must call FileSystem.init()`) + return this.path.resolve(this.expandTilde(path)) + } + + join(...paths: string[]): string { + Assert.isNotNull(this.path, `Must call FileSystem.init()`) + return this.path.join(...paths) + } + + /** + * Expands a path out using known unix shell shortcuts + * ~ expands to your home directory + * ~+ expands to your current directory + * + * @param filePath The filepath to expand out using unix shortcuts + */ + private expandTilde(filePath: string): string { + Assert.isNotNull(this.os) + Assert.isNotNull(this.path) + + const CHAR_TILDE = 126 + const CHAR_PLUS = 43 + const home = this.os.homedir() + + if (filePath.charCodeAt(0) === CHAR_TILDE) { + if (filePath.charCodeAt(1) === CHAR_PLUS) { + return this.path.join(process.cwd(), filePath.slice(2)) + } + + if (!home) return filePath + + return this.path.join(home, filePath.slice(1)) + } + + return filePath + } +} diff --git a/ironfish/src/genesis/genesis.test.slow.ts b/ironfish/src/genesis/genesis.test.slow.ts new 
file mode 100644 index 0000000000..9919363bd5 --- /dev/null +++ b/ironfish/src/genesis/genesis.test.slow.ts @@ -0,0 +1,148 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import Captain, { SerializedBlock, Target } from '../captain' +import { makeDb, fakeMaxTarget } from '../captain/testUtilities' +import { IJSON } from '../serde' +import { genesisBlockData } from './genesisBlock' +import { makeGenesisBlock } from './makeGenesisBlock' +import { IronfishStrategy } from '../strategy' +import { AsyncTransactionWorkerPool } from '../strategy/asyncTransactionWorkerPool' +import { generateKey } from 'ironfish-wasm-nodejs' +import { createNodeTest } from '../testUtilities' + +describe('Genesis block test', () => { + const nodeTest = createNodeTest() + let targetMeetsSpy: jest.SpyInstance + let targetSpy: jest.SpyInstance + + beforeAll(() => { + targetMeetsSpy = jest.spyOn(Target, 'meets').mockImplementation(() => true) + targetSpy = jest.spyOn(Target, 'calculateTarget').mockImplementation(() => fakeMaxTarget()) + }) + + afterAll(async () => { + await AsyncTransactionWorkerPool.stop() + targetMeetsSpy.mockClear() + targetSpy.mockClear() + }) + + it('Can start a chain with the existing genesis block', async () => { + const db = makeDb() + const strategy = new IronfishStrategy() + const captain = await Captain.new(db, strategy) + await db.open() + + const result = IJSON.parse(genesisBlockData) as SerializedBlock + const block = strategy._blockSerde.deserialize(result) + const addedBlock = await captain.chain.addBlock(block) + expect(addedBlock.isAdded).toBe(true) + + // We should also be able to create new blocks after the genesis block + // has been added + const minersfee = await strategy.createMinersFee( + BigInt(0), + block.header.sequence + BigInt(1), + generateKey().spending_key, + ) + const newBlock = 
await captain.chain.newBlock([], minersfee) + expect(newBlock).toBeTruthy() + }, 60000) + + it('Can generate a valid genesis block', async () => { + // Initialize the database and chain + const strategy = nodeTest.strategy + const node = nodeTest.node + const captain = nodeTest.captain + + const amountNumber = 5 + const amountBigint = BigInt(amountNumber) + + // Construct parameters for the genesis block + const account = await node.accounts.createAccount('test', true) + const info = { + timestamp: Date.now(), + memo: 'test', + allocations: [ + { + amount: amountNumber, + publicAddress: account.publicAddress, + }, + ], + } + + // Build the genesis block itself + const { block } = await makeGenesisBlock(captain, info, account, captain.logger) + + // Check some parameters on it to make sure they match what's expected. + expect(block.header.timestamp.valueOf()).toEqual(info.timestamp) + expect(block.header.target.asBigInt()).toEqual(Target.initialTarget().asBigInt()) + + // Balance should still be zero, since generating the block should clear out + // any notes made in the process + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: BigInt(0), + unconfirmedBalance: BigInt(0), + }) + + // Add the block to the chain + const addBlock = await captain.chain.addBlock(block) + expect(addBlock.isAdded).toBeTruthy() + + // TODO: this should happen automatically in addBlock + await node.accounts.updateHead(node) + + // Check that the balance is what's expected + expect(node.accounts.getBalance(account)).toEqual({ + confirmedBalance: amountBigint, + unconfirmedBalance: amountBigint, + }) + + // Ensure we can construct blocks after that block + const minersfee = await strategy.createMinersFee( + BigInt(0), + block.header.sequence + BigInt(1), + generateKey().spending_key, + ) + const additionalBlock = await captain.chain.newBlock([], minersfee) + expect(additionalBlock).toBeTruthy() + + // Next, serialize it in the same way that the genesis command serializes 
it + const serialized = strategy._blockSerde.serialize(block) + const jsonedBlock = IJSON.stringify(serialized, ' ') + + // Now start from scratch with a clean database and make sure the block + // is still the same. + const { node: newNode, captain: newCaptain } = await nodeTest.createSetup() + + // Deserialize the block and add it to the new chain + const result = IJSON.parse(jsonedBlock) as SerializedBlock + const deserializedBlock = strategy._blockSerde.deserialize(result) + const addedBlock = await newCaptain.chain.addBlock(deserializedBlock) + expect(addedBlock.isAdded).toBe(true) + + // Validate parameters again to make sure they're what's expected + expect(deserializedBlock.header.timestamp.valueOf()).toEqual(info.timestamp) + expect(deserializedBlock.header.target.asBigInt()).toEqual( + Target.initialTarget().asBigInt(), + ) + + await newNode.accounts.importAccount(account) + await newNode.accounts.updateHead(newNode) + await newNode.accounts.scanTransactions(newNode.captain.chain) + + expect(newNode.accounts.getBalance(account)).toEqual({ + confirmedBalance: amountBigint, + unconfirmedBalance: amountBigint, + }) + + // Ensure we can construct blocks after that block + const newMinersfee = await strategy.createMinersFee( + BigInt(0), + deserializedBlock.header.sequence + BigInt(1), + generateKey().spending_key, + ) + const newBlock = await newCaptain.chain.newBlock([], newMinersfee) + expect(newBlock).toBeTruthy() + }, 600000) +}) diff --git a/ironfish/src/genesis/genesisBlock.ts b/ironfish/src/genesis/genesisBlock.ts new file mode 100644 index 0000000000..a2d9879eba --- /dev/null +++ b/ironfish/src/genesis/genesisBlock.ts @@ -0,0 +1,45 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +export const genesisBlockData = ` +{ + "header": { + "sequence": "1", + "previousBlockHash": "0000000000000000000000000000000000000000000000000000000000000000", + "noteCommitment": { + "commitment": { + "type": "Buffer", + "data": "base64:9d+hS8wu0Y8MbmGa+k7lJvTUAdPNApri/L7RwI9+/V4=" + }, + "size": 3 + }, + "nullifierCommitment": { + "commitment": "78A72F2020CE738CBA3A8995C0EBCA23C83A4CE2ADD7927210EAA2DE6466E0C9", + "size": 1 + }, + "target": "883423532389192164791648750371459257913741948437809479060803100646309888", + "randomness": 0, + "timestamp": 1617401592091, + "minersFee": "0", + "isValid": false, + "work": "0", + "graphId": 0, + "count": 0, + "hash": "541ACF487F1B56C50FA2207737E9D4124D513DEB0A6E8D6FF04677C149773F92", + "graffiti": "67656E6573697300000000000000000000000000000000000000000000000000" + }, + "transactions": [ + { + "type": "Buffer", + "data": "base64:AAAAAAAAAAABAAAAAAAAAACA8UsfFPH/rehY0I7NAOp7kjyyfyEfGffYZfZj1SkowEZz2LP/6gnxHdSNBoBokPncd9ulEjpoh1NHR6L5K8489GfplYnPE+COyr/swkOZguC/3gz/gZvxU7nq2gOmGDwZqSRBcjW1CNRf4SX4+qT0WIviSd0Vj00PV/oxCll9blweXrYBhojJlB1f1Bu1hpMB1ALJLKz5sGjGOXNj/nUnJVLoRdzcVhUXvWfVQiMAsclg0L98fxuQM6nlcjUn7QAESzM6K2Utzg3qQjHvrcxVKzJ1TvXhgiEYRYLt0u4ZEqJATMubZ2i0pl3+L2ebGJDeyeAbEYS6Ed4bzMBHVCZ4DK01MVTqGWls4Sg0H/JoFoiqPUCT/7NIXay178zUzPy22vWo0JQK0zkrvIp3csESFjNtPtoS1Pe7yZ0pgQ1XW9lfWHvzH0AmZZWr/TYAQ3f2/uMxXNOFXcfMaSKPdm5XBNdk5ls0bVecf30mA1BuANCiibxFpiw0PUPMxb0Kns1VwK5aUxyA9qxOQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAL2vHCvmdXuAl+e9bDLPtpZ4mRd++37v9gcXuesF9O1Lp7Y3jhj+KrFGF7wYP6IRvJHLsjNXv1UZgB1aq51E8L" + }, + { + "type": "Buffer", + "data": 
"base64:AQAAAAAAAAABAAAAAAAAAAAAAAAAAAAAj8HKjd2lsmRJP3wCkET10xcdYWyJMbtDVBShMgAZ/sXxWt72AD5MGgT+6/H5IGtshQMQ6m5MAHvkw2CRza0t0KBmX5SXfsJZLHBq3tFf5jWDybYqiUu4j2RWKOXGR/uQCuXMRr8m4Wfu9nFgb51tDnUiPr1RsC3X3PeLMuhX1I6aubBFKXvig/uIKp7i+2m3j/HuR69+MP3XVurenxNM8Ig3b6AqGcDecLOfREsJLQpsRDk4nvPT2xIOWCbRSZOQ8y8NkHvZAP8u9qgQDseoSDJZbYZM16/gklA/rPsB2FtpMYFCKPGAZ1NW4FOEPTNmieudNJv28UA6QEjQFvrcxh21TYKVNBhNgsQQi+txXiRUOdwPoZA9UJgTk70suyY5AQAAAN5hUq0LGPqV0YX76ujf3xxtiXxDhIUj0DFNq/pya9scpMl3yV6V/LOTxbY02jx1PctIPZL/vl7MhrA7BzMTRJGz4Iq7oe32qiXbsKIUjF0GvPdJe6KlV9drdgcjuVbYCZMDYQXjOWgAN2NmWTFXoQL+PjbL7gXXT9yDVV2JXPxW8cBG6H2tKqlrNbxe9GfZhaTk8nwEJc3Qphh2GEmTisRjd00d+0ZVCqvCmW6VXlQWmvNZBL973CIyZOeWKqUiCRVpF69k/kVfYJgbqCWcb/4sSJBgZ7KiPduVgT9CSQ8vIjnvAFMqrjALUBltYU1VqbOguFZ4gbVnqJ2PoJwJZmDONNhjVb1sTvfqwLLDP1jYhVTt3C+gqjtIb5+2P/0RLkqd/WOnKEWhy9EEGE6jdF5a/cQHkEN88dFWEH/gd2FRCZPnrSZqbdNbBtZlpAob1oJXCm0Xx+v4l4VC1bxwLhkxsXf4iOATbeEvgAMC0jZc08uIMXPkM078joNLobPMu+2M06GXiif41/ig7POGCzL4p/HPJf+bBBGWxzuxZbsiNUYlR4Gp9InK0zGaq4M/if8Fxk9L4Kez9WLVRCRWlhpeFOmi1piTxQXlBMG3ZIKk33rxP2FW0uSFHrbSJMPkp2ksci3uJTtx3aVw6jPmL4zwVFCof7HEPo4z3t2bT/QvNDS1+cOUUfZUhzstsWmwBTpKNQEi7KHsK9qGpCXJ7By6ItuDlTz9t6BwYZtp+eb8RfZJyrHuUxqiJMe7Gc266QWPrqmcrmIKEFJVlzBtMoIaXXMUPpDY7otn0TRKCX6vI40Zr2sV7HDfLkrODlngQ3KxAQ==" + }, + { + "type": "Buffer", + "data": 
"base64:AAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAt9HuUmZ9U7fTcgmprNjzXPXx1riqt4WCSIBQV/jFc1Fq++lghkC+VF2XCHbss6W3uGMsxLZw8rnzKeRX7UHyq+KNapiDP5R83LrnOqhEMpyeNIiRsJyKGcQqj7IaaBjjEzOftLSGDIBoAI0It9XC89b9q7qxXGgGuYC+5SDh7cxDJS6CKYFEk3CWdy4G030vlPf0K6GgPzwM/DOA1oSX7MfUt3eKRkpST2AZ6sq2oN7StR47BZJCqQfw940vGAk0rmvTMWmxsY9i3mfk311wxOfLhGlLDXemrr9vHiT0xL3jDp0MtS7c2+LUcsYdAAHmG90cwUC7yKWwHBer1+6GKHO8gg0Q4anz2lBci+UkFAW8LZMTufsDi+RV5/z9cdhVBOVfYV0wkibcqUwWhcCmlOR3MjqyNcb9vxM95aZF7qsNTfxtCbAH4498oM26O0tXr6FOckSW6+V2F7d5eYtDoMHZLjyM24SSEIAYZCb27p8YDFJ9SG+nFgs89865Q4SElFkTQmVhbnN0YWxrIG5vdGUgZW5jcnlwdGlvbiBtaW5lciBrZXkwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCr0Pj+9YwmL+6aCwFXFFrgyjOQjl9TST/Jq4JsxrrDc5USoY/RjkumpD5d3PamNybd5IOJX5306Hi3GedEZ3cK" + } + ] +}` diff --git a/ironfish/src/genesis/index.ts b/ironfish/src/genesis/index.ts new file mode 100644 index 0000000000..420e65f1b5 --- /dev/null +++ b/ironfish/src/genesis/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './genesisBlock' +export * from './makeGenesisBlock' diff --git a/ironfish/src/genesis/makeGenesisBlock.ts b/ironfish/src/genesis/makeGenesisBlock.ts new file mode 100644 index 0000000000..b64026f167 --- /dev/null +++ b/ironfish/src/genesis/makeGenesisBlock.ts @@ -0,0 +1,146 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { Target } from '../captain' +import { generateKey, WasmNote, WasmTransaction } from 'ironfish-wasm-nodejs' +import { Logger } from '../logger' +import type { Account } from '../account' +import { IronfishTransaction, IronfishCaptain, IronfishBlock } from '../strategy' + +export type GenesisBlockInfo = { + memo: string + timestamp: number + allocations: { + publicAddress: string + amount: number + }[] +} + +/** + * Returns a special-cased block with at least one note and spend for the purpose + * of providing an initial block for the blockchain and root hash for the note and + * nullifier merkle trees. + */ +export async function makeGenesisBlock( + captain: IronfishCaptain, + info: GenesisBlockInfo, + account: Account, + logger: Logger, +): Promise<{ block: IronfishBlock }> { + logger = logger.withTag('makeGenesisBlock') + if (!(await captain.chain.isEmpty())) { + throw new Error('Database must be empty to create a genesis block.') + } + // Sum the allocations to get the total number of coins + const allocationSum = info.allocations.reduce((sum, cur) => sum + cur.amount, 0) + + // Track all of the transactions that will be added to the genesis block + const transactionList = [] + + // Create a unique key for the genesis block that's not intended for use. + // It should end up with 0 coins. + const genesisKey = generateKey() + // Create a genesis note granting the genesisKey allocationSum coins. + const genesisNote = new WasmNote(genesisKey.public_address, BigInt(allocationSum), info.memo) + + /** + * + * Transaction 1: + * An initial transaction generating allocationSum coins from nothing. 
+ * + */ + logger.info(`Generating an initial transaction with ${allocationSum} coins...`) + const initialTransaction = new WasmTransaction() + + logger.info(' Generating the receipt...') + initialTransaction.receive(genesisKey.spending_key, genesisNote) + + logger.info(' Posting the initial transaction...') + const postedInitialTransaction = new IronfishTransaction( + Buffer.from(initialTransaction.post_miners_fee().serialize()), + ) + transactionList.push(postedInitialTransaction) + + // Temporarily add the note from the transaction to our merkle tree so we can construct + // a witness from it. It will be re-added later when the block is constructed. + logger.info(' Adding the note to the tree...') + if (postedInitialTransaction.notesLength() != 1) + throw new Error('Expected postedInitialTransaction to have 1 note') + for (const n of postedInitialTransaction.notes()) { + await captain.chain.notes.add(n) + } + + // Construct a witness of that note + logger.info(' Constructing a witness of the note...') + const witness = await captain.chain.notes.witness(0) + if (witness == null) + throw new Error('We must be able to construct a witness in order to generate a spend.') + + // Now that we have the witness, remove the note from the tree + logger.info(' Removing the note from the tree...') + await captain.chain.notes.truncate(0) + + /** + * + * Transaction 2: + * Moves coins from the note in Transaction 1 to each of the allocation addresses. 
+ * + */ + logger.info('Generating a transaction for distributing allocations...') + const transaction = new WasmTransaction() + logger.info(` Generating a spend for ${allocationSum} coins...`) + transaction.spend(genesisKey.spending_key, genesisNote, witness) + + for (const alloc of info.allocations) { + logger.info( + ` Generating a receipt for ${alloc.amount} coins for ${alloc.publicAddress}...`, + ) + const note = new WasmNote(alloc.publicAddress, BigInt(alloc.amount), info.memo) + transaction.receive(genesisKey.spending_key, note) + } + + logger.info(' Posting the transaction...') + const postedTransaction = new IronfishTransaction( + Buffer.from(transaction.post(genesisKey.spending_key, undefined, BigInt(0)).serialize()), + ) + transactionList.push(postedTransaction) + + /** + * + * Now we have all the transactions we need, so we can put together the block. + * + */ + logger.info('Generating a block...') + + // Create a miner's fee transaction for the block. + // Since the block itself generates coins and we don't want the miner account to gain + // additional coins, we'll manually create a non-standard/invalid miner's fee transaction. + // + // This transaction will cause block.verify to fail, but we skip block verification + // throughout the code when the block header's previousBlockHash is GENESIS_BLOCK_PREVIOUS. + logger.info(` Generating a miner's fee transaction for the block...`) + const note = new WasmNote(account.publicAddress, BigInt(0), '') + const minersFeeTransaction = new WasmTransaction() + minersFeeTransaction.receive(account.spendingKey, note) + const postedMinersFeeTransaction = new IronfishTransaction( + Buffer.from(minersFeeTransaction.post_miners_fee().serialize()), + ) + + // Create the block. We expect this to add notes and nullifiers on the block + // into the database for the purpose of generating note and nullifier commitments + // on the block header. 
+ const graffiti = Buffer.alloc(32) + graffiti.write('genesis') + const block = await captain.chain.newBlock( + transactionList, + postedMinersFeeTransaction, + graffiti, + ) + // Modify the block with any custom properties. + block.header.target = Target.initialTarget() + block.header.timestamp = new Date(info.timestamp) + + logger.info('Block complete.') + return { block } +} diff --git a/ironfish/src/gitHash.ts b/ironfish/src/gitHash.ts new file mode 100644 index 0000000000..89dedec027 --- /dev/null +++ b/ironfish/src/gitHash.ts @@ -0,0 +1,2 @@ +// This file is used by ironfish-cli/scripts/build.sh +export default '' diff --git a/ironfish/src/index.test.ts b/ironfish/src/index.test.ts new file mode 100644 index 0000000000..c2c665a079 --- /dev/null +++ b/ironfish/src/index.test.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +it('should test the CLI', () => { + expect(true).toBe(true) +}) diff --git a/ironfish/src/index.ts b/ironfish/src/index.ts new file mode 100644 index 0000000000..f5c34a8539 --- /dev/null +++ b/ironfish/src/index.ts @@ -0,0 +1,20 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +export * from './account' +export * from './assert' +export * from './fileStores' +export * from './fileSystems' +export * from './genesis' +export * from './sdk' +export * from './logger' +export * from './node' +export * from './rpc' +export * from './serde' +export * from './strategy' +export * from './storage' +export * from './mining' +export * from './telemetry' +export * from './utils' +export * from './captain' +export * from './network' diff --git a/ironfish/src/level-errors.d.ts b/ironfish/src/level-errors.d.ts new file mode 100644 index 0000000000..406c4327bd --- /dev/null +++ b/ironfish/src/level-errors.d.ts @@ -0,0 +1,26 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +// From https://github.com/Level/errors + +declare module 'level-errors' { + class LevelUPError extends Error {} + class NotFoundError extends LevelUPError {} + class InitializationError extends LevelUPError {} + class OpenError extends LevelUPError {} + class ReadError extends LevelUPError {} + class WriteError extends LevelUPError {} + class NotFoundError extends LevelUPError {} + class EncodingError extends LevelUPError {} + + export default { + LevelUPError: LevelUPError, + InitializationError: InitializationError, + OpenError: OpenError, + ReadError: ReadError, + WriteError: WriteError, + NotFoundError: NotFoundError, + EncodingError: EncodingError, + } +} diff --git a/ironfish/src/logger/index.ts b/ironfish/src/logger/index.ts new file mode 100644 index 0000000000..90fb682026 --- /dev/null +++ b/ironfish/src/logger/index.ts @@ -0,0 +1,57 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import consola, { LogLevel } from 'consola' +import type { Consola } from 'consola' + +import { ConsoleReporter } from './reporter' +import { parseLogLevelConfig } from './logLevelParser' +export * from './interceptReporter' + +export type Logger = Consola + +export const ConsoleReporterInstance = new ConsoleReporter() + +/** + * Updates the reporter's log levels from a config string. + * + * Format is like so: `*:warn,sdk:info` + * @param logLevelConfig A log level string formatted for use in config files or env vars + */ +export const setLogLevelFromConfig = (logLevelConfig: string): void => { + const parsedConfig = parseLogLevelConfig(logLevelConfig) + + for (const config of parsedConfig) { + ConsoleReporterInstance.setLogLevel(config[0], config[1]) + } +} + +/** + * Updates the reporter's log prefix from a config string. + * + * Format is like so: `[%time%] [%level%] [%tag%]` + * @param logPrefix A string formatted for use in config files or environment vars + */ +export const setLogPrefixFromConfig = (logPrefix: string): void => { + ConsoleReporterInstance.logPrefix = logPrefix +} + +/** + * Enables color when logging + */ +export const setLogColorEnabledFromConfig = (enabled: boolean): void => { + ConsoleReporterInstance.colorEnabled = enabled +} + +/** + * Creates a logger instance with the desired default settings. + */ +export const createRootLogger = (): Logger => { + return consola.create({ + reporters: [ConsoleReporterInstance], + // We're filtering at the reporter level right now so we allow all logs through, + // but if Consola provides a way to set tag-specific log levels, we should use that. + level: LogLevel.Verbose, + }) +} diff --git a/ironfish/src/logger/interceptReporter.ts b/ironfish/src/logger/interceptReporter.ts new file mode 100644 index 0000000000..4f48f4ff93 --- /dev/null +++ b/ironfish/src/logger/interceptReporter.ts @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { ConsolaReporter, ConsolaReporterArgs, ConsolaReporterLogObject } from 'consola' + +type LogCallback = (logObj: ConsolaReporterLogObject, args: ConsolaReporterArgs) => void + +export class InterceptReporter implements ConsolaReporter { + callback: LogCallback + + constructor(callback: LogCallback) { + this.callback = callback + } + + log(logObj: ConsolaReporterLogObject, args: ConsolaReporterArgs): void { + this.callback(logObj, args) + } +} diff --git a/ironfish/src/logger/logLevelParser.test.ts b/ironfish/src/logger/logLevelParser.test.ts new file mode 100644 index 0000000000..636149a6ba --- /dev/null +++ b/ironfish/src/logger/logLevelParser.test.ts @@ -0,0 +1,46 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { LogLevel } from 'consola' +import { parseLogLevelConfig } from './logLevelParser' + +describe('parseLogLevelConfig', () => { + it('should handle multiple entries separated by commas', () => { + const parsed = parseLogLevelConfig('tag:error,tagtwo:warn') + expect(parsed).toHaveLength(2) + expect(parsed[0][0]).toBe('tag') + expect(parsed[0][1]).toBe(LogLevel.Error) + expect(parsed[1][0]).toBe('tagtwo') + expect(parsed[1][1]).toBe(LogLevel.Warn) + }) + + it('should convert mixed-case tags to lowercase', () => { + const parsed = parseLogLevelConfig('TeSt:info') + expect(parsed).toHaveLength(1) + expect(parsed[0][0]).toBe('test') + expect(parsed[0][1]).toBe(LogLevel.Info) + }) + + it('should convert mixed-case log levels to lowercase', () => { + const parsed = parseLogLevelConfig('test:InFo') + expect(parsed).toHaveLength(1) + expect(parsed[0][0]).toBe('test') + expect(parsed[0][1]).toBe(LogLevel.Info) + }) + + it('should parse standalone log levels into wildcard tag', () => { + const parsed = parseLogLevelConfig('warn') + expect(parsed).toHaveLength(1) + expect(parsed[0][0]).toBe('*') + expect(parsed[0][1]).toBe(LogLevel.Warn) + }) + + it('should throw when passed an invalid log level', () => { + expect(() => parseLogLevelConfig('test:qwer')).toThrowError() + }) + + it('should throw when passed a config with too many colons', () => { + expect(() => parseLogLevelConfig('test::warn')).toThrowError() + }) +}) diff --git a/ironfish/src/logger/logLevelParser.ts b/ironfish/src/logger/logLevelParser.ts new file mode 100644 index 0000000000..7277138d32 --- /dev/null +++ b/ironfish/src/logger/logLevelParser.ts @@ -0,0 +1,69 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { LogLevel } from 'consola' + +/** + * Maps config log level strings to consola LogLevel values. 
+ */ +const configToLogLevel: { + [key: string]: LogLevel | undefined +} = Object.freeze({ + fatal: LogLevel.Fatal, + error: LogLevel.Error, + warn: LogLevel.Warn, + log: LogLevel.Log, + info: LogLevel.Info, + success: LogLevel.Success, + debug: LogLevel.Debug, + trace: LogLevel.Trace, + silent: LogLevel.Silent, + verbose: LogLevel.Verbose, +}) + +/** + * Converts a config log level string to a consola LogLevel value. + * @throws `level` does not exist as a key of `configToLogLevel` + * @param level A config log level string + */ +const configLevelToLogLevel = (level: string): LogLevel => { + level = level.toLowerCase() + const configLevel = configToLogLevel[level] + if (configLevel == null) { + throw new Error( + `Log level ${level} should be one of the following: ${Object.keys(configToLogLevel).join( + ', ', + )}`, + ) + } + + return configLevel +} + +/** + * Parses a log level config string into tags and log levels. + * + * ex: `*:warn,peernetwork:info` + * @param logLevelConfig A log level config string. + */ +export const parseLogLevelConfig = ( + logLevelConfig: string, +): ReadonlyArray<[string, LogLevel]> => { + return logLevelConfig.split(',').map((logLevel) => { + const levelParams = logLevel.split(':') + // If we don't have a :, try overriding the default log level + if (levelParams.length === 1) { + levelParams.unshift('*') + } + // We should have 2 levelParams at this point, or the format is wrong + if (levelParams.length !== 2) { + throw new Error('Log levels must have format tag:level') + } + + const tag = levelParams[0].toLowerCase() + const level = configLevelToLogLevel(levelParams[1]) + + return [tag, level] + }) +} diff --git a/ironfish/src/logger/reporter.test.ts b/ironfish/src/logger/reporter.test.ts new file mode 100644 index 0000000000..9a0f2c3008 --- /dev/null +++ b/ironfish/src/logger/reporter.test.ts @@ -0,0 +1,154 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +// We explicitly that the reporter calls console here, so disable the lint +/* eslint-disable no-console */ + +import { LogLevel, logType } from 'consola' +import { format } from 'date-fns' +import { ConsoleReporter } from './reporter' + +describe('setLogLevel', () => { + it('sets defaultMinimumLogLevel when tag is *', () => { + const reporter = new ConsoleReporter() + expect(reporter.defaultMinimumLogLevel).not.toBe(LogLevel.Silent) + reporter.setLogLevel('*', LogLevel.Silent) + expect(reporter.defaultMinimumLogLevel).toBe(LogLevel.Silent) + }) + + it('sets tagToLogLevelMap when tag other than * is passed', () => { + const reporter = new ConsoleReporter() + expect(reporter.tagToLogLevelMap.get('test')).toBeUndefined() + reporter.setLogLevel('test', LogLevel.Silent) + expect(reporter.tagToLogLevelMap.get('test')).toBe(LogLevel.Silent) + }) +}) + +describe('shouldLog', () => { + it('returns false if level is above the defaultMinimumLogLevel and no other overrides exist', () => { + const reporter = new ConsoleReporter() + reporter.defaultMinimumLogLevel = LogLevel.Error + reporter.tagToLogLevelMap.clear() + const result = reporter['shouldLog']({ + args: [], + date: new Date(), + level: LogLevel.Info, + type: 'info', + tag: 'test', + }) + expect(result).toBe(false) + }) + + it('returns true if level equal to the defaultMinimumLogLevel and no other overrides exist', () => { + const reporter = new ConsoleReporter() + reporter.defaultMinimumLogLevel = LogLevel.Error + reporter.tagToLogLevelMap.clear() + const result = reporter['shouldLog']({ + args: [], + date: new Date(), + level: LogLevel.Error, + type: 'error', + tag: 'test', + }) + expect(result).toBe(true) + }) + + it('returns true if an override is more permissive than defaultMinimumLogLevel', () => { + const reporter = new ConsoleReporter() + reporter.defaultMinimumLogLevel = LogLevel.Error + 
reporter.tagToLogLevelMap.set('test', LogLevel.Info) + const result = reporter['shouldLog']({ + args: [], + date: new Date(), + level: LogLevel.Info, + type: 'info', + tag: 'test', + }) + expect(result).toBe(true) + }) + + it('returns true if a more specific override is more permissive', () => { + const reporter = new ConsoleReporter() + reporter.defaultMinimumLogLevel = LogLevel.Verbose + reporter.tagToLogLevelMap.set('test', LogLevel.Error) + reporter.tagToLogLevelMap.set('tag', LogLevel.Info) + + let result = reporter['shouldLog']({ + args: [], + date: new Date(), + level: LogLevel.Info, + type: 'info', + tag: 'test', + }) + expect(result).toBe(false) + + result = reporter['shouldLog']({ + args: [], + date: new Date(), + level: LogLevel.Info, + type: 'info', + tag: 'test:tag', + }) + expect(result).toBe(true) + }) +}) + +describe('logPrefix', () => { + it('omits logPrefix if logPrefix is an empty string', () => { + const reporter = new ConsoleReporter() + reporter.defaultMinimumLogLevel = LogLevel.Info + reporter.logPrefix = '' + const spy = jest.spyOn(console, 'info').mockImplementationOnce(() => {}) + reporter.log({ + args: ['testlog'], + date: new Date(), + level: LogLevel.Info, + type: 'info', + tag: 'test', + }) + expect(spy).toBeCalledWith('testlog') + spy.mockRestore() + }) + + it('formats logPrefix if set', () => { + const reporter = new ConsoleReporter() + reporter.defaultMinimumLogLevel = LogLevel.Info + reporter.logPrefix = '[%time%] [%tag%] [%level%]' + const spy = jest.spyOn(console, 'info').mockImplementationOnce(() => {}) + const date = new Date() + reporter.log({ + args: ['testlog'], + date: date, + level: LogLevel.Info, + type: 'info', + tag: 'testtag', + }) + expect(spy).toBeCalledWith(`[${format(date, 'HH:mm:ss.SSS')}] [testtag] [info]`, 'testlog') + spy.mockRestore() + }) +}) + +describe('getConsoleLogger', () => { + it.each([ + ['fatal', console.error], + ['error', console.error], + ['warn', console.warn], + ['log', console.log], + ['info', 
console.info], + ['success', console.info], + ['debug', console.debug], + ['trace', console.trace], + ['verbose', console.debug], + ['ready', console.info], + ['start', console.info], + ])('returns the right console logger for %s', (type, expected) => { + const reporter = new ConsoleReporter() + expect(reporter['getConsoleLogger']((type as unknown) as logType)).toEqual(expected) + }) + + it('should throw an error when passing an invalid logType', () => { + const reporter = new ConsoleReporter() + expect(() => reporter['getConsoleLogger'](('test' as unknown) as logType)).toThrowError() + }) +}) diff --git a/ironfish/src/logger/reporter.ts b/ironfish/src/logger/reporter.ts new file mode 100644 index 0000000000..331e4a6813 --- /dev/null +++ b/ironfish/src/logger/reporter.ts @@ -0,0 +1,153 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +// The reporter intentionally logs to the console, so disable the lint +/* eslint-disable no-console */ + +import { ConsolaReporter, ConsolaReporterLogObject, LogLevel, logType } from 'consola' +import { format as formatDate } from 'date-fns' +import colors from 'colors/safe' +import { StringUtils } from '../utils/strings' + +function isUnreachable(x: never): never { + throw new Error(x) +} + +// Do nothing when log level is 'silent' +const silentLogger = () => { + /* noop */ +} + +const COLORS = [ + colors.red, + colors.green, + colors.yellow, + colors.blue, + colors.magenta, + colors.cyan, + colors.white, + colors.gray, + colors.grey, +] + +export class ConsoleReporter implements ConsolaReporter { + /** + * Maps tags to log level overrides. + */ + readonly tagToLogLevelMap: Map = new Map() + + /** + * The default minimum log level to display (inclusive), + * if no specific overrides apply. 
+ */ + defaultMinimumLogLevel: LogLevel = LogLevel.Info + + /** + * Prefix template string to prepend to all logs. + */ + logPrefix = '' + + /** + * enable colorizing log elements + */ + colorEnabled = false + + /** + * Updates the reporter's log levels for a given tag. + * + * `*` as a tag sets `defaultMinimumLogLevel`. + * @param tag A tag set on a logger. + * @param level Filter out logs less than or equal to this value. + */ + setLogLevel(tag: string, level: LogLevel): void { + if (tag === '*') { + this.defaultMinimumLogLevel = level + } else { + this.tagToLogLevelMap.set(tag, level) + } + } + + /** + * Converts a logType to a console logging function. + * @param level a logType from logObj + */ + private getConsoleLogger(type: logType) { + switch (type) { + case 'fatal': + return console.error + case 'error': + return console.error + case 'warn': + return console.warn + case 'log': + return console.log + case 'info': + return console.info + case 'success': + return console.info + case 'debug': + return console.debug + case 'trace': + return console.trace + case 'silent': + return silentLogger + case 'verbose': + return console.debug + case 'ready': + return console.info + case 'start': + return console.info + default: + isUnreachable(type) + } + } + + /** + * Determines whether to output logs based on the configured minimum log levels. + * @param logObj a logObj instance from the consola reporter's log function + */ + private shouldLog(logObj: ConsolaReporterLogObject): boolean { + // logs with multiple tags come with the tags joined with ':' + const tags = logObj.tag.split(':') + // Start with the default log level, then check tags from least specific + // to most specific and override the log level if we have an override for that tag. 
+ let level: LogLevel = this.defaultMinimumLogLevel + for (const tag of tags) { + const tagLevel = this.tagToLogLevelMap.get(tag) + if (tagLevel !== undefined) { + level = tagLevel + } + } + + return logObj.level <= level + } + + /** + * Materializes the variables on the logPrefix template string into a new string + * @param logObj a logObj instance from the consola reporter's log function + */ + private buildLogPrefix(logObj: ConsolaReporterLogObject): string { + const formattedDate = formatDate(logObj.date, 'HH:mm:ss.SSS') + let formattedTag = logObj.tag + + if (this.colorEnabled && formattedTag) { + const hash = StringUtils.hashToNumber(logObj.tag) + const index = hash % COLORS.length + const color = COLORS[index] + formattedTag = color(logObj.tag) + } + + return this.logPrefix + .replace(/%time%/g, formattedDate) + .replace(/%level%/g, logObj.type) + .replace(/%tag%/g, formattedTag) + } + + log(logObj: ConsolaReporterLogObject): void { + if (!this.shouldLog(logObj)) return + + const logOutput = this.getConsoleLogger(logObj.type) + logOutput(...(this.logPrefix ? [this.buildLogPrefix(logObj)] : []), ...logObj.args) + } +} diff --git a/ironfish/src/memPool/index.ts b/ironfish/src/memPool/index.ts new file mode 100644 index 0000000000..18df516e09 --- /dev/null +++ b/ironfish/src/memPool/index.ts @@ -0,0 +1,4 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './memPool' diff --git a/ironfish/src/memPool/memPool.ts b/ironfish/src/memPool/memPool.ts new file mode 100644 index 0000000000..26e8a31c1a --- /dev/null +++ b/ironfish/src/memPool/memPool.ts @@ -0,0 +1,147 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import Captain from '../captain' +import { Nullifier } from '../captain/anchorChain/nullifiers' +import Transaction from '../captain/anchorChain/strategies/Transaction' +import { createRootLogger, Logger } from '../logger' +import { JsonSerializable } from '../serde' + +export class MemPool< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> { + private transactions = new Map() + private readonly captain: Captain + private readonly logger: Logger + + constructor(captain: Captain, logger: Logger = createRootLogger()) { + this.captain = captain + this.logger = logger.withTag('transactionpool') + } + + size(): number { + return this.transactions.size + } + + exists(transactionHash: Buffer): boolean { + const hash = transactionHash.toString('hex') + return this.transactions.has(hash) + } + + async *get(): AsyncGenerator { + await this.prune() + + for (const transaction of this.transactions.values()) { + yield transaction + } + } + + /** + * Accepts a transaction from the network + */ + acceptTransaction(transaction: T): boolean { + const hash = transaction.transactionHash().toString('hex') + if (this.transactions.has(hash)) return false + + this.add(transaction) + return true + } + + private add(transaction: T): void { + const hash = transaction.transactionHash().toString('hex') + const fee = transaction.transactionFee() + + this.logger.debug('notes: ', transaction.notesLength()) + this.logger.debug('spends: ', transaction.spendsLength()) + this.logger.debug('fee: ', fee) + + this.transactions.set(hash, transaction) + this.logger.info(`Accepted tx ${hash}, poolsize ${this.size()}`) + } + + /** + * Scan the current transaction pool and remove any transactions that + * are not valid. 
This removes: + * * transactions with invalid proofs + * * transactions that have been seen before the tree was `beforeSize` + * * transactions whose nullifiers were already seen in the transaction list + */ + async prune(): Promise { + // The size of the tree before which any valid transactions must not have been seen + const beforeSize = await this.captain.chain.nullifiers.size() + + const seenNullifiers: Nullifier[] = [] + let pruneCount = 0 + + for (const transaction of this.transactions.values()) { + const isValid = await this.isValidTransaction(transaction, beforeSize, seenNullifiers) + + if (!isValid) { + const hash = transaction.transactionHash().toString('hex') + this.transactions.delete(hash) + pruneCount++ + } + } + + if (pruneCount > 0) { + this.logger.debug(`Pruned ${pruneCount} transactions from the waiting pool`) + } + } + + /** + * Check whether or not the transaction is valid. + * + * Ensures that: + * * Proofs are valid + * * transactionFee is nonnegative + * * transaction spends have not been spent previously on the chain + * * transaction spends have not been spent previously in the list of seenNullifiers + * * transaction spend root actually existed in the notes tree + * + * @param transaction the transaction being tested + * @param beforeSize the size of the nullifiers tree + * before which the transaction must not be seen + * @param seenNullifiers list of nullifiers that were previously spent in this block. + * this method updates seenNullifiers as a side effect, and checks that there + * are no duplicates. + * TODO: seenNullifiers is currently a list, which requires a linear scan for each + * spend. It would be better if it were a set, but the JS native Set doesn't know how + * to operate on the Buffer backed Nullifier. 
+ * TODO: transactions that have been previously verified are needlessly verified again + * when the only thing that might have changed is whether they have been spent before + */ + async isValidTransaction( + transaction: T, + beforeSize: number, + seenNullifiers: Nullifier[], + ): Promise { + // it's faster to check if spends have been seen or not, so do that first + for (const spend of transaction.spends()) { + if (!(await this.captain.chain.verifier.verifySpend(spend, beforeSize))) { + return false + } + } + const validity = transaction.verify() + if (!validity.valid) { + return false + } + + for (const spend of transaction.spends()) { + for (const seen of seenNullifiers) { + if (this.captain.strategy.nullifierHasher().hashSerde().equals(spend.nullifier, seen)) { + return false + } + } + + seenNullifiers.push(spend.nullifier) + } + + return true + } +} diff --git a/ironfish/src/messages.ts b/ironfish/src/messages.ts new file mode 100644 index 0000000000..703c03aa20 --- /dev/null +++ b/ironfish/src/messages.ts @@ -0,0 +1,53 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { + BlockRequest, + BlocksResponse, + MessageType, + NoteRequest, + NoteResponse, + NullifierRequest, + NullifierResponse, +} from './captain/messages' +import { Gossip, MessagePayload, Rpc } from './network' +import { + IronfishVerifier, + SerializedTransaction, + SerializedWasmNoteEncrypted, +} from './strategy' +import { UnwrapPromise } from './utils' + +export type NewBlockMessage = Gossip< + MessageType.NewBlock, + UnwrapPromise> +> + +export type NewTransactionMessage = Gossip< + MessageType.NewTransaction, + UnwrapPromise> +> + +export type BlockRequestMessage = Rpc> + +export type BlocksResponseMessage = Rpc< + MessageType.Blocks, + MessagePayload> +> + +export type NoteRequestMessage = Rpc> + +export type NoteResponseMessage = Rpc< + MessageType.Note, + MessagePayload> +> + +export type NullifierRequestMessage = Rpc< + MessageType.Nullifier, + MessagePayload +> +export type NullifierResponseMessage = Rpc< + MessageType.Nullifier, + MessagePayload +> diff --git a/ironfish/src/metrics/index.ts b/ironfish/src/metrics/index.ts new file mode 100644 index 0000000000..e5de5c789d --- /dev/null +++ b/ironfish/src/metrics/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './meter' +export * from './metricsMonitor' +export * from './rollingAverage' diff --git a/ironfish/src/metrics/meter.ts b/ironfish/src/metrics/meter.ts new file mode 100644 index 0000000000..defaec0b98 --- /dev/null +++ b/ironfish/src/metrics/meter.ts @@ -0,0 +1,91 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { RollingAverage } from './rollingAverage' + +/** + * A metric type useful for recording metered things like + * * blocks per second + * * bytes per second + * + * This metric will take a sample of how many units were + * completd each tick cycle and record that in various + * rolling averages. + * + * @TODO: Move RollingAverages to exponentially-weighted moving average (EWMA) + * */ +export class Meter { + private _started = false + private _rate5s: RollingAverage + private _rate1m: RollingAverage + private _rate5m: RollingAverage + private _count = 0 + private _totalCount = 0 + private _sum = 0 + private _interval: NodeJS.Timeout | null = null + private _intervalMs: number + private _intervalLastMs: number | null = null + + constructor() { + this._intervalMs = 1000 + this._rate5s = new RollingAverage(5000 / this._intervalMs) + this._rate1m = new RollingAverage((1 * 60 * 1000) / this._intervalMs) + this._rate5m = new RollingAverage((5 * 60 * 1000) / this._intervalMs) + } + + get rate5s(): number { + return this._rate5s.average + } + + get rate1m(): number { + return this._rate1m.average + } + + get rate5m(): number { + return this._rate5m.average + } + + get avg(): number { + return this._sum / this._totalCount + } + + add(value: number): void { + if (!this._started) return + this._count += 1 + this._totalCount = this._totalCount + 1 + this._sum += value + } + + start(): void { + if (this._started) return + this._started = true + this._interval = setInterval(() => this.update(), this._intervalMs) + } + + stop(): void { + if (!this._started) return + this._started = false + + if (this._interval) { + clearInterval(this._interval) + } + } + + private update(): void { + const now = Date.now() + + if (this._intervalLastMs === null) { + this._intervalLastMs = now + return + } + + const elapsedMs = now - this._intervalLastMs + const rate = elapsedMs === 0 ? 
0 : (this._count / elapsedMs) * 1000 + + this._rate5s.add(rate) + this._rate1m.add(rate) + this._rate5m.add(rate) + this._count = 0 + this._intervalLastMs = now + } +} diff --git a/ironfish/src/metrics/metricsMonitor.ts b/ironfish/src/metrics/metricsMonitor.ts new file mode 100644 index 0000000000..0aa38ad8f4 --- /dev/null +++ b/ironfish/src/metrics/metricsMonitor.ts @@ -0,0 +1,58 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Meter } from './meter' +import { Logger, createRootLogger } from '../logger' + +type Metric = { + start: () => void + stop: () => void +} + +export class MetricsMonitor { + private _started = false + private _metrics: Metric[] = [] + readonly logger: Logger + + readonly p2p_InboundTraffic: Meter + readonly p2p_InboundTraffic_WS: Meter + readonly p2p_InboundTraffic_WebRTC: Meter + + readonly p2p_OutboundTraffic: Meter + readonly p2p_OutboundTraffic_WS: Meter + readonly p2p_OutboundTraffic_WebRTC: Meter + + constructor(logger: Logger = createRootLogger()) { + this.logger = logger + + this.p2p_InboundTraffic = this.addMeter() + this.p2p_InboundTraffic_WS = this.addMeter() + this.p2p_InboundTraffic_WebRTC = this.addMeter() + + this.p2p_OutboundTraffic = this.addMeter() + this.p2p_OutboundTraffic_WS = this.addMeter() + this.p2p_OutboundTraffic_WebRTC = this.addMeter() + } + + get started(): boolean { + return this._started + } + + start(): void { + this._started = true + this._metrics.forEach((m) => m.start()) + } + + stop(): void { + this._started = false + this._metrics.forEach((m) => m.stop()) + } + + addMeter(): Meter { + const meter = new Meter() + this._metrics.push(meter) + if (this._started) meter.start() + return meter + } +} diff --git a/ironfish/src/metrics/rollingAverage.test.ts b/ironfish/src/metrics/rollingAverage.test.ts new file mode 100644 index 
0000000000..85cc08fb86 --- /dev/null +++ b/ironfish/src/metrics/rollingAverage.test.ts @@ -0,0 +1,16 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { RollingAverage } from './rollingAverage' + +it('Produces an expected average and variance', () => { + const avg = new RollingAverage(2) + avg.add(2) + avg.add(6) + expect(avg.average).toBe(4) + expect(avg.variance).toBe(8) + avg.add(4) + expect(avg.average).toBe(5) + expect(avg.variance).toBe(2) +}) diff --git a/ironfish/src/metrics/rollingAverage.ts b/ironfish/src/metrics/rollingAverage.ts new file mode 100644 index 0000000000..eb6a301aee --- /dev/null +++ b/ironfish/src/metrics/rollingAverage.ts @@ -0,0 +1,111 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +/** + * Utility to efficiently compute the rolling average and variance over a sliding window of samples + * + * Taken from https://github.com/BabylonJS/Babylon.js/blob/0f31c20/src/Misc/performanceMonitor.ts#L125 + */ +export class RollingAverage { + /** + * Current average + */ + average = 0 + /** + * Current variance + */ + variance = 0 + + protected _samples: Array + protected _sampleCount = 0 + protected _pos = 0 + + /** sum of squares of differences from the (current) mean */ + protected _m2 = 0 + + /** + * constructor + * @param length The number of samples required to saturate the sliding window + */ + constructor(length: number) { + this._samples = new Array(Math.ceil(Math.max(length, 2))) + this.reset() + } + + /** + * Adds a sample to the sample set + * @param v The sample value + */ + add(v: number): void { + //http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance + let delta: number + + //we need to check if we've already wrapped round + if (this.isSaturated()) { + //remove bottom of stack from mean + const bottomValue = this._samples[this._pos] + delta = bottomValue - this.average + this.average -= delta / (this._sampleCount - 1) + this._m2 -= delta * (bottomValue - this.average) + } else { + this._sampleCount++ + } + + //add new value to mean + delta = v - this.average + this.average += delta / this._sampleCount + this._m2 += delta * (v - this.average) + + //set the new variance + this.variance = this._m2 / (this._sampleCount - 1) + + this._samples[this._pos] = v + this._pos++ + + this._pos %= this._samples.length //positive wrap around + } + + /** + * Returns previously added values or null if outside of history or outside the sliding window domain + * @param i Index in history. 
For example, pass 0 for the most recent value and 1 for the value before that + * @return Value previously recorded with add() or null if outside of range + */ + history(i: number): number { + if (i >= this._sampleCount || i >= this._samples.length) { + return 0 + } + + const i0 = this._wrapPosition(this._pos - 1.0) + return this._samples[this._wrapPosition(i0 - i)] + } + + /** + * Returns true if enough samples have been taken to completely fill the sliding window + * @return true if sample-set saturated + */ + isSaturated(): boolean { + return this._sampleCount >= this._samples.length + } + + /** + * Resets the rolling average (equivalent to 0 samples taken so far) + */ + reset(): void { + this.average = 0 + this.variance = 0 + this._sampleCount = 0 + this._pos = 0 + this._m2 = 0 + } + + /** + * Wraps a value around the sample range boundaries + * @param i Position in sample range, for example if the sample length is 5, and i is -3, then 2 will be returned. + * @return Wrapped position in sample range + */ + protected _wrapPosition(i: number): number { + const max = this._samples.length + return ((i % max) + max) % max + } +} diff --git a/ironfish/src/mining/__snapshots__/director.test.slow.ts.snap b/ironfish/src/mining/__snapshots__/director.test.slow.ts.snap new file mode 100644 index 0000000000..fb601dc6b6 --- /dev/null +++ b/ironfish/src/mining/__snapshots__/director.test.slow.ts.snap @@ -0,0 +1,58 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Mining director adds transactions from the queue to a new block to be mined 1`] = ` +Object { + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000", + "minersFee": "-100", + "noteCommitment": Object { + "commitment": "0-miners note 100", + "size": 45, + }, + "nullifierCommitment": Object { + "commitment": "B08D0434ABFFFB68F6374100A5E11553519AB0270FFE18137669AFA18E22B519", + "size": 18, + }, + "previousBlockHash": "0900000000000000000000000000000000000000000000000000000000000000", 
+ "sequence": "10", + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "timestamp": Any, +} +`; + +exports[`Mining director creates a new block to be mined when chain head changes 2`] = ` +Object { + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000", + "minersFee": "-10", + "noteCommitment": Object { + "commitment": "0-miners note 10", + "size": 41, + }, + "nullifierCommitment": Object { + "commitment": "1BA8B29F458F6FEBB8EB43097EEFCE7823CB306D56EDF4035B1849E3FECC0687", + "size": 16, + }, + "previousBlockHash": "0900000000000000000000000000000000000000000000000000000000000000", + "sequence": "10", + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "timestamp": Any, +} +`; + +exports[`Mining director does not add invalid transactions to the block 1`] = ` +Object { + "graffiti": "0000000000000000000000000000000000000000000000000000000000000000", + "minersFee": "-10", + "noteCommitment": Object { + "commitment": "0-miners note 10", + "size": 41, + }, + "nullifierCommitment": Object { + "commitment": "1BA8B29F458F6FEBB8EB43097EEFCE7823CB306D56EDF4035B1849E3FECC0687", + "size": 16, + }, + "previousBlockHash": "0900000000000000000000000000000000000000000000000000000000000000", + "sequence": "10", + "target": "115792089237316195423570985008687907853269984665640564039457584007913129639935", + "timestamp": Any, +} +`; diff --git a/ironfish/src/mining/director.test.slow.ts b/ironfish/src/mining/director.test.slow.ts new file mode 100644 index 0000000000..e5f0ac49de --- /dev/null +++ b/ironfish/src/mining/director.test.slow.ts @@ -0,0 +1,381 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { generateKey } from 'ironfish-wasm-nodejs' +import Captain, { Nullifier, RangeHasher, SerializedBlockHeader, Target } from '../captain' +import { MiningDirector } from './director' +import { waitForEmit } from '../event' +import { Account } from '../account' + +import { Validity } from '../captain' + +import { + TestStrategy, + TestCaptain, + TestTransaction, + SerializedTestTransaction, + makeCaptain, + makeNullifier, + makeFakeBlock, + makeDb, + makeDbName, + makeNextBlock, + blockHash, + makeChainGenesis, + TestMemPool, +} from '../captain/testUtilities' +import { MemPool } from '../memPool' + +// Number of notes and nullifiers on the initial chain created by makeCaptain +const TEST_CHAIN_NUM_NOTES = 40 +const TEST_CHAIN_NUM_NULLIFIERS = 16 + +function generateAccount(): Account { + const key = generateKey() + + return { + name: 'test', + rescan: -1, + incomingViewKey: key.incoming_view_key, + outgoingViewKey: key.outgoing_view_key, + publicAddress: key.public_address, + spendingKey: key.spending_key, + } +} + +describe('Mining director', () => { + const strategy = new TestStrategy(new RangeHasher()) + let captain: TestCaptain + let targetSpy: jest.SpyInstance + let targetMeetsSpy: jest.SpyInstance + let isAddBlockValidSpy: jest.SpyInstance + let memPool: TestMemPool + let director: MiningDirector< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + > + + beforeEach(async () => { + const db = makeDb(makeDbName()) + const chain = await makeChainGenesis(strategy, db) + captain = await Captain.new(db, strategy, chain) + isAddBlockValidSpy = jest + .spyOn(captain.chain.verifier, 'isAddBlockValid') + .mockResolvedValue({ + valid: Validity.Yes, + }) + for (let i = 1; i < 8 * 5; i++) { + await chain.notes.add(`${i}`) + + if (i % 5 < 2) { + await chain.nullifiers.add(makeNullifier(i)) + } + + if ((i + 1) % 5 === 0) { + await chain.addBlock(await makeNextBlock(chain)) + } + } + + memPool = new MemPool(captain) + director 
= new MiningDirector( + (captain as unknown) as Captain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + memPool, + ) + director.setMinerAccount(generateAccount()) + + targetSpy = jest.spyOn(Target, 'minDifficulty').mockImplementation(() => BigInt(1)) + targetMeetsSpy = jest.spyOn(Target, 'meets').mockImplementation(() => true) + + await director.start() + }) + + afterEach(async () => { + await captain.shutdown() + director.shutdown() + }) + + afterAll(() => { + targetSpy.mockClear() + targetMeetsSpy.mockClear() + isAddBlockValidSpy.mockClear() + }) + + it('creates a new block to be mined when chain head changes', async () => { + const chainHead = await captain.chain.getHeaviestHead() + const listenPromise = waitForEmit(director.onBlockToMine) + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + await captain.chain.onChainHeadChange.emitAsync(chainHead!.recomputeHash()) + const [data] = await listenPromise + const buffer = Buffer.from(data.bytes) + const block = JSON.parse(buffer.toString()) as Partial> + expect(data.target).toMatchInlineSnapshot(` + Target { + "targetValue": 115792089237316195423570985008687907853269984665640564039457584007913129639935n, + } + `) + expect(block).toMatchSnapshot({ timestamp: expect.any(Number) }) + }) + + it('adds transactions from the queue to a new block to be mined', async () => { + director.memPool.acceptTransaction( + new TestTransaction(true, ['abc', 'def'], 50, [ + { nullifier: makeNullifier(8), commitment: '0-3', size: 4 }, + ]), + ) + + director.memPool.acceptTransaction( + new TestTransaction(true, ['jkl', 'mno'], 40, [ + { nullifier: makeNullifier(9), commitment: '0-3', size: 4 }, + ]), + ) + const chainHead = await captain.chain.getHeaviestHead() + expect(chainHead).toBeDefined() + const listenPromise = waitForEmit(director.onBlockToMine) + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + await 
captain.chain.onChainHeadChange.emitAsync(chainHead!.recomputeHash()) + + const result = (await listenPromise)[0] + const buffer = Buffer.from(result.bytes) + const block = JSON.parse(buffer.toString()) as SerializedBlockHeader + + expect(block.noteCommitment.size).toBe(TEST_CHAIN_NUM_NOTES + 5) + expect(block.nullifierCommitment.size).toBe(TEST_CHAIN_NUM_NULLIFIERS + 2) + expect(block).toMatchSnapshot({ timestamp: expect.any(Number) }) + // Transactions stay in the queue until they are mined + expect(director.memPool.size()).toBe(2) + }) + + it('does not add invalid transactions to the block', async () => { + director.memPool.acceptTransaction( + new TestTransaction(false, ['abc', 'def'], 50, [ + { nullifier: makeNullifier(8), commitment: 'ghi', size: 4 }, + ]), + ) + + director.memPool.acceptTransaction( + new TestTransaction(false, ['jkl', 'mno'], 40, [ + { nullifier: makeNullifier(9), commitment: 'pqr', size: 4 }, + ]), + ) + + const chainHead = await captain.chain.getHeaviestHead() + expect(chainHead).toBeDefined() + const listenPromise = waitForEmit(director.onBlockToMine) + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + await captain.chain.onChainHeadChange.emitAsync(chainHead!.recomputeHash()) + + const result = (await listenPromise)[0] + const buffer = Buffer.from(result.bytes) + const block = JSON.parse(buffer.toString()) as SerializedBlockHeader + + expect(block.noteCommitment.size).toBe(TEST_CHAIN_NUM_NOTES + 1) + expect(block.nullifierCommitment.size).toBe(TEST_CHAIN_NUM_NULLIFIERS) + expect(block).toMatchSnapshot({ timestamp: expect.any(Number) }) + expect(director.memPool.size()).toBe(0) + }) +}) + +// TODO: Move these to MemPool +describe('isValidTransaction', () => { + const strategy = new TestStrategy(new RangeHasher()) + let captain: TestCaptain + let memPool: TestMemPool + + beforeEach(async () => { + captain = await makeCaptain(strategy) + memPool = new MemPool(captain) + }) + + afterEach(async () => { + await 
captain.shutdown() + }) + + it('is not valid if the spend was seen in other transactions in this block', async () => { + const transaction = new TestTransaction(true, ['abc', 'def'], 50, [ + { nullifier: makeNullifier(8), commitment: '0-3', size: 4 }, + ]) + + const beforeSize = TEST_CHAIN_NUM_NULLIFIERS + const seenNullifiers = [makeNullifier(8)] + const isValid = await memPool.isValidTransaction(transaction, beforeSize, seenNullifiers) + expect(isValid).toBe(false) + }) + + it('is not valid if the spend was seen in a previous block', async () => { + const aPreviousNullifier = await captain.chain.nullifiers.get(4) + + const transaction = new TestTransaction(true, ['abc', 'def'], 50, [ + { nullifier: aPreviousNullifier, commitment: '0-3', size: 4 }, + ]) + + const beforeSize = TEST_CHAIN_NUM_NULLIFIERS + const seenNullifiers: Nullifier[] = [] + const isValid = await memPool.isValidTransaction(transaction, beforeSize, seenNullifiers) + expect(isValid).toBe(false) + }) + + it('Updates seenNullifiers with valid transactions', async () => { + const seenNullifiers: Nullifier[] = [] + const beforeSize = TEST_CHAIN_NUM_NULLIFIERS + let transaction = new TestTransaction(true, ['abc', 'def'], 50, [ + { nullifier: makeNullifier(8), commitment: '0-3', size: 4 }, + ]) + let isValid = await memPool.isValidTransaction(transaction, beforeSize, seenNullifiers) + expect(isValid).toBe(true) + expect(seenNullifiers).toHaveLength(1) + + transaction = new TestTransaction(true, ['jkl', 'mno'], 40, [ + { nullifier: makeNullifier(9), commitment: '0-3', size: 4 }, + ]) + isValid = await memPool.isValidTransaction(transaction, beforeSize, seenNullifiers) + expect(isValid).toBe(true) + expect(seenNullifiers).toHaveLength(2) + }) +}) + +describe('successfullyMined', () => { + const strategy = new TestStrategy(new RangeHasher()) + let captain: TestCaptain + let memPool: TestMemPool + let director: MiningDirector< + string, + string, + TestTransaction, + string, + string, + 
SerializedTestTransaction + > + + beforeEach(async () => { + captain = await makeCaptain(strategy) + memPool = new MemPool(captain) + director = new MiningDirector( + (captain as unknown) as Captain< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + >, + memPool, + ) + director.setMinerAccount(generateAccount()) + }) + + afterEach(async () => { + await captain.shutdown() + director.shutdown() + }) + + it('emits nothing on mining if the block id is not known', () => { + const mockSubmit = jest.fn() + captain.emitBlock = mockSubmit + + director.successfullyMined(5, 0) + expect(captain.emitBlock).not.toBeCalled() + }) + + it('submits nothing if the block invalid', () => { + const mockSubmit = jest.fn() + captain.emitBlock = mockSubmit + + const block = makeFakeBlock(strategy, blockHash(9), blockHash(10), 10, 8, 20) + block.transactions[0].isValid = false + director.recentBlocks.set(1, block) + director.successfullyMined(5, 1) + expect(captain.emitBlock).not.toBeCalled() + }) + + it('submits a validly mined block', () => { + const mockSubmit = jest.fn() + captain.emitBlock = mockSubmit + + const block = makeFakeBlock(strategy, blockHash(9), blockHash(10), 10, 8, 20) + director.recentBlocks.set(2, block) + director.successfullyMined(5, 2) + expect(captain.emitBlock).toBeCalled() + }) +}) + +describe('Recalculating target', () => { + const minDifficulty = Target.minDifficulty() as bigint + const strategy = new TestStrategy(new RangeHasher()) + let captain: TestCaptain + let memPool: TestMemPool + let director: MiningDirector< + string, + string, + TestTransaction, + string, + string, + SerializedTestTransaction + > + jest.setTimeout(15000) + + beforeEach(async () => { + jest.useFakeTimers() + jest.setTimeout(15000000) + captain = await makeCaptain(strategy) + memPool = new MemPool(captain) + director = new MiningDirector( + (captain as unknown) as Captain< + string, + string, + TestTransaction, + string, + string, + 
SerializedTestTransaction + >, + memPool, + ) + director.setMinerAccount(generateAccount()) + await director.start() + }) + + afterAll(async () => { + jest.useRealTimers() + await captain.shutdown() + director.shutdown() + }) + + it('after 10 seconds the block header is updated and target is re-calculated if difficulty is high', async () => { + const newTarget = Target.fromDifficulty(minDifficulty + BigInt(10000000000)) + jest.spyOn(Target, 'calculateTarget').mockReturnValueOnce(newTarget) + + const heaviestHeader = await director.captain.chain.getHeaviestHead() + + const spy = jest.spyOn(director, 'constructAndMineBlock') + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + await director.onChainHeadChange(heaviestHeader!.recomputeHash()) + + jest.advanceTimersByTime(11000) + expect(spy).toBeCalledTimes(2) + }) + + it('after 10 seconds the block header is not updated and target is not re-calculated if difficulty is at minimum', async () => { + const newTarget = Target.fromDifficulty(minDifficulty) + jest.spyOn(Target, 'calculateTarget').mockReturnValueOnce(newTarget) + + const heaviestHeader = await director.captain.chain.getHeaviestHead() + + const spy = jest.spyOn(director, 'constructAndMineBlock') + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + await director.onChainHeadChange(heaviestHeader!.recomputeHash()) + + jest.advanceTimersByTime(11000) + expect(spy).toBeCalledTimes(1) + }) +}) diff --git a/ironfish/src/mining/director.ts b/ironfish/src/mining/director.ts new file mode 100644 index 0000000000..adcfcd774c --- /dev/null +++ b/ironfish/src/mining/director.ts @@ -0,0 +1,397 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import Captain, { + Block, + BlockHash, + BlockHeaderSerde, + Strategy, + Target, + Transaction, +} from '../captain' +import { JsonSerializable } from '../serde' +import { Event } from '../event' +import { createRootLogger, Logger } from '../logger' +import { submitMetric } from '../telemetry' +import LeastRecentlyUsed from 'lru-cache' +import { MemPool } from '../memPool' +import { ErrorUtils } from '../utils' +import { Account } from '../account' + +/** + * Number of transactions we are willing to store in a single block. + */ +const MAX_TRANSACTIONS_PER_BLOCK = 10 +const MINING_DIFFICULTY_CHANGE_TIMEOUT = 10000 + +type DirectorState = { type: 'STARTED' } | { type: 'STOPPED' } + +/** + * Responsible for directing miners about which block to mine. + * + * Listens for changes to the anchor chain head and emits a 'onBlockToMine' event + * for each one. + * + * @typeParam E Note element stored in transactions and the notes Merkle Tree + * @typeParam H the hash of an `E`. Used for the internal nodes and root hash + * of the notes Merkle Tree + * @typeParam T Type of a transaction stored on Captain's chain. + * @typeParam ST The serialized format of a `T`. Conversion between the two happens + * via the `strategy`. + */ +export class MiningDirector< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> { + readonly captain: Captain + + memPool: MemPool + + /** + * The event creates a block header with loose transactions that have been + * submitted to the network. It then waits for one of the miners to send it + * a randomness value for that block. If one arrives, the block is reconstructed, + * gossiped, and added to the local tree. + */ + onBlockToMine = new Event<[{ miningRequestId: number; bytes: Buffer; target: Target }]>() + + /** + * The chain strategy used to calculate miner's fees. + */ + strategy: Strategy + + /** + * Serde to convert block headers to jsonable objects. 
+ */ + blockHeaderSerde: BlockHeaderSerde + + /** + * Reference blocks that we most recently emitted for miners to mine. + */ + recentBlocks: LeastRecentlyUsed> + + /** + * Block currently being generated by the director. Nulled out after + * the miner's fee is generated. (It will be set to null while + * retrying) + */ + currentBlockUnderConstruction: BlockHash | null = null + + /** + * Next block to construct after currentBlockUnderConstruction finishes. + */ + nextBlockToConstruct: BlockHash | null = null + + /** + * The value to set on the `graffiti` field of newly generated blocks. + */ + private _blockGraffiti: string + get blockGraffiti(): string { + return this._blockGraffiti + } + + /** + * The private spending key for this miner. This is used to construct the + * miner's fee transaction for the block. + */ + private _minerAccount: Account | null + get minerAccount(): Account | null { + return this._minerAccount + } + + /** + * Logger instance used in place of console logs + */ + logger: Logger + + /** + * Setting an interval every 10 seconds to re-calculate the target for the + * currentBlock based on updated timestamp + */ + miningDifficultyChangeTimeout: null | ReturnType + + private _state: Readonly = { type: 'STOPPED' } + + get state(): Readonly { + return this._state + } + + setState(state: Readonly): void { + this._state = state + } + + /** + * Identifier for each request of blocks that gets sent to miners. This + * increases monotonically and allows director to figure out which + * block it is receiving randomness for. 
+ */ + private miningRequestId: number + + constructor( + captain: Captain, + memPool: MemPool, + logger: Logger = createRootLogger(), + ) { + this.captain = captain + this.memPool = memPool + this._blockGraffiti = '' + this._minerAccount = null + this.strategy = captain.strategy + this.blockHeaderSerde = new BlockHeaderSerde(captain.strategy) + this.logger = logger.withTag('director') + this.miningDifficultyChangeTimeout = null + this.miningRequestId = 0 + this.recentBlocks = new LeastRecentlyUsed(50) + + this.captain.chain.onChainHeadChange.on((newChainHead: BlockHash) => { + void this.onChainHeadChange(newChainHead).catch((err) => { + this.logger.error(err) + }) + }) + } + + async start(): Promise { + this.setState({ type: 'STARTED' }) + this.logger.debug('Mining director is running') + + const heaviestHead = await this.captain.chain.getHeaviestHead() + if (heaviestHead) { + await this.generateBlockToMine(heaviestHead.hash) + } + } + + isStarted(): boolean { + return this.state.type === 'STARTED' + } + + setBlockGraffiti(graffiti: string): void { + this._blockGraffiti = graffiti + } + + setMinerAccount(account: Account | null): void { + this._minerAccount = account + } + + /** + * Event listener hooked up to changes in AnchorChain. 
+ * + * When a new head is received it: + * * adds any transactions that we were attempting to mine back to the pool + * * Creates a new block with transactions from the pool + * * emits the header of the new block to any listening miners + * * stores block until either the head changes again or it is succesfully mined + * + * @param newChainHead The hash of the new head of the chain + * @event onBlockToMine header of a new block that needs to have its randomness mined + */ + async onChainHeadChange(newChainHead: BlockHash): Promise { + this.logger.debug('New chain head', newChainHead.toString('hex')) + + if (!this.isStarted()) { + return + } + + await this.generateBlockToMine(newChainHead) + } + + async generateBlockToMine(chainHead: BlockHash): Promise { + // If we're already generating a block, update the next block to generate and exit + this.nextBlockToConstruct = chainHead + if (this.currentBlockUnderConstruction !== null) return + + // Continue generating while we have a new block to generate + while (this.nextBlockToConstruct !== null && this.isStarted()) { + this.currentBlockUnderConstruction = this.nextBlockToConstruct + this.nextBlockToConstruct = null + + if (this.miningDifficultyChangeTimeout) { + clearTimeout(this.miningDifficultyChangeTimeout) + } + + let blockData = null + + try { + blockData = await this.constructTransactionsAndFees(this.currentBlockUnderConstruction) + } catch (error: unknown) { + this.logger.debug( + `An error occurred while creating the new block ${ErrorUtils.renderError(error)}`, + ) + } + + if (blockData === null) { + continue + } + + const [minersFee, blockTransactions] = blockData + await this.constructAndMineBlockWithRetry(minersFee, blockTransactions) + } + + // No longer generating a block + this.currentBlockUnderConstruction = null + } + + async constructAndMineBlockWithRetry(minersFee: T, blockTransactions: T[]): Promise { + if (!this.isStarted()) { + return + } + + const canRetry = await 
this.constructAndMineBlock(minersFee, blockTransactions) + // The current mining target is already at the initial - no need to try to lower it + if (!canRetry) return + + if (this.miningDifficultyChangeTimeout) { + clearTimeout(this.miningDifficultyChangeTimeout) + } + this.miningDifficultyChangeTimeout = setTimeout(() => { + void this.constructAndMineBlockWithRetry(minersFee, blockTransactions) + }, MINING_DIFFICULTY_CHANGE_TIMEOUT) + } + + /** + * Construct the transactions, header and miner fees used by constructAndMineBlock + * + * @param newChainHead The hash of the new head of the chain + */ + + async constructTransactionsAndFees(newChainHead: BlockHash): Promise<[T, T[]]> { + if (!this._minerAccount) { + throw Error('No miner account found to construct the transaction') + } + + const blockTransactions = [] + let totalTransactionFees = BigInt(0) + + for await (const transaction of this.memPool.get()) { + if (blockTransactions.length >= MAX_TRANSACTIONS_PER_BLOCK) break + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + blockTransactions.push(transaction) + totalTransactionFees += transaction.transactionFee() + } + + const blockHeader = await this.captain.chain.getBlockHeader(newChainHead) + if (!blockHeader) { + // Chain normally has a header for a heaviestHead. Block could be removed + // if a predecessor is proven invalid while this task is running. (unlikely but possible) + throw Error('No header for the new block') + } + + const minersFee = await this.strategy.createMinersFee( + totalTransactionFees, + blockHeader.sequence + BigInt(1), + this._minerAccount.spendingKey, + ) + + return [minersFee, blockTransactions] + } + + /** + * Construct a new block and send it out to miners. + * + * This is called both when the chain head changes and + * when the timeout for mining a block at its current difficulty + * expires. + * + * @param newChainHead The hash of the new head of the chain + * @returns a promise that resolves to a boolean. 
Boolean returns + * true if mining that block can be retried with a lower difficulty + */ + async constructAndMineBlock(minersFee: T, blockTransactions: T[]): Promise { + let newBlock + try { + const graffiti = Buffer.alloc(32) + graffiti.write(this.blockGraffiti) + + newBlock = await this.captain.chain.newBlock(blockTransactions, minersFee, graffiti) + } catch (e: unknown) { + const message = (e as { message?: string }).message + throw Error(`newBlock produced an invalid block: ${message || ''}`) + } + this.logger.debug( + `Current block ${newBlock.header.sequence}, has ${newBlock.transactions.length} transactions`, + ) + + // For mining, we want a serialized form of the header without the randomness on it + const target = newBlock.header.target + this.logger.debug('target set to', target.asBigInt()) + const asBuffer = newBlock.header.serializePartial() + this.miningRequestId++ + + this.logger.debug( + `Emitting a new block ${newBlock.header.sequence} to mine as request ${this.miningRequestId}`, + ) + await this.onBlockToMine.emitAsync({ + bytes: asBuffer, + target, + miningRequestId: this.miningRequestId, + }) + this.recentBlocks.set(this.miningRequestId, newBlock) + + const canRetry = target.asBigInt() < Target.maxTarget().asBigInt() + return canRetry + } + + /** + * Called when a block has been successfully mined. + * + * To reduce cost of communication with miners, only the randomness for + * the new block is passed in. It is set on the block we have stored locally + * and verified. + * + * The new block is added to the chain and sent out to be gossip'd. 
+ * + * @param randomness The randomness to be set for the new block + */ + successfullyMined(randomness: number, miningRequestId: number): void { + const block = this.recentBlocks.get(miningRequestId) + if (!block) { + this.logger.debug( + 'Received randomness for a block with unknown request ID (it may have expired)', + ) + return + } + + block.header.randomness = randomness + const validation = this.captain.chain.verifier.verifyBlock(block) + if (!validation.valid) { + this.logger.warn('Discarding invalid block', validation.reason) + return + } + + this.logger.info( + `Successful block ${block.header.sequence} has ${block.transactions.length} transactions`, + ) + this.logger.info( + `Propagating successfully mined block ${block.header.sequence}`, + block.header.hash, + ) + const header = block.header + submitMetric({ + name: 'minedBlock', + fields: [ + { name: 'difficulty', type: 'integer', value: Number(header.target.toDifficulty()) }, + { name: 'sequence', type: 'integer', value: Number(header.sequence) }, + ], + }) + + void this.captain.chain.addBlock(block) + this.captain.emitBlock(block) + } + + /** + * clears the timeout to queue up more mining jobs with recalculated target + */ + shutdown(): void { + this.setState({ type: 'STOPPED' }) + + if (this.miningDifficultyChangeTimeout) { + clearTimeout(this.miningDifficultyChangeTimeout) + } + } +} + +export default MiningDirector diff --git a/ironfish/src/mining/index.ts b/ironfish/src/mining/index.ts new file mode 100644 index 0000000000..affe3df62e --- /dev/null +++ b/ironfish/src/mining/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export { default as miner } from './miner' +export { MiningDirector } from './director' diff --git a/ironfish/src/mining/mineHeaderTask.test.ts b/ironfish/src/mining/mineHeaderTask.test.ts new file mode 100644 index 0000000000..74dcfe2705 --- /dev/null +++ b/ironfish/src/mining/mineHeaderTask.test.ts @@ -0,0 +1,76 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import hashBlockHeader from './miningAlgorithm' +import mineBatch from './mineHeaderTask' +import { mocked } from 'ts-jest/utils' + +jest.mock('./miningAlgorithm') + +describe('Mine header tasks', () => { + beforeEach(() => { + mocked(hashBlockHeader).mockReset() + }) + it('attempt batch size times', () => { + const targetTooBig = Buffer.alloc(8) + targetTooBig[0] = 10 + mocked(hashBlockHeader).mockReturnValue(targetTooBig) + + const result = mineBatch({ + headerBytesWithoutRandomness: Buffer.alloc(8), + initialRandomness: 42, + targetValue: '0', + batchSize: 10, + miningRequestId: 1, + }) + + expect(result).toStrictEqual({ initialRandomness: 42 }) + expect(hashBlockHeader).toBeCalledTimes(10) + }) + it('finds the randomness', () => { + const targetTooBig = Buffer.alloc(8) + targetTooBig[0] = 10 + mocked(hashBlockHeader) + .mockReturnValueOnce(targetTooBig) + .mockReturnValueOnce(targetTooBig) + .mockReturnValueOnce(targetTooBig) + .mockReturnValue(Buffer.alloc(0)) + + const result = mineBatch({ + headerBytesWithoutRandomness: Buffer.alloc(0), + initialRandomness: 42, + targetValue: '100', + batchSize: 10, + miningRequestId: 2, + }) + + expect(result).toStrictEqual({ initialRandomness: 42, randomness: 45, miningRequestId: 2 }) + expect(hashBlockHeader).toBeCalledTimes(4) + }) + it('wraps the randomness', () => { + const targetTooBig = Buffer.alloc(8) + targetTooBig[0] = 10 + mocked(hashBlockHeader) + .mockReturnValueOnce(targetTooBig) + 
.mockReturnValueOnce(targetTooBig) + .mockReturnValueOnce(targetTooBig) + .mockReturnValueOnce(targetTooBig) + .mockReturnValue(Buffer.alloc(0)) + + const result = mineBatch({ + headerBytesWithoutRandomness: Buffer.alloc(0), + initialRandomness: Number.MAX_SAFE_INTEGER - 1, + targetValue: '0', + batchSize: 10, + miningRequestId: 3, + }) + + expect(result).toStrictEqual({ + initialRandomness: Number.MAX_SAFE_INTEGER - 1, + randomness: 2, + miningRequestId: 3, + }) + expect(hashBlockHeader).toBeCalledTimes(5) + }) +}) diff --git a/ironfish/src/mining/mineHeaderTask.ts b/ironfish/src/mining/mineHeaderTask.ts new file mode 100644 index 0000000000..ebdd18e3e9 --- /dev/null +++ b/ironfish/src/mining/mineHeaderTask.ts @@ -0,0 +1,67 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Target } from '../captain' +import hashBlockHeader from './miningAlgorithm' + +/** + * Expose a worker task that attempts to mine 1000 randomness + * values. + * + * It hashes each value using the hashFunction and checks if + * it meets the provided Target. If it does, that randomness + * is return, otherwise it tries again. + * + * After 1000 numbers it exits, returning undefined + * + * @param headerBytesWithoutRandomness The bytes to be appended to randomness to generate a header + * @param miningRequestId An identifier that is passed back to the miner when returning a + * successfully mined block + * @param initialRandomness The first randomness value to attempt. 
Will try the next + * 1000 randomness values after that + * @param targetValue The target value that a valid block hash must be below for + * a given randomness + * @param batchSize The number of attempts to mine that should be made in this batch + * each attempt increments the randomness starting from initialRandomness + * @returns object with initialRandomness (useful as a promise identifier) + * and a randomness value that is either a successfully mined number or undefined, + * and the miningRequestId that was sent in + */ +export default function mineBatch({ + miningRequestId, + headerBytesWithoutRandomness, + initialRandomness, + targetValue, + batchSize, +}: { + miningRequestId: number + headerBytesWithoutRandomness: Buffer + initialRandomness: number + targetValue: string + batchSize: number +}): { initialRandomness: number; randomness?: number; miningRequestId?: number } { + const target = new Target(targetValue) + const randomnessBytes = new ArrayBuffer(8) + + for (let i = 0; i < batchSize; i++) { + // The intention here is to wrap randomness between 0 inclusive and Number.MAX_SAFE_INTEGER inclusive + const randomness = + i > Number.MAX_SAFE_INTEGER - initialRandomness + ? i - (Number.MAX_SAFE_INTEGER - initialRandomness) - 1 + : initialRandomness + i + new DataView(randomnessBytes).setFloat64(0, randomness, false) + + const headerBytes = Buffer.concat([ + Buffer.from(randomnessBytes), + headerBytesWithoutRandomness, + ]) + + const blockHash = hashBlockHeader(headerBytes) + + if (Target.meets(new Target(blockHash).asBigInt(), target)) { + return { initialRandomness, randomness, miningRequestId } + } + } + return { initialRandomness } +} diff --git a/ironfish/src/mining/miner.test.ts b/ironfish/src/mining/miner.test.ts new file mode 100644 index 0000000000..8e7abea220 --- /dev/null +++ b/ironfish/src/mining/miner.test.ts @@ -0,0 +1,139 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import miner from './miner' +import Piscina from 'piscina' + +jest.mock('piscina') +// Tell typescript to treat it as a mock +const MockPiscina = (Piscina as unknown) as jest.Mock + +/** + * Make an iterable of blocks suitable for async generation + * + * If waitAfter is supplied, it won't return an exhausted stream + * until that promise completes. This is useful for tests that + * need to wait for an event to happen before the stream exhausts. + */ +async function* makeAsync( + array: { + bytes: { type: 'Buffer'; data: number[] } + target: string + miningRequestId: number + }[], + waitAfter = Promise.resolve(), +) { + for (const block of array) { + yield block + } + + await waitAfter +} + +/** + * Create a promise that never resolves. + */ +function pending(): Promise { + return new Promise(() => {}) +} + +describe('Miner', () => { + const successfullyMined = jest.fn() + beforeEach(() => { + MockPiscina.mockReset() + successfullyMined.mockReset() + }) + it('constructs a miner', async () => { + const mock = { + runTask: jest.fn(async () => pending()), + destroy: jest.fn(async () => Promise.resolve()), + } + MockPiscina.mockImplementation(() => (mock as unknown) as Piscina) + await miner( + makeAsync([ + { + bytes: { type: 'Buffer', data: [] }, + target: '0', + miningRequestId: 1, + }, + ]), + successfullyMined, + ) + expect(MockPiscina).toHaveBeenCalledTimes(1) + expect(mock.runTask).toHaveBeenCalledTimes(1) + expect(mock.destroy).toHaveBeenCalledTimes(1) + expect(successfullyMined).not.toBeCalled() + }) + + it('reschedules on new block', async () => { + const mock = { + runTask: jest.fn(async () => + Promise.resolve({ randomness: 5, initialRandomness: 10, miningRequestId: 10 }), + ), + destroy: jest.fn(async () => Promise.resolve()), + } + MockPiscina.mockImplementation(() => (mock as unknown) as Piscina) + await miner( + makeAsync([ + { + bytes: { 
type: 'Buffer', data: [] }, + target: '0', + miningRequestId: 2, + }, + { + bytes: { type: 'Buffer', data: [] }, + target: '0', + miningRequestId: 3, + }, + { + bytes: { type: 'Buffer', data: [] }, + target: '0', + miningRequestId: 4, + }, + ]), + successfullyMined, + ) + expect(MockPiscina).toHaveBeenCalledTimes(1) + expect(mock.runTask).toHaveBeenCalledTimes(3) + expect(mock.destroy).toHaveBeenCalledTimes(1) + expect(successfullyMined).not.toBeCalled() + }) + + it('calls successfullyMined', async () => { + jest.spyOn(global.Math, 'floor').mockReturnValue(10) + const mock = { + runTask: jest.fn(async () => + Promise.resolve({ randomness: 5, initialRandomness: 10, miningRequestId: 10 }), + ), + destroy: jest.fn(async () => Promise.resolve()), + } + + // Used to keep the generator from returning until a block has a chance to mine + let successfulPromiseCallback: () => void + const successfulPromise: Promise = new Promise( + (resolve) => (successfulPromiseCallback = resolve), + ) + + // Exit the generator only after a block has mined + successfullyMined.mockImplementation(() => successfulPromiseCallback()) + + MockPiscina.mockImplementation(() => (mock as unknown) as Piscina) + await miner( + makeAsync( + [ + { + bytes: { type: 'Buffer', data: [] }, + target: '0', + miningRequestId: 2, + }, + ], + successfulPromise, + ), + successfullyMined, + ) + expect(successfullyMined).toBeCalledTimes(1) + expect(MockPiscina).toHaveBeenCalledTimes(1) + expect(mock.runTask).toHaveBeenCalledTimes(1) + expect(mock.destroy).toHaveBeenCalledTimes(1) + }) +}) diff --git a/ironfish/src/mining/miner.ts b/ironfish/src/mining/miner.ts new file mode 100644 index 0000000000..f013ac618a --- /dev/null +++ b/ironfish/src/mining/miner.ts @@ -0,0 +1,206 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +// TODO: This file depends on nodejs librarys (piscina, path) and will not +// work with browser workers. This will need to be abstracted in future. + +import Piscina from 'piscina' +import path from 'path' + +/** + * The number of tasks to run in each thread batch + */ +const BATCH_SIZE = 10000 + +/** + * Return value from a mining task. + * + * @param initialRandomness the value that was passed into the task + * for the initial randomness. Used by the calling code as a task id + * @param randomness if defined, a value for randomness that was found + * while mining the task. If undefined, none of the BATCH_SIZE attempts + * in this task created a valid header + */ +type MineResult = { initialRandomness: number; randomness?: number; miningRequestId?: number } + +/** + * Typeguard to check if an object is a result + * + * Used in racing promises against the new incoming block promise + * + * @param obj object being checked for type + */ +function isMineResult(obj: unknown): obj is MineResult { + const asMineResult = obj as MineResult + + if (asMineResult.initialRandomness !== undefined) { + return true + } + + return false +} + +/** + * Type of the spawned task. + * + * Only used to keep typescript happy when constructing the pool + */ +export type MiningTask = ( + headerBytesWithoutRandomness: Buffer, + initialRandomness: number, + target: bigint, + hashFunction: string, +) => number | undefined + +/** + * Add a new job to the pool of mining tasks. 
+ * + * Called when a new block has been discovered to be mined, + * and when an existing batch exits unsuccessfully + * + * @param pool The pool of workers + * @param bytes The bytes of the header to be mined by this task + * @param randomness The initial randomness value the worker should test + * it will increment this value until it finds a match or has + * tried all the values in its batch + * @param target The target value that this batch needs to meet + * @param hashFunction the strategy's hash function, serialized to a string + */ +function enqueue( + piscina: Piscina, + bytes: Buffer, + miningRequestId: number, + randomness: number, + target: string, +): PromiseLike { + return piscina.runTask({ + miningRequestId, + headerBytesWithoutRandomness: bytes, + initialRandomness: randomness, + targetValue: target, + batchSize: BATCH_SIZE, + }) +} + +/** + * Prime the pool of mining tasks with several jobs for the given block. + * The main miner will create new jobs one at a time as each of these + * complete. + * + * @param randomness the inital randomness. Each task will try BATCH_SIZE + * variations on this randomness before returning. 
+ * @param pool The pool of workers + * @param tasks The list of promises to add the new tasks to + * @param numTasks The number of new tasks to enqueue + * @param bytes The bytes of the header to be mined by these tasks + * @param target The target value that this batch needs to meet + * @param hashFunction the strategy's hash function, serialized to a string + */ +function primePool( + randomness: number, + piscina: Piscina, + tasks: Record>, + numTasks: number, + newBlockData: { + bytes: { type: 'Buffer'; data: number[] } + target: string + miningRequestId: number + }, +): number { + const bytes = Buffer.from(newBlockData.bytes) + + for (let i = 0; i < numTasks; i++) { + tasks[randomness] = enqueue( + piscina, + bytes, + newBlockData.miningRequestId, + randomness, + newBlockData.target, + ) + randomness += BATCH_SIZE + } + + return randomness +} + +/** + * The miner task. + * + * This will probably be started from the RPC layer, which will + * also need to subscribe to mining director tasks and emit them. + * + * @param strategy The strategy that contains the hashBlockHeader function + * Note that hashBlockHeader must be serializable as a string so that it + * can be eval'd. Specifically, it must not use any global values + * from its containing scope, including `this` or any imported modules. + * @param newBlocksIterator Async iterator of new blocks coming in from + * the network + * @param successfullyMined function to call when a block has been successfully + * mined. The glue code will presumably send this to the mining director + * over RPC. + * @param numTasks The number of worker tasks to run in parallel threads. 
+ */ +async function miner( + newBlocksIterator: AsyncIterator<{ + bytes: { type: 'Buffer'; data: number[] } + target: string + miningRequestId: number + }>, + successfullyMined: (randomness: number, miningRequestId: number) => void, + numTasks = 1, +): Promise { + let blockToMineResult = await newBlocksIterator.next() + if (blockToMineResult.done) return + let blockPromise = newBlocksIterator.next() + + const piscina = new Piscina({ + filename: path.resolve(__dirname, 'mineHeaderTask.js'), + }) + + let tasks: Record> = {} + + let randomness = Math.floor(Math.random() * Number.MAX_SAFE_INTEGER) + + primePool(randomness, piscina, tasks, numTasks, blockToMineResult.value) + + for (;;) { + const result = await Promise.race([blockPromise, ...Object.values(tasks)]) + + if (isMineResult(result)) { + delete tasks[result.initialRandomness] + + if (result.randomness !== undefined && result.miningRequestId !== undefined) { + successfullyMined(result.randomness, result.miningRequestId) + continue + } + + tasks[randomness] = enqueue( + piscina, + Buffer.from(blockToMineResult.value.bytes), + blockToMineResult.value.miningRequestId, + randomness, + blockToMineResult.value.target, + ) + + randomness += BATCH_SIZE + } else { + tasks = {} // We don't care about the discarded tasks; they will exit soon enough + + blockToMineResult = result + + if (blockToMineResult.done) { + break + } + + randomness = Math.floor(Math.random() * Number.MAX_SAFE_INTEGER) + primePool(randomness, piscina, tasks, numTasks, blockToMineResult.value) + + blockPromise = newBlocksIterator.next() + } + } + + await piscina.destroy() +} + +export default miner diff --git a/ironfish/src/mining/miningAlgorithm.ts b/ironfish/src/mining/miningAlgorithm.ts new file mode 100644 index 0000000000..941edf18a5 --- /dev/null +++ b/ironfish/src/mining/miningAlgorithm.ts @@ -0,0 +1,12 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { BlockHash } from '../captain' + +import { createHash } from 'blake3-wasm' + +export default function hashBlockHeader(serializedHeader: Buffer): BlockHash { + const hash = createHash() + hash.update(serializedHeader) + return hash.digest() +} diff --git a/ironfish/src/network/__snapshots__/peerNetwork.test.ts.snap b/ironfish/src/network/__snapshots__/peerNetwork.test.ts.snap new file mode 100644 index 0000000000..d4efee2708 --- /dev/null +++ b/ironfish/src/network/__snapshots__/peerNetwork.test.ts.snap @@ -0,0 +1,7 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Registers a handler 1`] = ` +Map { + "hello" => "gossip", +} +`; diff --git a/ironfish/src/network/identity.ts b/ironfish/src/network/identity.ts new file mode 100644 index 0000000000..9be19e1c35 --- /dev/null +++ b/ironfish/src/network/identity.ts @@ -0,0 +1,48 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import tweetnacl, { BoxKeyPair } from 'tweetnacl' + +/** + * Types and helper functions related to a peer's identity. + */ + +/** + * The entire identity required to send messages on the peer network. + * An object consisting of a public key and a private key. + */ +export type PrivateIdentity = BoxKeyPair + +/** + * A base64-encoded 32-byte public key exposed to other peers on the network. + */ +export type Identity = string + +/** + * Length of the identity in bytes. + */ +export const identityLength = tweetnacl.box.publicKeyLength + +/** + * Length of the identity as a base64-encoded string. 
+ */ +export const base64IdentityLength = Math.ceil(identityLength / 3) * 4 + +export function isIdentity(obj: string): boolean { + // Should be a base64-encoded string with the expected length + return ( + obj.length == base64IdentityLength && Buffer.from(obj, 'base64').toString('base64') === obj + ) +} + +export function canInitiateWebRTC(source: Identity, dest: Identity): boolean { + return source > dest +} + +export function canKeepDuplicateConnection(source: Identity, dest: Identity): boolean { + return canInitiateWebRTC(source, dest) +} + +export function privateIdentityToIdentity(identity: PrivateIdentity): Identity { + return Buffer.from(identity.publicKey).toString('base64') +} diff --git a/ironfish/src/network/index.ts b/ironfish/src/network/index.ts new file mode 100644 index 0000000000..9e9be74415 --- /dev/null +++ b/ironfish/src/network/index.ts @@ -0,0 +1,25 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export { PeerNetwork, RoutingStyle } from './peerNetwork' + +export type { Gossip, Rpc } from './messageRouters' +export { CannotSatisfyRequestError, Direction } from './messageRouters' +export { RPC_TIMEOUT_MILLIS } from './messageRouters/rpcId' + +export type { Connection } from './peers/connections' +export type { Peer } from './peers/peer' +export type { PeerManager } from './peers/peerManager' + +export { + base64IdentityLength, + identityLength, + isIdentity, + Identity, + PrivateIdentity, + privateIdentityToIdentity, +} from './identity' + +export * from './messages' +export * from './utils' diff --git a/ironfish/src/network/messageRouters/fireAndForget.test.ts b/ironfish/src/network/messageRouters/fireAndForget.test.ts new file mode 100644 index 0000000000..94082756cc --- /dev/null +++ b/ironfish/src/network/messageRouters/fireAndForget.test.ts @@ -0,0 +1,68 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +jest.mock('ws') + +import ws from 'ws' +import { PeerNetwork, RoutingStyle } from '../peerNetwork' +import { PeerManager } from '../peers/peerManager' +import { mockPrivateIdentity, mockLocalPeer, getConnectedPeer } from '../testUtilities' +import { FireAndForgetRouter, IncomingFireAndForgetGeneric } from './fireAndForget' + +jest.useFakeTimers() + +describe('FireAndForget Router', () => { + it('sends a fire and forget message', () => { + const peers = new PeerManager(mockLocalPeer()) + const sendToMock = jest.spyOn(peers, 'sendTo') + + const router = new FireAndForgetRouter(peers) + router.register('pass', jest.fn()) + + const { peer } = getConnectedPeer(peers) + const request = { type: 'test', payload: { test: 'payload' } } + router.fireAndForget(peer, request) + expect(sendToMock).toBeCalledWith(peer, request) + }) + + it('handles an incoming fire and forget message', async () => { + const peers = new PeerManager(mockLocalPeer()) + const router = new FireAndForgetRouter(peers) + + const handleMock = jest.fn((_message: IncomingFireAndForgetGeneric<'incoming'>) => + Promise.resolve(), + ) + router.register('incoming', handleMock) + + const { peer } = getConnectedPeer(peers) + await router.handle(peer, { type: 'incoming', payload: { test: 'payload' } }) + + expect(handleMock).toHaveBeenCalledWith({ + peerIdentity: peer.getIdentityOrThrow(), + message: { type: 'incoming', payload: { test: 'payload' } }, + }) + }) + + it('routes a fire and forget message as fire and forget', async () => { + const network = new PeerNetwork(mockPrivateIdentity('local'), 'sdk/1/cli', ws) + const fireAndForgetMock = jest.fn(async () => {}) + network['fireAndForgetRouter'].handle = fireAndForgetMock + + network.registerHandler( + 'test', + RoutingStyle.fireAndForget, + jest.fn((p) => Promise.resolve(p)), + () => {}, + ) + + const { peer } = getConnectedPeer(network.peerManager) + await network['handleMessage'](peer, { + peerIdentity: peer.getIdentityOrThrow(), + message: { type: 
'test', payload: { test: 'payload' } }, + }) + + expect(fireAndForgetMock).toBeCalled() + network.stop() + }) +}) diff --git a/ironfish/src/network/messageRouters/fireAndForget.ts b/ironfish/src/network/messageRouters/fireAndForget.ts new file mode 100644 index 0000000000..0f83e37742 --- /dev/null +++ b/ironfish/src/network/messageRouters/fireAndForget.ts @@ -0,0 +1,74 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { + MessageType, + IncomingPeerMessage, + Message, + PayloadType, + LooseMessage, +} from '../messages' +import { PeerManager } from '../peers/peerManager' +import { Peer } from '../peers/peer' + +export type IncomingFireAndForgetGeneric = IncomingPeerMessage< + Message +> + +export type IncomingFireAndForgetPeerMessage = IncomingFireAndForgetGeneric + +/** + * Trivial router for sending a message to a connected peer without + * expecting a response or receipt confirmation. + */ +export class FireAndForgetRouter { + peerManager: PeerManager + private handlers: Map< + MessageType, + (message: IncomingFireAndForgetPeerMessage) => Promise + > + + constructor(peerManager: PeerManager) { + this.peerManager = peerManager + this.handlers = new Map< + MessageType, + (message: IncomingFireAndForgetPeerMessage) => Promise + >() + } + + /** + * Register a callback function for a given type of handler + */ + register( + type: T, + handler: (message: IncomingFireAndForgetGeneric) => Promise, + ): void + register( + type: MessageType, + handler: (message: IncomingFireAndForgetPeerMessage) => Promise, + ): void { + this.handlers.set(type, handler) + } + + /** + * Forward the message directly to the intended recipient. + */ + fireAndForget(peer: Peer, message: LooseMessage): void { + this.peerManager.sendTo(peer, message) + } + + /** + * Handle an incoming fire and forget message. 
Just send it up to the + * handler without any processing. + */ + async handle( + peer: Peer, + message: IncomingFireAndForgetPeerMessage['message'], + ): Promise { + const handler = this.handlers.get(message.type) + if (handler === undefined) return + const peerIdentity = peer.getIdentityOrThrow() + await handler({ peerIdentity, message }) + } +} diff --git a/ironfish/src/network/messageRouters/globalRpc.test.ts b/ironfish/src/network/messageRouters/globalRpc.test.ts new file mode 100644 index 0000000000..543aea5f33 --- /dev/null +++ b/ironfish/src/network/messageRouters/globalRpc.test.ts @@ -0,0 +1,255 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +jest.mock('./rpcId') +jest.mock('ws') + +import { mocked } from 'ts-jest/utils' +import { nextRpcId } from './rpcId' +import { CannotSatisfyRequestError, Direction, RpcRouter } from './rpc' +import { PeerManager } from '../peers/peerManager' +import { mockLocalPeer, getConnectedPeer } from '../testUtilities' +import { GlobalRpcRouter } from './globalRpc' +import { InternalMessageType, MessageType } from '../messages' +import '../testUtilities' + +jest.useFakeTimers() + +describe('select peers', () => { + // Apologies for the confusing and fragile random manipulations + afterEach(() => { + jest.spyOn(global.Math, 'random').mockRestore() + }) + + it('Returns null when no peers available', () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + + router.register('take', jest.fn()) + expect(router['selectPeer']('take')).toBe(null) + }) + + it('Selects the peer if there is only one', () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + + const pm = router.rpcRouter.peerManager + const { peer } = getConnectedPeer(pm) + + router.register('take', jest.fn()) + 
expect(router['selectPeer']('take')).toBe(peer) + }) + + it('Selects peer2 if peer1 is saturated`', () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + router.register('take', jest.fn()) + const pm = router.rpcRouter.peerManager + const { peer: peer1 } = getConnectedPeer(pm) + const { peer: peer2 } = getConnectedPeer(pm) + + peer1.pendingRPC = peer1.pendingRPCMax + expect(peer1.isSaturated).toBe(true) + expect(peer2.isSaturated).toBe(false) + + router.register('take', jest.fn()) + expect(router['selectPeer']('take')).toBe(peer2) + }) + + it('Selects peer2 if peer1 failed', () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + + const pm = router.rpcRouter.peerManager + const { peer: peer1 } = getConnectedPeer(pm) + const { peer: peer2 } = getConnectedPeer(pm) + + peer1.pendingRPC = 0 + peer2.pendingRPC = 1 + + router.requestFails.set( + peer1.getIdentityOrThrow(), + new Set(['take']), + ) + + router.register('take', jest.fn()) + expect(router['selectPeer']('take')).toBe(peer2) + + router.requestFails.delete(peer1.getIdentityOrThrow()) + expect(router['selectPeer']('take')).toBe(peer1) + }) + + it('Selects the peer1 if both failed', () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + + const pm = router.rpcRouter.peerManager + const { peer: peer1 } = getConnectedPeer(pm) + const { peer: peer2 } = getConnectedPeer(pm) + + peer1.pendingRPC = 0 + peer2.pendingRPC = 1 + + router.requestFails.set( + peer1.getIdentityOrThrow(), + new Set(['take']), + ) + router.requestFails.set( + peer2.getIdentityOrThrow(), + new Set(['take']), + ) + + expect(router.requestFails.get(peer1.getIdentityOrThrow())?.has('take')).toBe(true) + expect(router.requestFails.get(peer2.getIdentityOrThrow())?.has('take')).toBe(true) + + router.register('take', jest.fn()) + expect(router['selectPeer']('take')).toBe(peer1) + + // Test the fail counts were reset + 
expect(router.requestFails.get(peer1.getIdentityOrThrow())?.has('take')).toBe(false) + expect(router.requestFails.get(peer2.getIdentityOrThrow())?.has('take')).toBe(false) + }) + + it('Clears requestFails when peers disconnect', () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + + const pm = router.rpcRouter.peerManager + const { peer } = getConnectedPeer(pm) + + router.register('take', jest.fn()) + router.requestFails.set( + peer.getIdentityOrThrow(), + new Set(['take']), + ) + + expect(router.requestFails.has(peer.getIdentityOrThrow())).toBe(true) + pm.onDisconnect.emit(peer) + expect(router.requestFails.has(peer.getIdentityOrThrow())).toBe(false) + }) +}) + +describe('Global Rpc', () => { + beforeEach(() => jest.restoreAllMocks()) + + it('Constructs a global RPC Router correctly', () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + expect(router.requestFails.size).toBe(0) + }) + + it('Registers a global RPC Handler with the direct rpc router', async () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + const handler = jest.fn() + router.register('test', handler) + expect(router.rpcRouter['handlers'].size).toBe(1) + const rpcHandler = router.rpcRouter['handlers'].get('test') + expect(rpcHandler).toBeDefined() + if (!rpcHandler) throw new Error('rpcHandler should be defined') + await rpcHandler({ + peerIdentity: 'peer', + message: { + type: 'test', + rpcId: 0, + direction: Direction.request, + payload: { test: 'payload' }, + }, + }) + expect(handler).toBeCalledTimes(1) + }) + + it('throws when there are no peers available', async () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + + router.register('test', () => Promise.resolve(undefined)) + const promise = router.request({ type: 'test', payload: { test: 'payload' } }) + await expect(promise).toRejectErrorInstance(CannotSatisfyRequestError) + 
}) + + it('throws when peers available but none respond', async () => { + mocked(nextRpcId).mockReturnValue(44) + + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + + const pm = router.rpcRouter.peerManager + getConnectedPeer(pm) + getConnectedPeer(pm) + + const sendToMock = jest.spyOn(pm, 'sendTo') + router.register('test', () => Promise.resolve(undefined)) + + const promise = router.request({ type: 'test', payload: { test: 'payload' } }) + + // Disconnect both peers with timeouts + await new Promise((resolve) => setImmediate(resolve)) + jest.runOnlyPendingTimers() + await new Promise((resolve) => setImmediate(resolve)) + jest.runOnlyPendingTimers() + + await expect(promise).toRejectErrorInstance(CannotSatisfyRequestError) + expect(router.requestFails.size).toBe(0) + expect(sendToMock).toBeCalledTimes(2) + }) + + it('handles a round trip successfully with one peer', async () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + + const pm = router.rpcRouter.peerManager + const sendToMock = jest.spyOn(pm, 'sendTo') + const { peer } = getConnectedPeer(pm) + + const request = { + rpcId: 16, + type: 'test', + payload: { test: 'payload' }, + } + + const response = { + rpcId: 16, + direction: Direction.response, + type: 'test', + payload: { response: 'payload' }, + } + + mocked(nextRpcId).mockReturnValueOnce(16) + router.register('test', () => Promise.resolve(undefined)) + const promise = router.request(request) + + await router.handle(peer, response) + expect(sendToMock).toBeCalledWith(peer, expect.objectContaining(request)) + await expect(promise).resolves.toMatchObject({ message: response }) + }) + + it('retries if first attempt returns cannot fulfill request', async () => { + const router = new GlobalRpcRouter(new RpcRouter(new PeerManager(mockLocalPeer()))) + + const pm = router.rpcRouter.peerManager + const sendToMock = jest.spyOn(pm, 'sendTo') + + const { peer: peer1 } = 
getConnectedPeer(pm) + const { peer: peer2 } = getConnectedPeer(pm) + + router.register('test', () => Promise.resolve(undefined)) + mocked(nextRpcId).mockReturnValueOnce(34).mockReturnValueOnce(11) + const promise = router.request({ type: 'test', payload: { test: 'payload' } }) + + await router.handle(peer1, { + rpcId: 34, + direction: Direction.response, + type: InternalMessageType.cannotSatisfyRequest, + payload: {}, + }) + + void router.handle(peer2, { + rpcId: 11, + direction: Direction.response, + type: 'test', + payload: { response: 'payload' }, + }) + + await expect(promise).resolves.toMatchObject({ + peerIdentity: peer2.getIdentityOrThrow(), + message: { + rpcId: 11, + direction: Direction.response, + type: 'test', + payload: { response: 'payload' }, + }, + }) + expect(sendToMock).toBeCalledTimes(2) + }) +}) diff --git a/ironfish/src/network/messageRouters/globalRpc.ts b/ironfish/src/network/messageRouters/globalRpc.ts new file mode 100644 index 0000000000..489e74750e --- /dev/null +++ b/ironfish/src/network/messageRouters/globalRpc.ts @@ -0,0 +1,163 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { + CannotSatisfyRequestError, + IncomingRpcGeneric, + IncomingRpcPeerMessage, + RpcRouter, +} from './rpc' +import { MessageType, Message, InternalMessageType, PayloadType } from '../messages' +import { Identity } from '../identity' +import { Peer } from '../peers/peer' +import { ArrayUtils } from '../../utils' + +/** + * Number of times to attempt a request with another peer before giving up. + */ +export const RETRIES = 5 + +/** + * Router for sending RPC messages where the client doesn't care which + * peer gives it a response, just that it gets one. 
+ * + * This router automatically retries with another peer if the first + * one fails or times out, and + */ +export class GlobalRpcRouter { + rpcRouter: RpcRouter + /** + * Map of RPC calls per message type to the number of times the message + * has received a "cannot fulfill request" response from that peer + * **since the last time a successful event was received**. + * + * This is useful to limit the number of requests to recently connected peers + * that have not received all the necessary data yet, as well as peers that are + * not storing all of the data. + */ + requestFails: Map> + + constructor(router: RpcRouter) { + this.rpcRouter = router + this.requestFails = new Map>() + + // Clear failures when a peer disconnects to avoid memory leaks + this.rpcRouter.peerManager.onDisconnect.on((peer: Peer) => { + if (peer.state.identity != null) { + this.requestFails.delete(peer.state.identity) + } + }) + } + + /** + * Register a callback function for a given type of handler. This handler + * is used for incoming RPC requents, and should be responded to as with + * a normal RPC handler. + */ + register( + type: T, + handler: (message: IncomingRpcGeneric) => Promise, + ): void + register( + type: MessageType, + handler: (message: IncomingRpcPeerMessage) => Promise, + ): void { + this.rpcRouter.register(type, async (message: IncomingRpcPeerMessage) => { + // TODO: I think there will need to be some extra logic around this, + // but if not, it can be registered with the rpc handler directly + return await handler(message) + }) + } + + /** + * Make the RPC request to a randomly selected connected peer, and return the + * response. Retries if the peer times out or does not have the necessary data. + * + * Throws an error if the request cannot be satisfied after several attempts. + * Attempts may fail if a peer does not have the requested element + * (in which case it returns a CannotSatisfyRequest type), + * or if the individual request times out. 
+ */ + async request( + message: Message>, + ): Promise { + for (let i = 0; i < RETRIES; i++) { + const peer = this.selectPeer(message.type) + + if (peer === null) { + throw new CannotSatisfyRequestError( + `Unable to fulfill request after ${RETRIES} attempts`, + ) + } + const peerIdentity = peer.getIdentityOrThrow() + + try { + const response = await this.rpcRouter.requestFrom(peer, { ...message }) + if (response.message.type !== InternalMessageType.cannotSatisfyRequest) { + this.requestFails.get(peerIdentity)?.delete(message.type) + return response + } + } catch (error) { + // Ignore the error here + } + + if (peer.state.type === 'CONNECTED') { + const peerRequestFailMap = this.requestFails.get(peerIdentity) || new Set() + this.requestFails.set(peerIdentity, peerRequestFailMap) + peerRequestFailMap.add(message.type) + } + } + + throw new CannotSatisfyRequestError(`Unable to fulfill request after ${RETRIES} attempts`) + } + + /** + * Handle an incoming global RPC message. This may be an incoming request for + * some data or an incoming repsonse. Either way, we just forward it to the + * RPC handler. + */ + async handle(peer: Peer, rpcMessage: IncomingRpcPeerMessage['message']): Promise { + await this.rpcRouter.handle(peer, rpcMessage) + } + + /** + * Choose a peer from the list of connected peers. + * + * Prioritizes peers based on their pending RPC messaage count. Filters out + * saturated peers and peers who have failed this message type, unless all + * peers have failed, then reset and try them all again. + * + * Returns null if we were not able to find a valid candidate + */ + private selectPeer(type: MessageType): Peer | null { + let peers = this.rpcRouter.peerManager.getConnectedPeers().filter((p) => !p.isSaturated) + + // Shuffle peers so we get different peers as a tie breaker for sorting + // we can make this more efficient later. 
+ peers = ArrayUtils.shuffle(peers) + + // Try to find the peer with the least pending RPC messages + peers = peers.sort((a, b) => a.pendingRPC - b.pendingRPC) + + // We have no peers to try + if (!peers.length) return null + + // find a peer that hasn't failed this MessageType + for (const peer of peers) { + const identity = peer.getIdentityOrThrow() + const peerFails = this.requestFails.get(identity) + const failed = peerFails?.has(type) + if (!failed) return peer + } + + // reset each peers failed state for this MessageType + for (const peer of peers) { + const identity = peer.getIdentityOrThrow() + this.requestFails.get(identity)?.delete(type) + } + + // because we sorted earlier, this is the lowest pending rpc count + return peers[0] + } +} diff --git a/ironfish/src/network/messageRouters/gossip.test.ts b/ironfish/src/network/messageRouters/gossip.test.ts new file mode 100644 index 0000000000..c03820344b --- /dev/null +++ b/ironfish/src/network/messageRouters/gossip.test.ts @@ -0,0 +1,156 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +jest.mock('uuid') +jest.mock('ws') + +import { mocked } from 'ts-jest/utils' +import { v4 as uuid } from 'uuid' +import ws from 'ws' +import { PeerNetwork, RoutingStyle } from '../peerNetwork' +import { GossipRouter } from './gossip' +import { PeerManager } from '../peers/peerManager' +import { mockLocalPeer, mockPrivateIdentity, getConnectedPeer } from '../testUtilities' + +jest.useFakeTimers() + +describe('Gossip Router', () => { + it('Broadcasts a message on gossip', () => { + mocked(uuid).mockReturnValue('test_broadcast') + const pm = new PeerManager(mockLocalPeer()) + const broadcastMock = jest.spyOn(pm, 'broadcast').mockImplementation(() => {}) + const router = new GossipRouter(pm) + router.register('test', jest.fn()) + const message = { type: 'test', payload: { test: 'hi payload' } } + router.gossip(message) + expect(uuid).toBeCalledTimes(1) + expect(broadcastMock).toBeCalledTimes(1) + expect(broadcastMock).toBeCalledWith({ ...message, nonce: 'test_broadcast' }) + }) + + it('Handles an incoming gossip message', async () => { + const pm = new PeerManager(mockLocalPeer()) + const broadcastMock = jest.spyOn(pm, 'broadcast').mockImplementation(() => {}) + const { peer: peer1 } = getConnectedPeer(pm) + const { peer: peer2 } = getConnectedPeer(pm) + const peer1Spy = jest.spyOn(peer1, 'send') + const peer2Spy = jest.spyOn(peer2, 'send') + + const router = new GossipRouter(pm) + router.register('test', () => Promise.resolve()) + const message = { type: 'test', nonce: 'test_handler1', payload: { test: 'payload' } } + await router.handle(peer1, message) + + expect(broadcastMock).not.toBeCalled() + // Should not send the message back to the peer it received it from + expect(peer1Spy).not.toBeCalled() + expect(peer2Spy).toBeCalledTimes(1) + expect(peer2Spy).toBeCalledWith(message) + }) + + it('Does not process a seen message twice', async () => { + const pm = new PeerManager(mockLocalPeer()) + const broadcastMock = jest.spyOn(pm, 'broadcast').mockImplementation(() 
=> {}) + const { peer: peer1 } = getConnectedPeer(pm) + const { peer: peer2 } = getConnectedPeer(pm) + const peer1Spy = jest.spyOn(peer1, 'send') + const peer2Spy = jest.spyOn(peer2, 'send') + + const router = new GossipRouter(pm) + router.register('test', () => Promise.resolve()) + const message = { type: 'test', nonce: 'test_handler1', payload: { test: 'payload' } } + // Should send the message to peer2 + await router.handle(peer1, message) + + expect(broadcastMock).not.toBeCalled() + // Should not send the message back to the peer it received it from + expect(peer1Spy).not.toBeCalled() + expect(peer2Spy).toBeCalledTimes(1) + expect(peer2Spy).toBeCalledWith(message) + + peer1Spy.mockClear() + peer2Spy.mockClear() + + await router.handle(peer1, message) + await router.handle(peer2, message) + + expect(peer1Spy).not.toBeCalled() + expect(peer2Spy).not.toBeCalled() + }) + + it('Does not send messages to peers of peer that sent it', async () => { + const pm = new PeerManager(mockLocalPeer()) + const broadcastMock = jest.spyOn(pm, 'broadcast').mockImplementation(() => {}) + const { peer: peer1 } = getConnectedPeer(pm) + const { peer: peer2 } = getConnectedPeer(pm) + const { peer: peer3 } = getConnectedPeer(pm) + const peer1Spy = jest.spyOn(peer1, 'send') + const peer2Spy = jest.spyOn(peer2, 'send') + const peer3Spy = jest.spyOn(peer3, 'send') + + peer1.knownPeers.set(peer2.getIdentityOrThrow(), peer2) + peer2.knownPeers.set(peer1.getIdentityOrThrow(), peer1) + + const router = new GossipRouter(pm) + router.register('test', () => Promise.resolve()) + const message = { type: 'test', nonce: 'test_double', payload: { test: 'payload' } } + await router.handle(peer1, message) + expect(broadcastMock).not.toBeCalled() + expect(peer1Spy).not.toBeCalled() + expect(peer2Spy).not.toBeCalled() + expect(peer3Spy).toBeCalledTimes(1) + expect(peer3Spy).toBeCalledWith(message) + }) + + it('routes a gossip message as gossip', async () => { + const network = new 
PeerNetwork(mockPrivateIdentity('local'), 'sdk/1/cli', ws) + const gossipMock = jest.fn(async () => {}) + network['gossipRouter'].handle = gossipMock + network.registerHandler( + 'hello', + RoutingStyle.gossip, + () => Promise.resolve({ name: '' }), + () => {}, + ) + + const pm = new PeerManager(mockLocalPeer()) + const { peer } = getConnectedPeer(pm) + const message = { type: 'hello', nonce: 'test_handler1', payload: { test: 'payload' } } + await network['handleMessage'](peer, { peerIdentity: peer.getIdentityOrThrow(), message }) + expect(gossipMock).toBeCalled() + network.stop() + }) + + it('does not handle a poorly formatted gossip message as gossip', async () => { + const network = new PeerNetwork(mockPrivateIdentity('local'), 'sdk/1/cli', ws) + + const gossipMock = jest.fn(async () => {}) + network['gossipRouter'].handle = gossipMock + + network.registerHandler( + 'hello', + RoutingStyle.gossip, + jest.fn((p) => Promise.resolve(p)), + () => {}, + ) + const logFn = jest.fn() + network['logger'].mock(() => logFn) + + const pm = new PeerManager(mockLocalPeer()) + const { peer } = getConnectedPeer(pm) + + // This is the wrong type so it tests that it fails + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const message = { type: 'test', test: 'payload' } as any + + await network['handleMessage'](peer, { + peerIdentity: peer.getIdentityOrThrow(), + message: message, + }) + + expect(gossipMock).not.toBeCalled() + expect(logFn).toBeCalled() + network.stop() + }) +}) diff --git a/ironfish/src/network/messageRouters/gossip.ts b/ironfish/src/network/messageRouters/gossip.ts new file mode 100644 index 0000000000..536a8b762b --- /dev/null +++ b/ironfish/src/network/messageRouters/gossip.ts @@ -0,0 +1,113 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { MessageType, IncomingPeerMessage, Message, isMessage, PayloadType } from '../messages' +import { v4 as uuid } from 'uuid' +import EvictingSet from '../utils/evictingSet' +import { PeerManager } from '../peers/peerManager' +import { Peer } from '../peers/peer' + +/** + * We store gossips that have already been seen and processed, and ignore them + * if we have seen them before. The set that contains these gossips is + * bounded to a specific size and old ones are evicted in the order + * they were inserted. + */ +const MAX_SEEN_GOSSIPS_SIZE = 1000 + +type IncomingGossipGeneric = IncomingPeerMessage> +type IncomingGossipPeerMessage = IncomingGossipGeneric + +export type Gossip = Message & { + // Each message gets a unique identifier + nonce: string +} + +export function isGossip(obj: unknown): obj is Gossip { + return isMessage(obj) && typeof (obj as Gossip).nonce == 'string' +} + +/** + * Router for gossip-style messages. Maintains a list of handlers and is responsible + * for sending and receiving the messages. + */ +export class GossipRouter { + peerManager: PeerManager + private seenGossips: EvictingSet + private handlers: Map Promise> + + constructor(peerManager: PeerManager) { + this.peerManager = peerManager + this.seenGossips = new EvictingSet(MAX_SEEN_GOSSIPS_SIZE) + this.handlers = new Map< + MessageType, + (message: IncomingPeerMessage>) => Promise + >() + } + + hasHandler(type: MessageType): boolean { + return this.handlers.has(type) + } + + /** + * Register a callback function for a certain type of handler. + */ + register( + type: T, + handler: (message: IncomingGossipGeneric) => Promise, + ): void + register( + type: MessageType, + handler: (message: IncomingGossipPeerMessage) => Promise, + ): void { + this.handlers.set(type, handler) + } + + /** + * Pack the message in a Gossip envelope and send it to all connected peers with + * the expectation that they will forward it to their other peers. 
+ * The goal is for everyone to receive the message. + */ + gossip(message: Message): void { + // TODO: A uuid takes up a lot of bytes, might be a better choice available + const nonce = uuid() + const gossipMessage: Gossip = { + ...message, + nonce, + } + this.peerManager.broadcast(gossipMessage) + this.seenGossips.add(nonce) + } + + async handle(peer: Peer, gossipMessage: IncomingGossipPeerMessage['message']): Promise { + const handler = this.handlers.get(gossipMessage.type) + if (handler === undefined) return + + if (this.seenGossips.has(gossipMessage.nonce)) { + return + } + + const peerIdentity = peer.getIdentityOrThrow() + + const peersConnections = + this.peerManager.identifiedPeers.get(peerIdentity)?.knownPeers || new Map() + for (const activePeer of this.peerManager.getConnectedPeers()) { + if (activePeer.state.type !== 'CONNECTED') { + throw new Error('Peer not in state CONNECTED returned from getConnectedPeers') + } + // To reduce network noise, we don't send the message back to the peer that + // sent it to us, or any of the peers connected to it + if ( + activePeer.state.identity === peerIdentity || + (peersConnections.has(activePeer.state.identity) && + peersConnections.get(activePeer.state.identity)?.state.type === 'CONNECTED') + ) { + continue + } + activePeer.send(gossipMessage) + } + this.seenGossips.add(gossipMessage.nonce) + await handler({ peerIdentity, message: gossipMessage }) + } +} diff --git a/ironfish/src/network/messageRouters/index.ts b/ironfish/src/network/messageRouters/index.ts new file mode 100644 index 0000000000..5eefa14e52 --- /dev/null +++ b/ironfish/src/network/messageRouters/index.ts @@ -0,0 +1,8 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export * from './fireAndForget' +export * from './globalRpc' +export * from './gossip' +export * from './rpc' diff --git a/ironfish/src/network/messageRouters/rpc.test.ts b/ironfish/src/network/messageRouters/rpc.test.ts new file mode 100644 index 0000000000..8d9be8f66d --- /dev/null +++ b/ironfish/src/network/messageRouters/rpc.test.ts @@ -0,0 +1,176 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +jest.mock('./rpcId') +import { mocked } from 'ts-jest/utils' +import { PeerManager } from '../peers/peerManager' +import { CannotSatisfyRequestError, Direction, RequestTimeoutError, RpcRouter } from './rpc' +import { nextRpcId, rpcTimeoutMillis } from './rpcId' +import { getConnectedPeer, mockLocalPeer } from '../testUtilities' +import { NetworkError } from '../peers/connections/errors' + +describe('RPC Router', () => { + beforeEach(() => { + jest.resetAllMocks() + jest.useFakeTimers() + }) + + beforeEach(() => { + mocked(nextRpcId).mockReturnValue(91) + mocked(rpcTimeoutMillis).mockReturnValue(10) + }) + + it('Registers an RPC Handler', () => { + const peers = new PeerManager(mockLocalPeer()) + const router = new RpcRouter(peers) + const handler = jest.fn() + router.register('test', handler) + expect(router['handlers'].size).toBe(1) + expect(router['handlers'].get('test')).toBe(handler) + }) + + it('should time out RPC requests', async () => { + const peers = new PeerManager(mockLocalPeer()) + const sendToMock = jest.spyOn(peers, 'sendTo') + + const { peer } = getConnectedPeer(peers) + const peerCloseMock = jest.spyOn(peer, 'close') + + const router = new RpcRouter(peers) + const handlerMock = jest.fn() + router.register('test', handlerMock) + expect(router['requests'].size).toBe(0) + + const promise = router.requestFrom(peer, { + type: 'test', + payload: { test: 'payload' }, + }) + + 
expect(router['requests'].size).toBe(1) + jest.runOnlyPendingTimers() + + expect(router['requests'].size).toBe(0) + expect(sendToMock).toHaveBeenCalledTimes(1) + expect(peerCloseMock).toHaveBeenCalled() + await expect(promise).toRejectErrorInstance(RequestTimeoutError) + }) + + it('should reject requests when connection disconnects', async () => { + const peers = new PeerManager(mockLocalPeer()) + const sendToMock = jest.spyOn(peers, 'sendTo') + + const { peer, connection } = getConnectedPeer(peers) + const peerCloseMock = jest.spyOn(peer, 'close') + + const router = new RpcRouter(peers) + router.register('test', jest.fn()) + expect(router['requests'].size).toBe(0) + + const subscribers = connection.onStateChanged.subscribers + + const promise = router.requestFrom(peer, { + type: 'test', + payload: { test: 'payload' }, + }) + + expect(router['requests'].size).toBe(1) + expect(connection.onStateChanged.subscribers).toBeGreaterThan(subscribers) + connection.close() + + expect(connection.onStateChanged.subscribers).toBeLessThanOrEqual(subscribers) + expect(router['requests'].size).toBe(0) + expect(sendToMock).toHaveBeenCalledTimes(1) + expect(peerCloseMock).not.toHaveBeenCalled() + await expect(promise).toRejectErrorInstance(NetworkError) + }) + + it('should increment and decrement pendingRPC', async () => { + mocked(nextRpcId).mockReturnValue(91) + + const peers = new PeerManager(mockLocalPeer()) + jest.spyOn(peers, 'sendTo') + const { peer } = getConnectedPeer(peers, 'peer') + + const router = new RpcRouter(peers) + router.register('test', jest.fn()) + + void router.requestFrom(peer, { + type: 'test', + payload: { test: 'payload' }, + }) + expect(peer.pendingRPC).toBe(1) + + await router.handle(peer, { + rpcId: 91, + direction: Direction.response, + type: 'test', + payload: { response: 'payload' }, + }) + expect(peer.pendingRPC).toBe(0) + }) + + it('Handles a response as a resolved request promise', async () => { + mocked(nextRpcId).mockReturnValue(91) + 
mocked(rpcTimeoutMillis).mockReturnValue(1000) + + const peers = new PeerManager(mockLocalPeer()) + + const router = new RpcRouter(peers) + router.register('test', jest.fn()) + + const { peer: peer1 } = getConnectedPeer(peers) + const { peer: peer2 } = getConnectedPeer(peers) + + const promise = router.requestFrom(peer1, { + type: 'test', + payload: { test: 'payload' }, + }) + + const response = { + rpcId: 91, + direction: Direction.response, + type: 'test', + payload: { response: 'payload' }, + } + + await router.handle(peer2, response) + await expect(promise).resolves.toMatchObject({ + message: response, + }) + + expect(router['requests'].size).toBe(0) + }) + + it('Catches a cannotSatisfy error and returns the appropriate type', async () => { + mocked(nextRpcId).mockReturnValue(18) + + const peers = new PeerManager(mockLocalPeer()) + const sendToMock = jest.fn() + peers.sendTo = sendToMock + + const handlerMock = jest.fn(() => { + throw new CannotSatisfyRequestError('Bad request') + }) + const router = new RpcRouter(peers) + router.register('test', handlerMock) + + const { peer } = getConnectedPeer(peers) + await router.handle(peer, { + rpcId: 18, + direction: Direction.request, + type: 'test', + payload: { test: 'payload' }, + }) + + expect(router['requests'].size).toBe(0) + expect(sendToMock).toBeCalledTimes(1) + expect(sendToMock).toHaveBeenCalledWith( + peer, + expect.objectContaining({ + direction: Direction.response, + type: 'cannotSatisfyRequest', + }), + ) + }) +}) diff --git a/ironfish/src/network/messageRouters/rpc.ts b/ironfish/src/network/messageRouters/rpc.ts new file mode 100644 index 0000000000..dab680576a --- /dev/null +++ b/ironfish/src/network/messageRouters/rpc.ts @@ -0,0 +1,243 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { createRootLogger, Logger } from '../../logger' +import { + MessageType, + IncomingPeerMessage, + Message, + isMessage, + InternalMessageType, + PayloadType, +} from '../messages' +import { PeerManager } from '../peers/peerManager' +import { rpcTimeoutMillis, nextRpcId, RpcId } from './rpcId' +import { Peer } from '../peers/peer' +import { NetworkError } from '../peers/connections/errors' +import { Connection } from '../peers/connections/connection' + +export enum Direction { + request = 'request', + response = 'response', +} + +export class CannotSatisfyRequestError extends Error { + constructor(message: string | undefined) { + super(message) + this.name = 'CannotSatisfyRequestError' + } +} + +export class RequestTimeoutError extends Error { + timeoutMs: number + + constructor(timeoutMs: number, message?: string) { + super(message || `Request Timed Out after ${timeoutMs}ms`) + this.name = 'RequestTimeoutError' + this.timeoutMs = timeoutMs + } +} + +export type IncomingRpcGeneric = IncomingPeerMessage> +export type IncomingRpcPeerMessage = IncomingRpcGeneric + +/** + * Rpc Messages essentially hold another message as its payload. + * It adds an RpcId, and whether it is a request or a response. 
+ */ +export type Rpc = Message & { + // Each rpc message gets an id that is unique for the requesting client + rpcId: RpcId + // Whether this is an outgoing request or an incoming response + direction: Direction +} +export function isRpc(obj: unknown): obj is Rpc { + if (!isMessage(obj)) return false + const rpc = obj as Rpc> + + if (rpc.type === InternalMessageType.cannotSatisfyRequest) { + return rpc.payload === undefined + } + + return ( + (rpc.direction === Direction.request || rpc.direction === Direction.response) && + typeof rpc.rpcId == 'number' && + rpc.payload != null + ) +} + +type RpcRequest = { + resolve: (value: IncomingRpcPeerMessage) => void + reject: (e: unknown) => void + connection?: Connection +} + +/** + * Router for sending RPC messages and waiting for a response. RPC streams + * are quite complicated, as there are essentially two streams, one for the + * request and one for the response. + */ +export class RpcRouter { + peerManager: PeerManager + private handlers: Map Promise> + private requests: Map + private logger: Logger + + constructor(peerManager: PeerManager, logger: Logger = createRootLogger()) { + this.peerManager = peerManager + this.handlers = new Map< + MessageType, + (message: IncomingRpcPeerMessage) => Promise + >() + this.requests = new Map() + this.logger = logger.withTag('rpcrouter') + } + + /** + * Register a callback function for a given type of handler. This is the handler + * used for incoming *requests*. Incoming responses are handled using futures + * on the request() function. + */ + register( + type: T, + handler: (message: IncomingRpcGeneric) => Promise, + ): void + register( + type: MessageType, + handler: (message: IncomingRpcPeerMessage) => Promise, + ): void { + this.handlers.set(type, handler) + } + + /** + * Initiate a request for some data from a specific peer. The message is + * packed into a Request envelope and sent to the specified peer. 
+ * This is an async method, so it returns a future that resolves either + */ + requestFrom( + peer: Peer, + message: Message>, + ): Promise { + const rpcId = nextRpcId() + if (typeof rpcId !== 'number') throw new Error(`rpcId mocked: ${typeof rpcId}`) + + return new Promise((resolve, reject) => { + const timeoutMs = rpcTimeoutMillis() + + // Reject requests if the connection becomes disconnected + const onConnectionStateChanged = () => { + const request = this.requests.get(rpcId) + + if (request && request?.connection?.state.type === 'DISCONNECTED') { + request.connection.onStateChanged.off(onConnectionStateChanged) + const errorMessage = `Connection closed while waiting for request ${message.type}: ${rpcId}` + request.reject(new NetworkError(errorMessage)) + } + } + + const clearDisconnectHandler = (): void => { + this.requests.get(rpcId)?.connection?.onStateChanged.off(onConnectionStateChanged) + } + + const timeout = setTimeout(() => { + const request = this.requests.get(rpcId) + if (!request) throw new Error(`Timed out request ${rpcId} not found`) + const errorMessage = `Closing connections to ${peer.displayName} because RPC message of type ${message.type} timed out after ${timeoutMs} ms in request: ${rpcId}.` + const error = new RequestTimeoutError(timeoutMs, errorMessage) + this.logger.debug(errorMessage) + clearDisconnectHandler() + peer.close(error) + request.reject(error) + }, timeoutMs) + + const request: RpcRequest = { + resolve: (message: IncomingRpcPeerMessage): void => { + clearDisconnectHandler() + peer.pendingRPC-- + this.requests.delete(rpcId) + clearTimeout(timeout) + resolve(message) + }, + reject: (reason?: unknown): void => { + clearDisconnectHandler() + peer.pendingRPC-- + this.requests.delete(rpcId) + clearTimeout(timeout) + reject(reason) + }, + } + + peer.pendingRPC++ + this.requests.set(rpcId, request) + + const rpcMessage: Rpc> = { + type: message.type, + rpcId, + direction: Direction.request, + payload: message.payload, + } + + const 
connection = this.peerManager.sendTo(peer, rpcMessage) + if (!connection) { + return request.reject( + new Error( + `${String(peer.state.identity)} did not send ${message.type} in state ${ + peer.state.type + }`, + ), + ) + } + + request.connection = connection + connection.onStateChanged.on(onConnectionStateChanged) + }) + } + + /** + * Handle an incoming RPC message. This may be an incoming request for some + * data, or an incoming response to one of our requests. + * + * If it is a request, we pass it to the handler registered for it. + * If a response, we resolve the promise waiting for it. + * + * The handler for a given request should either return a payload or throw + * a CannotFulfillRequest error + */ + async handle(peer: Peer, rpcMessage: IncomingRpcPeerMessage['message']): Promise { + const rpcId = rpcMessage.rpcId + const peerIdentity = peer.getIdentityOrThrow() + + if (rpcMessage.direction === Direction.request) { + const handler = this.handlers.get(rpcMessage.type) + if (handler === undefined) return + + let responseMessage: IncomingRpcPeerMessage['message'] + try { + const response = await handler({ peerIdentity, message: rpcMessage }) + responseMessage = { + ...rpcMessage, + direction: Direction.response, + payload: response, + } + } catch (error: unknown) { + const asError = error as Error + if (!(asError.name && asError.name === 'CannotSatisfyRequestError')) { + this.logger.error(`Unexpected error in ${rpcMessage.type} handler: ${String(error)}`) + } + responseMessage = { + rpcId: rpcId, + direction: Direction.response, + type: InternalMessageType.cannotSatisfyRequest, + } + } + + this.peerManager.sendTo(peer, responseMessage) + } else { + const request = this.requests.get(rpcId) + if (request) { + request.resolve({ peerIdentity, message: rpcMessage }) + } + } + } +} diff --git a/ironfish/src/network/messageRouters/rpcId.ts b/ironfish/src/network/messageRouters/rpcId.ts new file mode 100644 index 0000000000..a4a6d86735 --- /dev/null +++ 
b/ironfish/src/network/messageRouters/rpcId.ts @@ -0,0 +1,29 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/** + * Keep track of the next ID. Kept in this simple module to aid in mocking + * during testing. + */ + +export type RpcId = number +export const RPC_TIMEOUT_MILLIS = 30000 + +let lastUsedRpcId = 0 + +/** + * Generate an RPC ID for a new outgoing Request + */ +export function nextRpcId(): RpcId { + lastUsedRpcId += 1 + return lastUsedRpcId +} + +/** + * Get the number of milliseconds a rpc call should wait for a response before + * timing out. + */ +export function rpcTimeoutMillis(): number { + return RPC_TIMEOUT_MILLIS +} diff --git a/ironfish/src/network/messages.test.ts b/ironfish/src/network/messages.test.ts new file mode 100644 index 0000000000..4bc71e213d --- /dev/null +++ b/ironfish/src/network/messages.test.ts @@ -0,0 +1,101 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { + isIdentify, + Identify, + InternalMessageType, + Signal, + isSignal, + PeerList, + isPeerList, + DisconnectingMessage, + DisconnectingReason, + isDisconnectingMessage, +} from './messages' + +describe('isIdentify', () => { + it('Returns true on identity message', () => { + const msg: Identify = { + type: InternalMessageType.identity, + payload: { + identity: 'oVHAznOXv4FHdajFYsVNMZm14WHlCdXZz8z55IOhTwI=', + version: 'ironfish-sdk/1/cli', + port: null, + }, + } + expect(isIdentify(msg)).toBeTruthy() + }) +}) + +describe('isSignal', () => { + it('Returns true on signal message', () => { + const msg: Signal = { + type: InternalMessageType.signal, + payload: { + sourceIdentity: 'oVHAznOXv4FHdajFYsVNMZm14WHlCdXZz8z55IOhTwI=', + destinationIdentity: 'oVHAznOXv4FHdajFYsVNMZm14WHlCdXZz8z55IOhTwI=', + nonce: 'test', + signal: 'data', + }, + } + expect(isSignal(msg)).toBeTruthy() + }) +}) + +describe('isPeerList', () => { + it('Returns true on empty connectedPeers', () => { + const msg: PeerList = { + type: InternalMessageType.peerList, + payload: { + connectedPeers: [], + }, + } + expect(isPeerList(msg)).toBeTruthy() + }) + + it('Returns true on peerlist message', () => { + const msg: PeerList = { + type: InternalMessageType.peerList, + payload: { + connectedPeers: [ + { + identity: 'oVHAznOXv4FHdajFYsVNMZm14WHlCdXZz8z55IOhTwI=', + address: 'localhost', + port: null, + }, + ], + }, + } + expect(isPeerList(msg)).toBeTruthy() + }) +}) + +describe('isDisconnectingMessage', () => { + it('Returns true on Disconnecting message', () => { + const msg: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + sourceIdentity: 'oVHAznOXv4FHdajFYsVNMZm14WHlCdXZz8z55IOhTwI=', + destinationIdentity: 'oVHAznOXv4FHdajFYsVNMZm14WHlCdXZz8z55IOhTwI=', + reason: DisconnectingReason.ShuttingDown, + disconnectUntil: Date.now(), + }, + } + expect(isDisconnectingMessage(msg)).toBeTruthy() + }) + + it('Returns true on null destinationIdentity', () => { + 
const msg: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + sourceIdentity: 'oVHAznOXv4FHdajFYsVNMZm14WHlCdXZz8z55IOhTwI=', + destinationIdentity: null, + reason: DisconnectingReason.ShuttingDown, + disconnectUntil: Date.now(), + }, + } + expect(isDisconnectingMessage(msg)).toBeTruthy() + }) +}) diff --git a/ironfish/src/network/messages.ts b/ironfish/src/network/messages.ts new file mode 100644 index 0000000000..ffd9515240 --- /dev/null +++ b/ironfish/src/network/messages.ts @@ -0,0 +1,215 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Identity, isIdentity } from './identity' +import { IJSON } from '../serde' + +/** + * The type of the message for the purposes of routing within our code. + * This includes messages consumed by our connection and peer manager layer, + * such as identity, signal, and peerList, + * and message routing types such as gossip, directRPC, and globalRPC. + */ +export enum InternalMessageType { + identity = 'identity', + signal = 'signal', + signalRequest = 'signalRequest', + peerList = 'peerList', + cannotSatisfyRequest = 'cannotSatisfyRequest', + disconnecting = 'disconnecting', +} + +export type MessageType = InternalMessageType | string +export type PayloadType = Record | undefined +/** + * Used for functions that don't care about the contents of the message. + */ +export type LooseMessage = Message + +/** + * A message that has been received on the connection. Note that most messages + * will have other properties, but so long as an object is jsonable and has a + * type, it's ready to send. + */ +export type Message< + T extends MessageType, + P extends PayloadType = undefined +> = P extends undefined ? { type: T } : { type: T; payload: P } + +export type MessagePayload = M extends Message ? 
P : never + +export function isMessage(obj: unknown): obj is Message { + if (typeof obj != 'object' || obj == null) return false + if ( + 'payload' in obj && + (typeof (obj as Message>).payload !== 'object' || + obj === null) + ) + return false + return typeof (obj as Message).type == 'string' +} + +export function isPayloadMessage( + obj: unknown, +): obj is Message> { + return ( + isMessage(obj) && 'payload' in obj && typeof obj.payload === 'object' && obj.payload != null + ) +} + +/** + * Parse a message and verify that it has a type field (passes isMessage) + * + * Throws an error if it's not a valid message + */ +export function parseMessage(data: string): Message { + const message = IJSON.parse(data) + if (!isMessage(message)) { + throw new Error('Message must have a type field') + } + return message +} + +/** + * A message by which a peer can identify itself to another. + */ +export type Identify = Message< + InternalMessageType.identity, + { + identity: Identity + isWorker?: boolean + name?: string + version: string + port: number | null + } +> + +export function isIdentify(obj: unknown): obj is Identify { + if (!isPayloadMessage(obj)) return false + const payload = obj.payload as Identify['payload'] + return ( + obj.type === InternalMessageType.identity && + typeof payload === 'object' && + payload != null && + typeof payload.identity === 'string' && + typeof payload.version === 'string' + ) +} + +/** + * A message used to indicate to a peer that we want them to + * initiatie signaling with us. This is most often used when + * we discover a peer through another peer but need to indicate + * to them through a brokering peer to connect to us via webrtc. 
+ */ +export type SignalRequest = Message< + InternalMessageType.signalRequest, + { + sourceIdentity: Identity + destinationIdentity: Identity + } +> + +export function isSignalRequest(obj: unknown): obj is SignalRequest { + if (!isPayloadMessage(obj)) return false + + const payload = obj.payload as Signal['payload'] + return ( + obj.type === InternalMessageType.signalRequest && + payload != null && + typeof payload.sourceIdentity === 'string' && + typeof payload.destinationIdentity === 'string' + ) +} + +/** + * A message used to signal an rtc session between two peers. + * + * The referring peer will forward the message to the sourceIdentity, + * which will need to respond with a signal that has peer and source + * inverted. + */ +export type Signal = Message< + InternalMessageType.signal, + { + sourceIdentity: Identity + destinationIdentity: Identity + nonce: string + signal: string + } +> + +export function isSignal(obj: unknown): obj is Signal { + if (!isPayloadMessage(obj)) return false + const payload = obj.payload as Signal['payload'] + return ( + obj.type === InternalMessageType.signal && + payload != null && + typeof payload.sourceIdentity === 'string' && + typeof payload.destinationIdentity === 'string' && + typeof payload.nonce === 'string' && + typeof payload.signal === 'string' + ) +} + +export type PeerList = Message< + InternalMessageType.peerList, + { + connectedPeers: { + identity: Identity + name?: string + address: string | null + port: number | null + }[] + } +> + +export function isPeerList(obj: unknown): obj is PeerList { + if (!isPayloadMessage(obj)) return false + const payload = obj.payload as PeerList['payload'] + return ( + obj.type === InternalMessageType.peerList && + payload != null && + Array.isArray(payload.connectedPeers) && + payload.connectedPeers.every((v) => isIdentity(v.identity)) + ) +} + +export enum DisconnectingReason { + ShuttingDown = 0, + Congested = 1, +} + +export type DisconnectingMessage = Message< + 
InternalMessageType.disconnecting, + { + sourceIdentity: Identity + // Can be null if we're sending the message to an unidentified Peer + destinationIdentity: Identity | null + reason: DisconnectingReason + disconnectUntil: number + } +> + +export function isDisconnectingMessage(obj: unknown): obj is DisconnectingMessage { + if (!isPayloadMessage(obj)) return false + const payload = obj.payload as DisconnectingMessage['payload'] + return ( + obj.type === InternalMessageType.disconnecting && + payload != null && + typeof payload.sourceIdentity === 'string' && + (typeof payload.destinationIdentity === 'string' || payload.destinationIdentity === null) && + typeof payload.reason === 'number' && + typeof payload.disconnectUntil === 'number' + ) +} + +/** + * A message that we have received from a peer, identified by that peer's + * identity. + */ +export interface IncomingPeerMessage> { + peerIdentity: Identity + message: M +} diff --git a/ironfish/src/network/peerNetwork.test.ts b/ironfish/src/network/peerNetwork.test.ts new file mode 100644 index 0000000000..0c118dfa67 --- /dev/null +++ b/ironfish/src/network/peerNetwork.test.ts @@ -0,0 +1,80 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +jest.mock('ws') + +import ws from 'ws' +import { PeerNetwork, RoutingStyle } from './peerNetwork' +import { getConnectedPeer, mockPrivateIdentity } from './testUtilities' + +jest.useFakeTimers() + +it('Closes the PeerManager when close is called', () => { + const peerNetwork = new PeerNetwork(mockPrivateIdentity('local'), 'sdk/1/cli', ws) + const stopSpy = jest.spyOn(peerNetwork.peerManager, 'stop') + peerNetwork.stop() + expect(stopSpy).toBeCalled() +}) + +it('Registers a handler', () => { + const peerNetwork = new PeerNetwork(mockPrivateIdentity('local'), 'sdk/1/cli', ws) + peerNetwork.registerHandler( + 'hello', + RoutingStyle.gossip, + (p) => Promise.resolve(p), + () => {}, + ) + expect(peerNetwork['routingStyles']).toMatchSnapshot() + peerNetwork.stop() +}) + +it('ignores a message if validation fails', async () => { + const peerNetwork = new PeerNetwork(mockPrivateIdentity('local'), 'sdk/1/cli', ws) + const handlerMock = jest.fn(() => {}) + peerNetwork.registerHandler( + 'hello', + RoutingStyle.gossip, + () => Promise.reject(new Error('invalid message')), + handlerMock, + ) + + const { peer } = getConnectedPeer(peerNetwork.peerManager) + const message = { type: 'hello', nonce: 'test_handler1', payload: { test: 'Payload' } } + await peerNetwork['handleMessage'](peer, { peerIdentity: peer.getIdentityOrThrow(), message }) + expect(handlerMock).not.toBeCalled() + peerNetwork.stop() +}) + +it('changes isReady when peers connect', () => { + const peerNetwork = new PeerNetwork( + mockPrivateIdentity('local'), + 'sdk/1/cli', + ws, + undefined, + { + minPeersReady: 1, + }, + ) + + expect(peerNetwork.isReady).toBe(false) + + const readyChanged = jest.fn() + peerNetwork.onIsReadyChanged.on(readyChanged) + + peerNetwork.start() + expect(peerNetwork.isReady).toBe(false) + + const { peer } = getConnectedPeer(peerNetwork.peerManager) + expect(peerNetwork.isReady).toBe(true) + + peer.close() + expect(peerNetwork.isReady).toBe(false) + + peerNetwork.stop() + 
expect(peerNetwork.isReady).toBe(false) + + expect(readyChanged).toBeCalledTimes(2) + expect(readyChanged).toHaveBeenNthCalledWith(1, true) + expect(readyChanged).toHaveBeenNthCalledWith(2, false) +}) diff --git a/ironfish/src/network/peerNetwork.ts b/ironfish/src/network/peerNetwork.ts new file mode 100644 index 0000000000..96a4bb6f1e --- /dev/null +++ b/ironfish/src/network/peerNetwork.ts @@ -0,0 +1,410 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { createRootLogger, Logger } from '../logger' +import { MetricsMonitor } from '../metrics' +import { PeerConnectionManager } from './peers/peerConnectionManager' +import { PeerManager } from './peers/peerManager' +import { PrivateIdentity } from './identity' +import { WebSocketServer } from './webSocketServer' +import { Event } from '../event' +import { + MessageType, + IncomingPeerMessage, + Message, + PayloadType, + LooseMessage, + InternalMessageType, + DisconnectingMessage, + DisconnectingReason, +} from './messages' +import { IsomorphicWebRtc, IsomorphicWebSocketConstructor } from './types' +import { + FireAndForgetRouter, + GlobalRpcRouter, + GossipRouter, + Gossip, + isGossip, + RpcRouter, + IncomingRpcGeneric, + isRpc, + Rpc, +} from './messageRouters' +import { Peer } from './peers/peer' +import { LocalPeer } from './peers/localPeer' + +/** + * The routing style that should be used for a message of a given type + */ +export enum RoutingStyle { + gossip = 'gossip', + directRPC = 'directRPC', + globalRPC = 'globalRPC', + fireAndForget = 'fireAndForget', +} + +interface RouteMap { + [RoutingStyle.gossip]: Gossip + [RoutingStyle.globalRPC]: Rpc + [RoutingStyle.directRPC]: Rpc + [RoutingStyle.fireAndForget]: Message +} + +interface ReturnMap { + [RoutingStyle.gossip]: void + [RoutingStyle.globalRPC]: Promise + [RoutingStyle.directRPC]: 
Promise + [RoutingStyle.fireAndForget]: void +} + +/** + * Entry point for the peer-to-peer network. Manages connections to other peers on the network + * and provides abstractions for several methods of sending/receiving network messages. + */ +export class PeerNetwork { + // optional WebSocket server, started from Node.JS + private webSocketServer?: WebSocketServer + + readonly localPeer: LocalPeer + readonly peerManager: PeerManager + readonly onIsReadyChanged = new Event<[boolean]>() + + private started = false + private readonly enableListen: boolean + private readonly minPeersReady: number + private readonly peerConnectionManager: PeerConnectionManager + private readonly routingStyles: Map + private readonly gossipRouter: GossipRouter + private readonly fireAndForgetRouter: FireAndForgetRouter + private readonly directRpcRouter: RpcRouter + private readonly globalRpcRouter: GlobalRpcRouter + private readonly logger: Logger + private readonly metrics: MetricsMonitor + + /** + * If the peer network is ready for messages to be sent or not + */ + private _isReady = false + get isReady(): boolean { + return this._isReady + } + + constructor( + localIdentity: PrivateIdentity, + localVersion: string, + webSocket: IsomorphicWebSocketConstructor, + webRtc?: IsomorphicWebRtc, + options: { + enableListen?: boolean + port?: number + minPeersReady?: number + name?: string | null + maxPeers?: number + targetPeers?: number + isWorker?: boolean + broadcastWorkers?: boolean + simulateLatency?: number + } = {}, + logger: Logger = createRootLogger(), + metrics?: MetricsMonitor, + ) { + this.logger = logger.withTag('peernetwork') + this.metrics = metrics || new MetricsMonitor(this.logger) + + this.localPeer = new LocalPeer(localIdentity, localVersion, webSocket, webRtc) + this.localPeer.port = options.port || null + this.localPeer.name = options.name || null + this.localPeer.isWorker = options.isWorker || false + this.localPeer.simulateLatency = options.simulateLatency || 0 + 
this.localPeer.broadcastWorkers = + options.broadcastWorkers === undefined ? true : options.broadcastWorkers + + const maxPeers = options.maxPeers || 10000 + const targetPeers = options.targetPeers || 50 + this.peerManager = new PeerManager( + this.localPeer, + this.logger, + metrics, + maxPeers, + targetPeers, + ) + this.peerManager.onMessage.on((peer, message) => this.handleMessage(peer, message)) + this.peerManager.onConnectedPeersChanged.on(() => this.updateIsReady()) + this.peerConnectionManager = new PeerConnectionManager(this.peerManager, this.logger, { + maxPeers, + }) + + this.routingStyles = new Map() + this.gossipRouter = new GossipRouter(this.peerManager) + this.fireAndForgetRouter = new FireAndForgetRouter(this.peerManager) + this.directRpcRouter = new RpcRouter(this.peerManager) + this.globalRpcRouter = new GlobalRpcRouter(this.directRpcRouter) + + this.minPeersReady = options.minPeersReady || 1 + this.enableListen = options.enableListen === undefined ? true : options.enableListen + + if (options.name && options.name.length > 32) { + options.name = options.name.slice(32) + } + } + + start(): void { + if (this.started) return + this.started = true + + // Start the WebSocket server if possible + if ( + this.enableListen && + 'Server' in this.localPeer.webSocket && + this.localPeer.port != null + ) { + this.webSocketServer = new WebSocketServer( + this.localPeer.webSocket.Server, + this.localPeer.port, + ) + this.webSocketServer.onStart(() => { + const address = this.webSocketServer?.server.address() + const addressStr = + typeof address === 'object' ? 
`${address.address}:${address.port}` : String(address) + this.logger.info(`WebSocket server started at ${addressStr}`) + }) + this.webSocketServer.onConnection((connection, req) => { + let address: string | null = null + + if (this.peerManager.shouldRejectDisconnectedPeers()) { + const disconnect: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + sourceIdentity: this.localPeer.publicIdentity, + destinationIdentity: null, + reason: DisconnectingReason.Congested, + disconnectUntil: Date.now() + 1000 * 60 * 5, + }, + } + connection.send(disconnect) + this.logger.debug( + 'Disconnecting inbound websocket connection because the node has max peers', + ) + connection.close() + return + } + + if (req.headers['X-Forwarded-For'] && req.headers['X-Forwarded-For'][0]) { + address = req.headers['X-Forwarded-For'][0] + } else if (req.socket.remoteAddress) { + address = req.socket.remoteAddress + } + + if (address) { + // Some times local peers connect on IPV6 incompatible addresses like + // '::ffff:127.0.0.1' and we don't support connecting over IPv6 right now + address = address.replace('::ffff:', '') + } + + this.peerManager.createPeerFromInboundWebSocketConnection(connection, address) + }) + } + + // Start up the PeerManager + this.peerManager.start() + + // Start up the PeerConnectionManager + this.peerConnectionManager.start() + + this.updateIsReady() + } + + /** + * Call close when shutting down the PeerNetwork to clean up + * outstanding connections. + */ + stop(): void { + this.started = false + this.peerConnectionManager.stop() + this.peerManager.stop() + this.webSocketServer?.close() + this.updateIsReady() + } + + /** + * Register a handler as being the processor for a specific message type. + * Specify the routing style to be associated with the handler so it receives + * the right kinds of messages. + * + * Handlers for RPC messages can return a payload that will be sent as a reply. 
+ * + * The validator function is responsible for processing the incoming message + * and determining whether the payload is correct. It should return the + * correctly typed payload. If the payload is incorrect, it should throw + * an error. + * + * If the validator throws, the incoming message is silently dropped. + * + * For RPC messages, the validation handler is only called on incoming + * requests. Incoming responses pass the message up to the application layer + * without evaluation. + * + * For gossip messages, the validation handler should determine whether the + * message is valid with respect to local state. If the validator throws, + * the message is not gossiped out to other peers. + */ + registerHandler< + P extends PayloadType, + S extends RoutingStyle = RoutingStyle, + T extends MessageType = MessageType + >( + type: T, + style: S, + validator: (payload: PayloadType) => Promise

, + handler: (parsedMessage: IncomingPeerMessage[S]>) => ReturnMap[S], + ): void { + const hdlr = async (msg: IncomingPeerMessage>) => { + let resp: P + try { + resp = await validator('payload' in msg.message ? msg.message.payload : undefined) + } catch { + // Skip the handler if the message doesn't validate + return + } + const newMsg = { + ...msg, + message: { ...msg.message, payload: resp }, + } + return await handler(newMsg as IncomingPeerMessage[S]>) + } + + switch (style) { + case RoutingStyle.gossip: { + this.gossipRouter.register(type, hdlr) + break + } + case RoutingStyle.directRPC: + this.directRpcRouter.register( + type, + hdlr as (message: IncomingRpcGeneric) => Promise, + ) + break + case RoutingStyle.globalRPC: + this.globalRpcRouter.register( + type, + hdlr as (message: IncomingRpcGeneric) => Promise, + ) + break + case RoutingStyle.fireAndForget: + this.fireAndForgetRouter.register(type, hdlr) + break + } + this.routingStyles.set(type, style) + } + + /** + * Send the message to all connected peers with the expectation that they + * will forward it to their other peers. The goal is for everyone to + * receive the message. + */ + gossip(message: LooseMessage): void { + const style = this.routingStyles.get(message.type) + if (style !== RoutingStyle.gossip) { + throw new Error(`${message.type} type not meant to be gossipped`) + } + this.gossipRouter.gossip(message) + } + + /** + * Send the message directly to the specified peer, if we are connected to it. + * No response or receipt confirmation is expected. + */ + fireAndForget(peer: Peer, message: LooseMessage): void { + const style = this.routingStyles.get(message.type) + if (style !== RoutingStyle.fireAndForget) { + throw new Error(`${message.type} type not meant to be firedAndForgot`) + } + this.fireAndForgetRouter.fireAndForget(peer, message) + } + + /** + * Fire an RPC request to the given peer identity. 
Returns a promise that + * will resolve when the response is received, or will be rejected if the + * request cannot be completed before timing out. + */ + requestFrom( + peer: Peer, + message: Message>, + ): Promise> { + const style = this.routingStyles.get(message.type) + if (style !== RoutingStyle.directRPC) { + throw new Error(`${message.type} type not meant to be direct RPC`) + } + return this.directRpcRouter.requestFrom(peer, message) + } + + /** + * Fire a global RPC request to a randomly chosen identity, retrying with other + * peers if the first one fails. Returns a promise that will resolve when the + * response is received, or throw an error if the request cannot be completed + * before timing out. + */ + async request( + message: Message>, + ): Promise> { + const style = this.routingStyles.get(message.type) + if (style !== RoutingStyle.globalRPC) { + throw new Error(`${message.type} type not meant to be global RPC`) + } + return await this.globalRpcRouter.request(message) + } + + private async handleMessage( + peer: Peer, + incomingMessage: IncomingPeerMessage, + ): Promise { + const { message } = incomingMessage + let style = this.routingStyles.get(message.type) + if (style === undefined) { + if (message.type === InternalMessageType.cannotSatisfyRequest) { + style = RoutingStyle.globalRPC + } else { + this.logger.warn('Received unknown message type', message.type) + return + } + } + + switch (style) { + case RoutingStyle.gossip: + if (!isGossip(message)) { + this.logger.warn('Handler', message.type, 'expected gossip') + return + } + await this.gossipRouter.handle(peer, message) + break + case RoutingStyle.directRPC: + if (!isRpc(message)) { + this.logger.warn('Handler', message.type, 'expected RPC') + return + } + await this.directRpcRouter.handle(peer, message) + break + case RoutingStyle.globalRPC: + if (!isRpc(message)) { + this.logger.warn('Handler', message.type, 'expected (global) RPC') + return + } + await this.globalRpcRouter.handle(peer, 
message) + break + case RoutingStyle.fireAndForget: + await this.fireAndForgetRouter.handle(peer, message) + break + } + } + + private updateIsReady(): void { + const prevIsReady = this._isReady + this._isReady = + this.started && this.peerManager.getConnectedPeers().length >= this.minPeersReady + + if (this._isReady !== prevIsReady) { + this.onIsReadyChanged.emit(this._isReady) + } + } +} diff --git a/ironfish/src/network/peers/connectionRetry.ts b/ironfish/src/network/peers/connectionRetry.ts new file mode 100644 index 0000000000..2b852f1adf --- /dev/null +++ b/ironfish/src/network/peers/connectionRetry.ts @@ -0,0 +1,74 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +const seconds = 1000 +const minutes = 60 * seconds + +const retryIntervals = [ + 7 * seconds, + 15 * seconds, + 30 * seconds, + 1 * minutes, + 3 * minutes, + 5 * minutes, +] + +export class ConnectionRetry { + /** + * Number of consecutive connection failures. + */ + private failedRetries = 0 + + /** + * Timestamp representing the next time to allow a connection to be initiated. + */ + private disconnectUntil = 0 + + /** + * Call this if new connection attempts should never be made. + */ + neverRetryConnecting(): void { + this.disconnectUntil = Infinity + } + + /** + * True if new connection attempts will never be made. + */ + get willNeverRetryConnecting(): boolean { + return this.disconnectUntil === Infinity + } + + /** + * True if a new connection can be initiated. + */ + get canConnect(): boolean { + return Date.now() > this.disconnectUntil + } + + /** + * Call this when a successful connection is made to the peer. + * If neverRetryConnecting is set, clears it. + */ + successfulConnection(): void { + this.failedRetries = 0 + this.disconnectUntil = 0 + } + + /** + * Call this when a connection to a peer fails. 
+ * @param now The current time + */ + failedConnection(isWhitelisted = false, now: number = Date.now()): void { + let disconnectUntil = Infinity + + if (this.failedRetries < retryIntervals.length) { + disconnectUntil = now + retryIntervals[this.failedRetries] + } else if (isWhitelisted) { + disconnectUntil = now + retryIntervals[retryIntervals.length - 1] + } + + this.disconnectUntil = disconnectUntil + this.failedRetries++ + } +} diff --git a/ironfish/src/network/peers/connections/connection.ts b/ironfish/src/network/peers/connections/connection.ts new file mode 100644 index 0000000000..07ca7c7aed --- /dev/null +++ b/ironfish/src/network/peers/connections/connection.ts @@ -0,0 +1,192 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Event } from '../../../event' +import type { Logger } from '../../../logger' +import { MetricsMonitor } from '../../../metrics' + +import { rpcTimeoutMillis } from '../../messageRouters/rpcId' +import { SetTimeoutToken } from '../../../utils' +import { Identity } from '../../identity' +import { InternalMessageType, LooseMessage } from '../../messages' +import { HandshakeTimeoutError } from './errors' +import colors from 'colors/safe' + +/** + * The type of peer connection. This should only be used for information + * reporting purposes. Switching on the type indicates an api design flaw, + * as peers should generally behave identically once connected. 
+ */ +export enum ConnectionType { + WebSocket = 'WebSocket', + WebRtc = 'WebRtc', +} + +export enum ConnectionDirection { + Inbound = 'Inbound', + Outbound = 'Outbound', +} + +type ConnectionState = + | { type: 'DISCONNECTED' } + | { type: 'CONNECTING' } + /* A WebRTC-exclusive state that requires an identity */ + | { type: 'REQUEST_SIGNALING' } + /* A WebRTC-exclusive state that requires an identity */ + | { type: 'SIGNALING' } + | { type: 'WAITING_FOR_IDENTITY' } + | { type: 'CONNECTED'; identity: Identity } + +/** + * Model any connection that can send and receive messages. + */ +export abstract class Connection { + readonly logger: Logger + readonly metrics: MetricsMonitor | null + readonly type: ConnectionType + readonly direction: ConnectionDirection + private handshakeTimeout: SetTimeoutToken | null = null + + /** + * If set will simulate a random amount of latency up to this number + */ + protected readonly simulateLatency: number = 0 + protected readonly simulateLatencyQueue: Array + + /** + * The last error received (if any), regardless of the current state of the connection. + */ + protected _error: unknown | null + get error(): Readonly | null { + return this._error as Readonly + } + + /** + * Indicates the current state of the connection. + */ + private _state: Readonly = { type: 'CONNECTING' } + get state(): Readonly { + return this._state + } + + /** + * The loggable name of the connection. + */ + get displayName(): string { + const name = + this.state.type === 'CONNECTED' ? this.state.identity.slice(0, 7) : 'unidentified' + return `${this.type} ${name}` + } + + /** + * Event fired when the state of the connection changes. + */ + readonly onStateChanged: Event<[]> = new Event() + + /** + * Event fired when a new message comes in. The data is converted to a + * json obj and verifies that it has a type attribute before being passed + * in. + */ + readonly onMessage: Event<[LooseMessage]> = new Event() + + /** + * Send a message into this connection. 
+ */ + abstract send: (object: LooseMessage) => void + + /** + * Shutdown the connection, if possible + */ + abstract readonly close: (error?: unknown) => void + + constructor( + type: ConnectionType, + direction: ConnectionDirection, + logger: Logger, + metrics?: MetricsMonitor, + options: { simulateLatency?: number } = {}, + ) { + this.type = type + this.direction = direction + this.logger = logger + this.metrics = metrics || null + this._error = null + this.simulateLatency = options.simulateLatency || 0 + this.simulateLatencyQueue = [] + } + + setState(state: Readonly): void { + const prevState = this._state + this._state = state + + if (prevState.type !== state.type) { + if (this.handshakeTimeout) { + // Clear handshakeTimeout because were changing state + // and we have a timeout per handshake phase or were + // done doing the handshake + clearTimeout(this.handshakeTimeout) + this.handshakeTimeout = null + } + + if ( + state.type === 'REQUEST_SIGNALING' || + state.type === 'SIGNALING' || + state.type === 'WAITING_FOR_IDENTITY' + ) { + const timeout = rpcTimeoutMillis() + + this.handshakeTimeout = setTimeout(() => { + const error = `Closing ${this.type} connection because handshake timed out in state ${state.type} after ${timeout}ms` + this.logger.debug(error) + this.close(new HandshakeTimeoutError(state.type, timeout, error)) + }, timeout) + } + + if (state.type === 'CONNECTED') { + this._error = null + } + + this.logger.debug( + `${colors.green('CONN')} ${this.displayName} STATE ${prevState.type} -> ${state.type}`, + ) + } + + this.onStateChanged.emit() + } + + /** + * Replaces the connection.send() function with one that randomly delays outbound messages + */ + protected addLatencyWrapper(): void { + if (!this.simulateLatency) return + const originalSend = this.send + + const wrapper = ( + ...args: Parameters + ): ReturnType => { + const message: LooseMessage = args[0] + this.simulateLatencyQueue.push(message) + + let latency = Math.random() * 
(this.simulateLatency || 0) + if (args[0].type === InternalMessageType.disconnecting) latency = 0 + + setTimeout(() => { + const toSend = this.simulateLatencyQueue.shift() + if (this.state.type !== 'DISCONNECTED' && toSend) { + originalSend(toSend) + } + }, latency) + } + + this.send = wrapper + } + + shouldLogMessageType(messageType: string): boolean { + const bannedMessageTypes = [ + InternalMessageType.peerList, + InternalMessageType.signal, + ] as string[] + return !bannedMessageTypes.includes(messageType) + } +} diff --git a/ironfish/src/network/peers/connections/errors.ts b/ironfish/src/network/peers/connections/errors.ts new file mode 100644 index 0000000000..224d7a2e09 --- /dev/null +++ b/ironfish/src/network/peers/connections/errors.ts @@ -0,0 +1,32 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { ErrorUtils } from '../../../utils' + +export class NetworkError extends Error { + wrappedError: unknown | null + + constructor(message?: string, wrappedError?: unknown) { + super(ErrorUtils.renderError(message || wrappedError || 'Unknown Network Error')) + this.wrappedError = wrappedError || null + } +} + +export class TimeoutError extends NetworkError { + readonly timeoutMs: number + + constructor(timeoutMs: number, message?: string) { + super(message || `Request timed out after ${timeoutMs}ms`) + this.timeoutMs = timeoutMs + } +} + +export class HandshakeTimeoutError extends TimeoutError { + readonly state: 'REQUEST_SIGNALING' | 'SIGNALING' | 'WAITING_FOR_IDENTITY' + + constructor(state: HandshakeTimeoutError['state'], timeoutMs: number, message?: string) { + super(timeoutMs, message || `${state} timed out after ${timeoutMs}ms`) + this.state = state + } +} diff --git a/ironfish/src/network/peers/connections/index.ts b/ironfish/src/network/peers/connections/index.ts new file mode 100644 
index 0000000000..5fd2979ac1 --- /dev/null +++ b/ironfish/src/network/peers/connections/index.ts @@ -0,0 +1,7 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './connection' +export * from './errors' +export * from './webRtcConnection' +export * from './webSocketConnection' diff --git a/ironfish/src/network/peers/connections/webRtcConnection.ts b/ironfish/src/network/peers/connections/webRtcConnection.ts new file mode 100644 index 0000000000..4dc8494925 --- /dev/null +++ b/ironfish/src/network/peers/connections/webRtcConnection.ts @@ -0,0 +1,159 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import SimplePeer, { SignalData } from 'simple-peer' +import { Event } from '../../../event' +import type { Logger } from '../../../logger' +import { LooseMessage, parseMessage } from '../../messages' +import { Connection, ConnectionDirection, ConnectionType } from './connection' +import { NetworkError } from './errors' +import { IsomorphicWebRtc } from '../../types' +import { MetricsMonitor } from '../../../metrics' +import colors from 'colors/safe' + +/** + * Light wrapper of WebRtc SimplePeer that knows how to send and receive + * LooseMessages instead of strings/data. 
+ */ +export class WebRtcConnection extends Connection { + private readonly peer: SimplePeer.Instance + + /** + * Event fired when the peer wants to signal its remote peer that an offer, + * answer, or ice candidate is available + */ + onSignal = new Event<[SignalData]>() + + constructor( + initiator: boolean, + wrtc: IsomorphicWebRtc, + logger: Logger, + metrics?: MetricsMonitor, + options: { simulateLatency?: number } = {}, + ) { + super( + ConnectionType.WebRtc, + initiator ? ConnectionDirection.Outbound : ConnectionDirection.Inbound, + logger.withTag('webrtcconnection'), + metrics, + options, + ) + + if (this.simulateLatency) { + this.addLatencyWrapper() + } + + // TODO: This is using google STUN internally, we need to + // make it use any of the websocket peers + this.peer = new SimplePeer({ initiator, wrtc }) + + this.peer.on('close', () => { + this.setState({ type: 'DISCONNECTED' }) + }) + + this.peer.on('error', (error: Error) => { + this._error = error + this.setState({ type: 'DISCONNECTED' }) + }) + + this.peer.on('connect', () => { + if (this.state.type !== 'WAITING_FOR_IDENTITY' && this.state.type !== 'CONNECTED') { + this.setState({ type: 'WAITING_FOR_IDENTITY' }) + } + }) + + this.peer.on('signal', (signal: SignalData) => { + if (this.state.type !== 'CONNECTED' && this.state.type !== 'WAITING_FOR_IDENTITY') { + this.setState({ type: 'SIGNALING' }) + } + + this.onSignal.emit(signal) + }) + + this.peer.on('data', (data: string | Uint8Array) => { + // simple-peer will sometimes emit data before emitting 'connect', so + // make sure the connection state is updated + if (this.state.type === 'SIGNALING') { + this.setState({ type: 'WAITING_FOR_IDENTITY' }) + this.logger.debug( + 'Received data before WebRTC connect event fired, setting peer to WAITING_FOR_IDENTITY', + ) + } + + let stringdata + if (data instanceof Uint8Array) { + stringdata = new TextDecoder().decode(data) + } else stringdata = data + + // TODO: Switch network traffic to binary only so this 
can measure bytes and then decode the binary into JSON + const byteCount = Buffer.from(stringdata).byteLength + this.metrics?.p2p_InboundTraffic.add(byteCount) + this.metrics?.p2p_InboundTraffic_WebRTC.add(byteCount) + + let message + try { + message = parseMessage(stringdata) + } catch (error) { + this.logger.warn('Unable to parse webrtc message', stringdata) + this.peer.destroy() + return + } + + if (this.shouldLogMessageType(message.type)) { + this.logger.debug(`${colors.yellow('RECV')} ${this.displayName}: ${message.type}`) + } + + this.onMessage.emit(message) + }) + } + + /** + * Inject a signal from the peer during the connection negotiation phase + */ + signal(data: SignalData): void { + try { + if (this.state.type === 'DISCONNECTED' || this.state.type === 'CONNECTING') { + this.setState({ type: 'SIGNALING' }) + } + this.peer.signal(data) + } catch (error) { + const message = 'An error occurred when loading signaling data:' + this.logger.debug(message, error) + this.close(new NetworkError(message, error)) + } + } + + /** + * Encode the message to json and send it to the peer + */ + send = (message: LooseMessage): void => { + if (this.shouldLogMessageType(message.type)) { + this.logger.debug(`${colors.yellow('SEND')} ${this.displayName}: ${message.type}`) + } + + const data = JSON.stringify(message) + this.peer.send(data) + + // TODO: Switch network traffic to binary + const byteCount = Buffer.from(data).byteLength + this.metrics?.p2p_OutboundTraffic.add(byteCount) + this.metrics?.p2p_OutboundTraffic_WebRTC.add(byteCount) + } + + /** + * Close the connection + */ + close = (error?: unknown): void => { + if (error) { + if (!(error instanceof Error)) { + this.logger.warn(`Error in close() not an instance of Error: ${JSON.stringify(error)}`) + } + + this._error = error + } + + this.setState({ type: 'DISCONNECTED' }) + this.peer.destroy() + } +} diff --git a/ironfish/src/network/peers/connections/webSocketConnection.ts 
b/ironfish/src/network/peers/connections/webSocketConnection.ts new file mode 100644 index 0000000000..8e7f15f76c --- /dev/null +++ b/ironfish/src/network/peers/connections/webSocketConnection.ts @@ -0,0 +1,132 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import type { Logger } from '../../../logger' +import { Connection, ConnectionDirection, ConnectionType } from './connection' +import { NetworkError } from './errors' +import { LooseMessage, parseMessage } from '../../messages' +import { IsomorphicWebSocket, IsomorphicWebSocketErrorEvent } from '../../types' +import { MetricsMonitor } from '../../../metrics' +import colors from 'colors/safe' + +/** + * Light wrapper of node+browser WebSockets that knows how to send and receive + * LooseMessages instead of strings/data. + */ +export class WebSocketConnection extends Connection { + private readonly socket: IsomorphicWebSocket + + // The hostname of the address that was used to establish the WebSocket connection, if any + readonly hostname?: string + + // The port of the address that was used to establish the WebSocket connection, if any + port?: number + + constructor( + socket: IsomorphicWebSocket, + direction: ConnectionDirection, + logger: Logger, + metrics?: MetricsMonitor, + options: { simulateLatency?: number; hostname?: string; port?: number } = {}, + ) { + super( + ConnectionType.WebSocket, + direction, + logger.withTag('WebSocketConnection'), + metrics, + options, + ) + + this.socket = socket + this.hostname = options.hostname + this.port = options.port + + if (this.simulateLatency) { + this.addLatencyWrapper() + } + + if (this.socket.readyState === this.socket.OPEN) { + this.setState({ type: 'WAITING_FOR_IDENTITY' }) + } + + this.socket.onerror = (...args: unknown[]) => { + // Browser WebSockets call onerror with (this, ErrorEvent), but 
the ws library + // calls onerror with (ErrorEvent), so grab ErrorEvent in either case + let error: IsomorphicWebSocketErrorEvent | null = null + if (args.length === 1) { + error = args[0] as IsomorphicWebSocketErrorEvent + } else if (args.length === 2) { + error = args[1] as IsomorphicWebSocketErrorEvent + } + + this.close(new NetworkError(error?.message, error)) + } + + this.socket.onclose = () => { + this.setState({ type: 'DISCONNECTED' }) + } + + this.socket.onopen = () => { + this.setState({ type: 'WAITING_FOR_IDENTITY' }) + } + + this.socket.onmessage = (event: MessageEvent) => { + // TODO: Switch network traffic to binary only so this can measure bytes and then decode the binary into JSON + const byteCount = Buffer.from(JSON.stringify(event.data)).byteLength + this.metrics?.p2p_InboundTraffic.add(byteCount) + this.metrics?.p2p_InboundTraffic_WS.add(byteCount) + + let message + try { + message = parseMessage(event.data) + } catch (error) { + // TODO: any socket that sends invalid messages should probably + // be punished with some kind of "downgrade" event. 
This should + // probably happen at a higher layer of abstraction + const message = 'error parsing message' + this.logger.warn(message, event.data) + this.close(new NetworkError(message)) + return + } + + if (this.shouldLogMessageType(message.type)) { + this.logger.debug(`${colors.yellow('RECV')} ${this.displayName}: ${message.type}`) + } + this.onMessage.emit(message) + } + } + + /** + * Encode the message to json and send it to the peer + */ + send = (message: LooseMessage): void => { + if (this.shouldLogMessageType(message.type)) { + this.logger.debug(`${colors.yellow('SEND')} ${this.displayName}: ${message.type}`) + } + + const data = JSON.stringify(message) + this.socket.send(data) + + // TODO: Switch network traffic to binary + const byteCount = Buffer.from(data).byteLength + this.metrics?.p2p_OutboundTraffic.add(byteCount) + this.metrics?.p2p_OutboundTraffic_WS.add(byteCount) + } + + /** + * Close the connection + */ + close = (error?: unknown): void => { + if (error) { + if (!(error instanceof Error)) { + this.logger.warn(`Error in close() not an instance of Error: ${JSON.stringify(error)}`) + } + + this._error = error + } + + this.setState({ type: 'DISCONNECTED' }) + this.socket.close() + } +} diff --git a/ironfish/src/network/peers/connections/wrtc.d.ts b/ironfish/src/network/peers/connections/wrtc.d.ts new file mode 100644 index 0000000000..ee4b55452d --- /dev/null +++ b/ironfish/src/network/peers/connections/wrtc.d.ts @@ -0,0 +1,24 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +declare module 'wrtc' { + // TODO: node-webrtc is supposed to be spec-compliant, but the + // typescript types may not match the browser implementations. 
+ export const MediaStream: MediaStream + export const MediaStreamTrack: MediaStreamTrack + export const RTCDataChannel: RTCDataChannel + export const RTCDataChannelEvent: RTCDataChannelEvent + export const RTCDtlsTransport: RTCDtlsTransport + export const RTCIceCandidate: RTCIceCandidate + export const RTCIceTransport: RTCIceTransport + export const RTCPeerConnection: RTCPeerConnection + export const RTCPeerConnectionIceEvent: RTCPeerConnectionIceEvent + export const RTCRtpReceiver: RTCRtpReceiver + export const RTCRtpSender: RTCRtpSender + export const RTCRtpTransceiver: RTCRtpTransceiver + export const RTCSctpTransport: RTCSctpTransport + export const RTCSessionDescription: RTCSessionDescription + export const getUserMedia: (constraints?: MediaStreamConstraints) => Promise + export const mediaDevices: MediaDevices +} diff --git a/ironfish/src/network/peers/encryption.ts b/ironfish/src/network/peers/encryption.ts new file mode 100644 index 0000000000..ca3b8196e1 --- /dev/null +++ b/ironfish/src/network/peers/encryption.ts @@ -0,0 +1,43 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Buffer } from 'buffer' +import tweetnacl from 'tweetnacl' +import { Identity, PrivateIdentity } from '../identity' + +export function boxMessage( + plainTextMessage: string, + sender: PrivateIdentity, + recipient: Identity, +): { nonce: string; boxedMessage: string } { + const bytes = tweetnacl.randomBytes(tweetnacl.box.nonceLength) + return { + nonce: Buffer.from(bytes).toString('base64'), + boxedMessage: Buffer.from( + tweetnacl.box( + Buffer.from(plainTextMessage, 'utf8'), + bytes, + Buffer.from(recipient, 'base64'), + sender.secretKey, + ), + ).toString('base64'), + } +} + +export function unboxMessage( + boxedMessage: string, + nonce: string, + sender: Identity, + recipient: PrivateIdentity, +): string | null { + const bufferNonce = Buffer.from(nonce, 'base64') + const bufferBoxedMessage = Buffer.from(boxedMessage, 'base64') + const opened = tweetnacl.box.open( + bufferBoxedMessage, + bufferNonce, + Buffer.from(sender, 'base64'), + recipient.secretKey, + ) + if (!opened) return null + return Buffer.from(opened).toString('utf8') +} diff --git a/ironfish/src/network/peers/localPeer.ts b/ironfish/src/network/peers/localPeer.ts new file mode 100644 index 0000000000..2bb98f8a47 --- /dev/null +++ b/ironfish/src/network/peers/localPeer.ts @@ -0,0 +1,98 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Identity, PrivateIdentity, privateIdentityToIdentity } from '../identity' +import { Identify, InternalMessageType } from '../messages' +import { IsomorphicWebRtc, IsomorphicWebSocketConstructor } from '../types' +import { parseVersion, renderVersion, Version } from '../version' +import { boxMessage, unboxMessage } from './encryption' + +/** + * Wraps configuration needed for establishing connections with other peers + * and maintains references to all known peers. 
+ */ +export class LocalPeer { + // our keypair for encrypting messages + private readonly privateIdentity: PrivateIdentity + // the identity we expose to other peers + readonly publicIdentity: Identity + // the version of the local client + readonly version: Version + // constructor for either a Node WebSocket or a browser WebSocket + readonly webSocket: IsomorphicWebSocketConstructor + // optional object containing a Node implementation of WebRTC + readonly webRtc: IsomorphicWebRtc + // optional port the local peer is listening on + port: number | null + // optional a human readable name for the node + name: string | null + // is the node a worker node that should not be advertised + isWorker = false + // should we broadcast worker nodes anyway? + broadcastWorkers = true + // simulated latency in MS that gets added to connection.send + simulateLatency = 0 + + constructor( + identity: PrivateIdentity, + version: string, + webSocket: IsomorphicWebSocketConstructor, + webRtc?: IsomorphicWebRtc, + ) { + this.privateIdentity = identity + this.publicIdentity = privateIdentityToIdentity(identity) + this.version = parseVersion(version) + + if ( + this.version.product === null || + this.version.agent === null || + this.version.version === null + ) { + throw new Error(`Invalid local version ${version} -- example formatting: "sdk/1/cli"`) + } + + this.webSocket = webSocket + this.webRtc = webRtc + this.port = null + this.name = null + this.isWorker = false + } + + /** + * Construct an Identify message with our identity and version. + */ + getIdentifyMessage(): Identify { + return { + type: InternalMessageType.identity, + payload: { + identity: this.publicIdentity, + isWorker: this.isWorker || undefined, + version: renderVersion(this.version), + name: this.name || undefined, + port: this.port, + }, + } + } + + /** + * Encrypt a string for recipient with the stored private identity. + * @param plainTextMessage The string to encrypt. 
+ * @param recipient The public key of the recipient of the message. + */ + boxMessage( + plainTextMessage: string, + recipient: Identity, + ): { nonce: string; boxedMessage: string } { + return boxMessage(plainTextMessage, this.privateIdentity, recipient) + } + + /** + * Decrypt a message using a nonce from a sender. + * @param boxedMessage An encrypted message string. + * @param nonce A nonce, generated by boxMessage. + * @param sender The public key of the message sender. + */ + unboxMessage(boxedMessage: string, nonce: string, sender: Identity): string | null { + return unboxMessage(boxedMessage, nonce, sender, this.privateIdentity) + } +} diff --git a/ironfish/src/network/peers/peer.test.ts b/ironfish/src/network/peers/peer.test.ts new file mode 100644 index 0000000000..ee328299b5 --- /dev/null +++ b/ironfish/src/network/peers/peer.test.ts @@ -0,0 +1,417 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import * as encryption from './encryption' + +jest.mock('ws') +jest.mock('wrtc') +jest.mock('./encryption', () => { + const originalModule = jest.requireActual('./encryption') + + return { + ...originalModule, + boxMessage: jest + .fn() + .mockReturnValue({ nonce: 'boxMessageNonce', boxedMessage: 'boxMessageMessage' }), + unboxMessage: jest.fn().mockReturnValue(JSON.stringify({ type: 'offer' })), + } +}) + +import ws from 'ws' +import wrtc from 'wrtc' +import { mockIdentity } from '../testUtilities' +import { + ConnectionDirection, + ConnectionType, + WebRtcConnection, + WebSocketConnection, +} from './connections' +import { Peer } from './peer' +import { createRootLogger } from '../../logger' + +jest.useFakeTimers() + +describe('Starts in the DISCONNECTED state', () => { + it('Initializes identity for null identity', () => { + const unidentifiedPeer = new Peer(null) + expect(unidentifiedPeer.state).toEqual({ + type: 'DISCONNECTED', + identity: null, + }) + }) + + it('Initializes identity when given an identity', () => { + const identity = mockIdentity('peer') + const peer = new Peer(identity) + expect(peer.state).toEqual({ + type: 'DISCONNECTED', + identity: identity, + }) + }) +}) + +describe('setWebSocketConnection', () => { + it('Changes to CONNECTING when in DISCONNECTED', () => { + const identity = mockIdentity('peer') + const peer = new Peer(identity) + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + createRootLogger(), + ) + peer.setWebSocketConnection(connection) + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: identity, + connections: { webSocket: connection }, + }) + }) + + it('Call successfulConnection when CONNECTED', () => { + const identity = mockIdentity('peer') + const peer = new Peer(identity) + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + createRootLogger(), + ) + const retry = peer.getConnectionRetry( + ConnectionType.WebSocket, + 
ConnectionDirection.Outbound, + ) + if (retry == null) throw new Error('Retry should not be null') + const successSpy = jest.spyOn(retry, 'successfulConnection') + + connection.setState({ type: 'CONNECTED', identity: identity }) + peer.setWebSocketConnection(connection) + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity: identity, + connections: { webSocket: connection }, + }) + expect(successSpy).toBeCalled() + }) +}) + +describe('setWebRtcConnection', () => { + it('Changes to CONNECTING when in DISCONNECTED', () => { + const identity = mockIdentity('peer') + const peer = new Peer(identity) + const connection = new WebRtcConnection(false, wrtc, createRootLogger()) + + peer.setWebRtcConnection(connection) + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: identity, + connections: { webRtc: connection }, + }) + }) + + it('Updates supportedConnectionTypes when CONNECTED', () => { + const identity = mockIdentity('peer') + const peer = new Peer(identity) + const connection = new WebRtcConnection(true, wrtc, createRootLogger()) + + const retry = peer.getConnectionRetry(ConnectionType.WebRtc, ConnectionDirection.Outbound) + if (retry == null) throw new Error('Retry should not be null') + const successSpy = jest.spyOn(retry, 'successfulConnection') + + connection.setState({ type: 'CONNECTED', identity: identity }) + peer.setWebRtcConnection(connection) + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity: identity, + connections: { webRtc: connection }, + }) + expect(successSpy).toBeCalled() + }) +}) + +it('Times out WebRTC handshake', () => { + const connection = new WebRtcConnection(false, wrtc, createRootLogger()) + expect(connection.state.type).toEqual('CONNECTING') + + const peer = new Peer(null) + + // Time out requesting signaling + connection.setState({ type: 'REQUEST_SIGNALING' }) + expect(connection.state.type).toEqual('REQUEST_SIGNALING') + peer.setWebRtcConnection(connection) + 
expect(peer.state.type).toEqual('CONNECTING') + jest.runOnlyPendingTimers() + expect(connection.state.type).toEqual('DISCONNECTED') + expect(peer.state.type).toEqual('DISCONNECTED') + + // Time out signaling + connection.setState({ type: 'SIGNALING' }) + expect(connection.state.type).toEqual('SIGNALING') + peer.setWebRtcConnection(connection) + expect(peer.state.type).toEqual('CONNECTING') + jest.runOnlyPendingTimers() + expect(connection.state.type).toEqual('DISCONNECTED') + expect(peer.state.type).toEqual('DISCONNECTED') + + // Time out waiting for identity + connection.setState({ type: 'WAITING_FOR_IDENTITY' }) + expect(connection.state.type).toEqual('WAITING_FOR_IDENTITY') + peer.setWebRtcConnection(connection) + expect(peer.state.type).toEqual('CONNECTING') + jest.runOnlyPendingTimers() + expect(connection.state.type).toEqual('DISCONNECTED') + expect(peer.state.type).toEqual('DISCONNECTED') + + // Cancel timeout if we identify + connection.setState({ type: 'WAITING_FOR_IDENTITY' }) + expect(connection.state.type).toEqual('WAITING_FOR_IDENTITY') + peer.setWebRtcConnection(connection) + expect(peer.state.type).toEqual('CONNECTING') + connection.setState({ type: 'CONNECTED', identity: mockIdentity('peer') }) + jest.runOnlyPendingTimers() + expect(connection.state.type).toEqual('CONNECTED') + expect(peer.state.type).toEqual('CONNECTED') +}) + +it('Transitions to DISCONNECTED when all connections disconnect', () => { + const peer = new Peer(null) + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + createRootLogger(), + ) + peer.setWebSocketConnection(connection) + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: null, + connections: { webSocket: connection }, + }) + + connection.setState({ type: 'DISCONNECTED' }) + + expect(peer.state).toEqual({ + type: 'DISCONNECTED', + identity: null, + }) +}) + +it('Transitions to CONNECTED when a connection receives an identity', () => { + const identity = 
mockIdentity('peer') + const peer = new Peer(null) + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + createRootLogger(), + ) + peer.setWebSocketConnection(connection) + const retry = peer.getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + if (retry == null) throw new Error('Retry should not be null') + const successSpy = jest.spyOn(retry, 'successfulConnection') + + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: null, + connections: { webSocket: connection }, + }) + + connection.setState({ type: 'CONNECTED', identity }) + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity: identity, + connections: { webSocket: connection }, + }) + expect(successSpy).toBeCalled() +}) + +it('Transitions to CONNECTED when adding a connection with state CONNECTED', () => { + const identity = mockIdentity('peer') + const peer = new Peer(null) + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + createRootLogger(), + ) + connection.setState({ + type: 'CONNECTED', + identity, + }) + const retry = peer.getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + if (retry == null) throw new Error('Retry should not be null') + const successSpy = jest.spyOn(retry, 'successfulConnection') + + peer.setWebSocketConnection(connection) + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity: identity, + connections: { webSocket: connection }, + }) + expect(successSpy).toBeCalled() +}) + +it('Stays in CONNECTED when adding an additional connection', () => { + const identity = mockIdentity('peer') + const peer = new Peer(null) + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + createRootLogger(), + ) + peer.setWebSocketConnection(connection) + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: null, + connections: { webSocket: connection }, + }) + + connection.setState({ type: 
'CONNECTED', identity }) + + // Add in an additional connection + const wrtcConnection = new WebRtcConnection(true, wrtc, createRootLogger()) + peer.setWebRtcConnection(wrtcConnection) + expect(wrtcConnection.state.type).not.toBe('CONNECTED') + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity: identity, + connections: { webSocket: connection, webRtc: wrtcConnection }, + }) +}) + +describe('Stays in CONNECTED when one connection disconnects', () => { + it('WebSocket disconnects', () => { + const identity = mockIdentity('peer') + const peer = new Peer(null) + + // Add a CONNECTED WebSocket connection + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + createRootLogger(), + ) + peer.setWebSocketConnection(connection) + connection.setState({ type: 'CONNECTED', identity }) + + // Add a CONNECTED WebRTC connection + const wrtcConnection = new WebRtcConnection(true, wrtc, createRootLogger()) + peer.setWebRtcConnection(wrtcConnection) + wrtcConnection.setState({ type: 'CONNECTED', identity }) + + expect(peer.state.type).toBe('CONNECTED') + + connection.close() + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity: identity, + connections: { webRtc: wrtcConnection }, + }) + }) + + it('WebRTC disconnects', () => { + const identity = mockIdentity('peer') + const peer = new Peer(null) + + // Add a CONNECTED WebSocket connection + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + createRootLogger(), + ) + peer.setWebSocketConnection(connection) + connection.setState({ type: 'CONNECTED', identity }) + + // Add a CONNECTED WebRTC connection + const wrtcConnection = new WebRtcConnection(true, wrtc, createRootLogger()) + peer.setWebRtcConnection(wrtcConnection) + wrtcConnection.setState({ type: 'CONNECTED', identity }) + + expect(peer.state.type).toBe('CONNECTED') + + wrtcConnection.close() + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity: identity, + 
connections: { webSocket: connection }, + }) + }) +}) + +describe('Updates supportedConnectionTypes when one connection disconnects with an error', () => { + it('WebSocket disconnects', () => { + const peer = new Peer(null) + + const retry = peer.getConnectionRetry( + ConnectionType.WebSocket, + ConnectionDirection.Outbound, + ) + if (retry == null) throw new Error('Retry should not be null') + const failSpy = jest.spyOn(retry, 'failedConnection') + + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + createRootLogger(), + ) + peer.setWebSocketConnection(connection) + + connection['_error'] = new Error('Test') + connection.setState({ type: 'DISCONNECTED' }) + expect(failSpy).toBeCalled() + }) + + it('WebRTC disconnects', () => { + const peer = new Peer(null) + + const retry = peer.getConnectionRetry(ConnectionType.WebRtc, ConnectionDirection.Outbound) + if (retry == null) throw new Error('Retry should not be null') + const failSpy = jest.spyOn(retry, 'failedConnection') + + const connection = new WebRtcConnection(true, wrtc, createRootLogger()) + peer.setWebRtcConnection(connection) + + connection['_error'] = new Error('Test') + connection.setState({ type: 'DISCONNECTED' }) + + expect(failSpy).toBeCalled() + }) +}) + +it('Does not clear knownPeers when transitioning to DISCONNECTED', () => { + // knownPeers represents other peers' connections to a given peer. 
Just because + // we disconnected from the peer doesn't mean that other peers also did so + + const peer1Identity = mockIdentity('peer1') + const peer2Identity = mockIdentity('peer2') + const peer1 = new Peer(null) + const peer2 = new Peer(peer2Identity) + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + createRootLogger(), + ) + peer1.setWebSocketConnection(connection) + expect(peer1.state).toEqual({ + type: 'CONNECTING', + identity: null, + connections: { webSocket: connection }, + }) + connection.setState({ + type: 'CONNECTED', + identity: peer1Identity, + }) + peer1.knownPeers.set(peer2Identity, peer2) + peer2.knownPeers.set(peer1Identity, peer1) + const onKnownPeersChangedSpy = jest.spyOn(peer1.onKnownPeersChanged, 'emit') + + connection.close() + + expect(onKnownPeersChangedSpy).not.toBeCalled() + expect(peer1.knownPeers.size).toBe(1) + expect(peer1.knownPeers.has(peer2Identity)).toBeTruthy() + expect(peer2.knownPeers.size).toBe(1) + expect(peer2.knownPeers.has(peer1Identity)).toBeTruthy() +}) diff --git a/ironfish/src/network/peers/peer.ts b/ironfish/src/network/peers/peer.ts new file mode 100644 index 0000000000..d321c1df8d --- /dev/null +++ b/ironfish/src/network/peers/peer.ts @@ -0,0 +1,560 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Event } from '../../event' +import { Logger, createRootLogger } from '../../logger' + +import { Identity } from '../identity' +import { Version } from '../version' +import { DisconnectingReason, LooseMessage } from '../messages' +import { ConnectionRetry } from './connectionRetry' +import { WebRtcConnection, WebSocketConnection } from './connections' +import { Connection, ConnectionDirection, ConnectionType } from './connections/connection' +import { ErrorUtils } from '../../utils' +import colors from 'colors/safe' + +/** + * PeerConnectionState contains at least one connection, as well as an optional second connection. + */ +type PeerConnectionState = + | { webSocket: WebSocketConnection; webRtc: WebRtcConnection } + | { webSocket?: undefined; webRtc: WebRtcConnection } + | { webSocket: WebSocketConnection; webRtc?: undefined } + +type PeerState = + /* Identity may exist if the peer is known by another peer, or has been previously connected to */ + | { type: 'DISCONNECTED'; identity: Identity | null } + /* Peer has at least one connection, but none are ready to send/receive messages */ + | { + type: 'CONNECTING' + identity: Identity | null + connections: Readonly + } + /* Peer has at least one connection that has been identified and is ready to send/receive messages */ + | { + type: 'CONNECTED' + identity: Identity + connections: Readonly + } + +export class Peer { + readonly pendingRPCMax: number + readonly logger: Logger + + /** + * The current state of the peer. 
+ */ + private _state: Readonly + get state(): Readonly { + return this._state + } + + get isSaturated(): boolean { + return this.pendingRPC >= this.pendingRPCMax + } + + /** + * The last error the peer encountered + */ + private _error: Readonly | null + get error(): Readonly | null { + return this._error + } + + /** + * name associated with this peer + */ + name: string | null = null + + /** + * Is the peer a worker node that should not be advertised + */ + version: Version | null = null + + /** + * The loggable name of the peer. For a more specific value, + * try Peer.name or Peer.state.identity. + */ + get displayName(): string { + if (this.state.identity === null) { + return 'unidentified' + } + + const identitySlice = this.state.identity.slice(0, 7) + if (this.name) { + return `${identitySlice} (${this.name})` + } + return identitySlice + } + + /** + * Is the peer a worker node that should not be advertised + */ + isWorker = false + + /** + * Is the peer a node we will always attempt to connect to + */ + isWhitelisted = false + + /** + * address associated with this peer + */ + private _address: string | null = null + get address(): string | null { + return this._address + } + + /** + * port associated with this peer + */ + private _port: number | null = null + get port(): number | null { + return this._port + } + + /** how many outbound connections does the peer have */ + pendingRPC = 0 + + /** + * A map of peers connected to this peer, shared by the PeerList message. + */ + knownPeers: Map = new Map() + + private readonly supportedConnections: { + [ConnectionType.WebSocket]: ConnectionRetry + [ConnectionType.WebRtc]: ConnectionRetry + } = { + WebRtc: new ConnectionRetry(), + WebSocket: new ConnectionRetry(), + } + + /** + * The reason why the Peer requested to disconnect from us. + */ + peerRequestedDisconnectReason: DisconnectingReason | null = null + + /** + * UTC timestamp. 
If set, the peer manager should not initiate connections to the + * Peer until after the timestamp. + */ + peerRequestedDisconnectUntil: number | null = null + + /** + * The reason why we requested the Peer not to connect to us. + */ + localRequestedDisconnectReason: DisconnectingReason | null = null + + /** + * UTC timestamp. If set, the peer manager should not accept connections from the + * Peer until after the timestamp. + */ + localRequestedDisconnectUntil: number | null = null + + /** + * Event fired for every new incoming message that needs to be processed + * by the application layer. Includes the connection from which the message + * was received. + */ + readonly onMessage: Event<[LooseMessage, Connection]> = new Event() + + /** + * Event fired when the knownPeers map changes. + */ + readonly onKnownPeersChanged: Event<[]> = new Event() + + /** + * Event fired when the peer changes state. The event may fire when connections change, even if the + * state type stays the same. + */ + readonly onStateChanged: Event<[{ prevState: PeerState }]> = new Event() + + constructor( + identity: Identity | null, + { + logger = createRootLogger(), + maxPending = 5, + }: { + logger?: Logger + maxPending?: number + } = {}, + ) { + this.logger = logger.withTag('Peer') + this.pendingRPCMax = maxPending + this._error = null + this._state = { + type: 'DISCONNECTED', + identity: identity, + } + } + + /** + * Sets a WebRTC connection on the peer, moving it into the CONNECTING state if necessary. + * Ignores the connection if the peer already has a WebRTC connection. + * @param connection The WebRTC connection to set + */ + setWebRtcConnection(connection: WebRtcConnection): void { + if (this.state.type !== 'DISCONNECTED' && this.state.connections.webRtc) { + this.logger.warn('Already have a WebRTC connection, ignoring the new one') + return + } + + const webSocket = + this.state.type !== 'DISCONNECTED' ? 
this.state.connections.webSocket : undefined + + this.setState(this.computeStateFromConnections(webSocket, connection)) + } + + /** + * Sets a WebSocket connection on the peer, moving it into the CONNECTING state if necessary. + * Ignores the connection if the peer already has a WebSocket connection. + * @param connection The WebSocket connection to set + */ + setWebSocketConnection(connection: WebSocketConnection): void { + if (this.state.type !== 'DISCONNECTED' && this.state.connections.webSocket) { + this.logger.warn('Already have a WebSocket connection, ignoring the new one') + return + } + + const webRtc = + this.state.type !== 'DISCONNECTED' ? this.state.connections.webRtc : undefined + + this.setState(this.computeStateFromConnections(connection, webRtc)) + } + + private computeStateFromConnections( + wsConnection: WebSocketConnection | undefined, + webRtcConnection: WebRtcConnection | undefined, + ): PeerState { + // If both connections are either disconnected or don't exist, the + // state should be DISCONNECTED + if ( + (!wsConnection || wsConnection.state.type === 'DISCONNECTED') && + (!webRtcConnection || webRtcConnection.state.type === 'DISCONNECTED') + ) { + return { type: 'DISCONNECTED', identity: this.state.identity } + } + + // If at least one connection is CONNECTED, the state should be CONNECTED + // TODO: Need to resolve what happens if identities conflict + if (webRtcConnection && webRtcConnection.state.type === 'CONNECTED') { + return { + type: 'CONNECTED', + identity: webRtcConnection.state.identity, + connections: { + webSocket: wsConnection, + webRtc: webRtcConnection, + }, + } + } else if (wsConnection && wsConnection.state.type === 'CONNECTED') { + return { + type: 'CONNECTED', + identity: wsConnection.state.identity, + connections: { + webSocket: wsConnection, + webRtc: webRtcConnection, + }, + } + } + + // The remaining states are CONNECTING + return { + type: 'CONNECTING', + identity: this.state.identity, + connections: { + 
...(webRtcConnection ? { webRtc: webRtcConnection } : {}), + ...(wsConnection ? { webSocket: wsConnection } : {}), + }, + } + } + + /** + * Removes a connection from the peer, doing nothing if it doesn't exist on the peer. + * @param connection The connection to remove + */ + removeConnection(connection: Connection): Connection { + if (this.state.type === 'DISCONNECTED') return connection + + const wsConnection = + connection === this.state.connections.webSocket + ? undefined + : this.state.connections.webSocket + + const webRtcConnection = + connection === this.state.connections.webRtc ? undefined : this.state.connections.webRtc + + this.setState(this.computeStateFromConnections(wsConnection, webRtcConnection)) + + return connection + } + + /** + * Gets the peer's identity, or throws an error if the peer is unidentified. + */ + getIdentityOrThrow(): Identity { + if (this.state.identity === null) { + throw new Error('Called getIdentityOrThrow on an unidentified peer') + } + return this.state.identity + } + + /** + * Get the peers connectable websocket address + */ + getWebSocketAddress(includeProtocol = true): string { + let address = '' + + if (includeProtocol) { + address = 'ws://' + address + } + + if (this.address) { + address += this.address + } + + if (this.port) { + address = address + ':' + String(this.port) + } + + return address + } + + /** + * Sets the address and peer by which the peer can be connected to over WebSockets. + * Setting address and port to null makes a peer unconnectable via WebSocket outbound connections. + * @param address Hostname of the address, or null to remove the address. + * @param port Port to connect over. Must be null if address is null. 
+ */ + setWebSocketAddress(address: string | null, port: number | null): void { + if (address === null && port !== null) { + throw new Error( + `Called setWebSocketAddress on ${String( + this.state.identity, + )} with a port but no address`, + ) + } + + // Don't do anything if the address and port stay the same + if (address === this._address && port === this._port) { + return + } + + this._address = address + this._port = port + + if (address === null && port === null) { + this.getConnectionRetry( + ConnectionType.WebSocket, + ConnectionDirection.Outbound, + )?.neverRetryConnecting() + } else { + // Reset ConnectionRetry since some component of the address changed + this.getConnectionRetry( + ConnectionType.WebSocket, + ConnectionDirection.Outbound, + )?.successfulConnection() + } + } + + /** + * Sends a message over the peer's connection if CONNECTED, else drops it. + * @param message The message to send. + */ + send(message: LooseMessage): Connection | null { + if (this.state.type === 'CONNECTED') { + if (this.state.connections.webRtc?.state.type === 'CONNECTED') { + this.state.connections.webRtc.send(message) + return this.state.connections.webRtc + } + + if (this.state.connections.webSocket?.state.type === 'CONNECTED') { + this.state.connections.webSocket.send(message) + return this.state.connections.webSocket + } + + this.logger.warn( + `${this.displayName} is in CONNECTED state but has no active connections, dropping message of type ${message.type}`, + ) + + return null + } + + this.logger.debug( + `Tried to send a message ${message.type} to ${this.displayName} in state ${this.state.type}`, + ) + + return null + } + + private getConnectionStateOrDefault(state: PeerState) { + return state.type === 'DISCONNECTED' + ? 
{ webRtc: undefined, webSocket: undefined } + : state.connections + } + + getConnectionRetry(type: ConnectionType, direction: ConnectionDirection.Inbound): null + getConnectionRetry(type: ConnectionType, direction: ConnectionDirection): ConnectionRetry + getConnectionRetry( + type: ConnectionType, + direction: ConnectionDirection, + ): ConnectionRetry | null { + if (direction === ConnectionDirection.Inbound) return null + return this.supportedConnections[type] + } + + private readonly connectionMessageHandlers: Map< + Connection, + (message: LooseMessage) => void + > = new Map void>() + + private readonly connectionStateChangedHandlers: Map void> = new Map< + Connection, + () => void + >() + + private unbindConnectionEvents(connection?: Connection): void { + if (!connection) return + + // onMessage + const messageHandler = this.connectionMessageHandlers.get(connection) + if (messageHandler) { + connection.onMessage.off(messageHandler) + this.connectionMessageHandlers.delete(connection) + } + + // onStateChanged + const stateChangedHandler = this.connectionStateChangedHandlers.get(connection) + if (stateChangedHandler) { + connection.onStateChanged.off(stateChangedHandler) + this.connectionStateChangedHandlers.delete(connection) + } + } + + private bindConnectionEvents(connection?: Connection): void { + if (!connection) return + + if (connection.state.type === 'CONNECTED') { + this.getConnectionRetry(connection.type, connection.direction)?.successfulConnection() + if (connection instanceof WebSocketConnection && connection.hostname != null) { + this.setWebSocketAddress(connection.hostname, connection.port || null) + } + } + + // onMessage + if (!this.connectionMessageHandlers.has(connection)) { + const messageHandler = (message: LooseMessage) => this.onMessage.emit(message, connection) + this.connectionMessageHandlers.set(connection, messageHandler) + connection.onMessage.on(messageHandler) + } + + // onStateChanged + if 
(!this.connectionStateChangedHandlers.has(connection)) { + const stateChangedHandler = () => { + if (this.state.type === 'DISCONNECTED') { + throw new Error('Peer should not have any connections while in DISCONNECTED state') + } + + if (connection.state.type === 'DISCONNECTED') { + this.logger.debug( + `Connection closing ${connection.type} for ${this.displayName}:`, + ErrorUtils.renderError(connection.error) || 'Reason Unknown', + ) + + if (connection.error != null) { + this._error = connection.error + this.getConnectionRetry(connection.type, connection.direction)?.failedConnection( + this.isWhitelisted, + ) + } + + this.removeConnection(connection) + return + } + + if (connection.state.type === 'CONNECTED') { + // If connection goes to connected, transition the peer to connected + this.getConnectionRetry(connection.type, connection.direction)?.successfulConnection() + if (connection instanceof WebSocketConnection && connection.hostname != null) { + this.setWebSocketAddress(connection.hostname, connection.port || null) + } + this.setState( + this.computeStateFromConnections( + this.state.connections.webSocket, + this.state.connections.webRtc, + ), + ) + } + } + this.connectionStateChangedHandlers.set(connection, stateChangedHandler) + connection.onStateChanged.on(stateChangedHandler) + } + } + + /** + * Changes the peer's state from this.state to nextState. + * @param nextState The new peer state. 
+ */ + private setState(nextState: PeerState): void { + // Perform pre-transition actions + const lastConState = this.getConnectionStateOrDefault(this.state) + const nextConState = this.getConnectionStateOrDefault(nextState) + + if (lastConState.webRtc !== nextConState.webRtc) { + this.unbindConnectionEvents(lastConState.webRtc) + this.bindConnectionEvents(nextConState.webRtc) + } + if (lastConState.webSocket !== nextConState.webSocket) { + this.unbindConnectionEvents(lastConState.webSocket) + this.bindConnectionEvents(nextConState.webSocket) + } + + // Once a peer identity has been set, it must stay the same + if (this.state.identity !== null && nextState.identity !== this.state.identity) { + throw new Error( + `Attempted to change state.identity from ${this.state.identity} to ${String( + nextState.identity, + )}`, + ) + } + + // Transition the state + const prevState = this._state + this._state = nextState + + // Perform post-transition actions + if (prevState.type !== 'CONNECTED' && this.state.type === 'CONNECTED') { + this._error = null + } + + if (prevState.type !== nextState.type) { + this.logger.debug( + `${colors.green('PEER')} ${this.displayName} STATE ${prevState.type} -> ${ + this._state.type + }`, + ) + } + + this.onStateChanged.emit({ prevState }) + } + + /** + * Set the peer's state to DISCONNECTED, closing open connections. + */ + close(error?: Readonly): void { + const connections = this.getConnectionStateOrDefault(this.state) + connections.webRtc && this.removeConnection(connections.webRtc).close(error) + connections.webSocket && this.removeConnection(connections.webSocket).close(error) + + if (error != undefined) this._error = error + this.setState({ type: 'DISCONNECTED', identity: this.state.identity }) + } + + /** + * Clean up all resources managed by the peer. 
+ */ + dispose(): void { + this.onStateChanged.clear() + this.onKnownPeersChanged.clear() + this.onMessage.clear() + } +} diff --git a/ironfish/src/network/peers/peerConnectionManager.test.ts b/ironfish/src/network/peers/peerConnectionManager.test.ts new file mode 100644 index 0000000000..4bf20fbd9b --- /dev/null +++ b/ironfish/src/network/peers/peerConnectionManager.test.ts @@ -0,0 +1,241 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +jest.mock('ws') +jest.mock('wrtc') + +import { createRootLogger } from '../../logger' +import { PeerManager } from './peerManager' +import { PeerConnectionManager } from './peerConnectionManager' +import { + ConnectionDirection, + ConnectionType, + WebRtcConnection, + WebSocketConnection, +} from './connections' +import { + getConnectedPeer, + mockIdentity, + mockLocalPeer, + webRtcCanInitiateIdentity, + webRtcLocalIdentity, +} from '../testUtilities' + +jest.useFakeTimers() + +describe('connectToDisconnectedPeers', () => { + it('Should not connect to disconnected peers without an address or peers', () => { + const pm = new PeerManager(mockLocalPeer()) + const peer = pm.getOrCreatePeer(null) + const pcm = new PeerConnectionManager(pm, createRootLogger(), { maxPeers: 50 }) + pm['logger'].mockTypes(() => jest.fn()) + pcm.start() + expect(peer.state).toEqual({ + type: 'DISCONNECTED', + identity: null, + }) + }) + + it('Should connect to disconnected unidentified peers with an address', () => { + const pm = new PeerManager(mockLocalPeer()) + const peer = pm.getOrCreatePeer(null) + peer.setWebSocketAddress('testuri.com', 9033) + const pcm = new PeerConnectionManager(pm, createRootLogger(), { maxPeers: 50 }) + pcm.start() + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: null, + connections: { + webSocket: expect.any(WebSocketConnection), + }, + }) + }) + + 
it('Should connect to disconnected identified peers with an address over WS', () => { + const pm = new PeerManager(mockLocalPeer()) + + const identity = mockIdentity('peer') + const peer = pm.getOrCreatePeer(identity) + peer.setWebSocketAddress('testuri.com', 9033) + + // We want to test websocket only + peer + .getConnectionRetry(ConnectionType.WebRtc, ConnectionDirection.Outbound) + .neverRetryConnecting() + + const pcm = new PeerConnectionManager(pm, createRootLogger(), { maxPeers: 50 }) + pcm.start() + + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: identity, + connections: { webSocket: expect.any(WebSocketConnection) }, + }) + }) + + it('Should connect to webrtc and websockets', () => { + const peers = new PeerManager(mockLocalPeer()) + + const identity = mockIdentity('peer') + const peer = peers.getOrCreatePeer(identity) + peer.setWebSocketAddress('testuri.com', 9033) + + // Check both connections are eligible to connect to + expect(peers.canConnectToWebRTC(peer)).toBe(true) + expect(peers.canConnectToWebSocket(peer)).toBe(true) + expect(peer.state.type).toBe('DISCONNECTED') + + const peerConnections = new PeerConnectionManager(peers, createRootLogger(), { + maxPeers: 50, + }) + peerConnections.start() + + // Check now that were connecting to websockets and webrtc failed + expect(peers.canConnectToWebRTC(peer)).toBe(false) + expect(peers.canConnectToWebSocket(peer)).toBe(false) + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: identity, + connections: { webSocket: expect.any(WebSocketConnection) }, + }) + }) + + it('Should connect to known peers of connected peers', () => { + const peerIdentity = webRtcCanInitiateIdentity() + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + const { peer: brokeringPeer } = getConnectedPeer(pm, 'brokering') + const peer = pm.getOrCreatePeer(peerIdentity) + // Link the peers + brokeringPeer.knownPeers.set(peerIdentity, peer) + 
peer.knownPeers.set(brokeringPeer.getIdentityOrThrow(), brokeringPeer) + + const pcm = new PeerConnectionManager(pm, createRootLogger(), { maxPeers: 50 }) + pcm.start() + + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: peerIdentity, + connections: { webRtc: expect.any(WebRtcConnection) }, + }) + }) +}) + +describe('maintainOneConnectionPerPeer', () => { + it('Should not close WS connection if the WebRTC connection is not in CONNECTED', () => { + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + const peer = pm.connectToWebSocketAddress('testuri') + const identity = webRtcCanInitiateIdentity() + if (peer.state.type === 'DISCONNECTED') throw new Error('Peer should not be DISCONNECTED') + if (!peer.state.connections.webSocket) + throw new Error('Peer should have a WebSocket connection') + peer.state.connections.webSocket?.setState({ + type: 'CONNECTED', + identity: identity, + }) + + pm.connectToWebRTC(peer) + + if (!peer.state.connections.webRtc) throw new Error('Peer should have a WebRTC connection') + peer.state.connections.webRtc.setState({ + type: 'SIGNALING', + }) + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity, + connections: { + webRtc: expect.any(WebRtcConnection), + webSocket: expect.any(WebSocketConnection), + }, + }) + + const pcm = new PeerConnectionManager(pm, createRootLogger(), { maxPeers: 50 }) + pcm.start() + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity, + connections: { + webRtc: expect.any(WebRtcConnection), + webSocket: expect.any(WebSocketConnection), + }, + }) + }) + + it('Should close WebSocket connection if a peer has WS and WebRTC connections', () => { + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + const peer = pm.connectToWebSocketAddress('testuri') + const identity = webRtcCanInitiateIdentity() + if (peer.state.type === 'DISCONNECTED') throw new Error('Peer should not be DISCONNECTED') + if 
(!peer.state.connections.webSocket) + throw new Error('Peer should have a WebSocket connection') + peer.state.connections.webSocket?.setState({ + type: 'CONNECTED', + identity: identity, + }) + + pm.connectToWebRTC(peer) + + if (!peer.state.connections.webRtc) throw new Error('Peer should have a WebRTC connection') + peer.state.connections.webRtc.setState({ + type: 'CONNECTED', + identity: identity, + }) + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity, + connections: { + webRtc: expect.any(WebRtcConnection), + webSocket: expect.any(WebSocketConnection), + }, + }) + + const pcm = new PeerConnectionManager(pm, createRootLogger(), { maxPeers: 50 }) + pcm.start() + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity, + connections: { + webRtc: expect.any(WebRtcConnection), + }, + }) + }) +}) + +describe('attemptToEstablishWebRtcConnectionsToWSPeers', () => { + it('Should attempt to establish a WebRTC connection if we have a WebSocket connection', () => { + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + const peer = pm.connectToWebSocketAddress('testuri') + const identity = webRtcCanInitiateIdentity() + if (peer.state.type === 'DISCONNECTED') throw new Error('Peer should not be DISCONNECTED') + if (!peer.state.connections.webSocket) + throw new Error('Peer should have a WebSocket connection') + peer.state.connections.webSocket?.setState({ + type: 'CONNECTED', + identity: identity, + }) + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity, + connections: { + webSocket: expect.any(WebSocketConnection), + }, + }) + + const pcm = new PeerConnectionManager(pm, createRootLogger(), { maxPeers: 50 }) + pcm.start() + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity, + connections: { + webRtc: expect.any(WebRtcConnection), + webSocket: expect.any(WebSocketConnection), + }, + }) + }) +}) diff --git a/ironfish/src/network/peers/peerConnectionManager.ts 
b/ironfish/src/network/peers/peerConnectionManager.ts new file mode 100644 index 0000000000..0d617b1ac1 --- /dev/null +++ b/ironfish/src/network/peers/peerConnectionManager.ts @@ -0,0 +1,110 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { createRootLogger, Logger } from '../../logger' +import type { Peer } from './peer' +import { PeerManager } from './peerManager' + +/** + * The time to wait after finishing the event loop before running the event loop again + */ +const EVENT_LOOP_MS = 2000 + +/** + * PeerConnectionManager periodically determines whether to open new connections and/or + * close existing connections on peers. + */ +export class PeerConnectionManager { + private readonly logger: Logger + private readonly peerManager: PeerManager + readonly maxPeers: number + + private started = false + private eventLoopTimer?: ReturnType + + constructor( + peerManager: PeerManager, + logger: Logger = createRootLogger(), + options: { + maxPeers: number + }, + ) { + this.peerManager = peerManager + this.logger = logger.withTag('peerconnectionmanager') + this.maxPeers = options.maxPeers + } + + /** + * Start the connection management event loop. Does nothing + * if the event loop has already been started. + */ + start(): void { + if (this.started) return + this.started = true + this.eventLoop() + } + + /** + * Stop the connection management event loop. 
+ */ + stop(): void { + if (this.eventLoopTimer) { + clearTimeout(this.eventLoopTimer) + } + this.started = false + } + + private eventLoop() { + for (const peer of this.peerManager.peers) { + this.connectToEligiblePeers(peer) + this.attemptToEstablishWebRtcConnectionsToWSPeer(peer) + this.maintainOneConnectionPerPeer(peer) + } + + this.eventLoopTimer = setTimeout(() => this.eventLoop(), EVENT_LOOP_MS) + } + + private connectToEligiblePeers(peer: Peer) { + if (peer.state.type !== 'CONNECTED') { + if (this.peerManager.canConnectToWebRTC(peer)) { + this.peerManager.connectToWebRTC(peer) + } + + if (this.peerManager.canConnectToWebSocket(peer)) { + this.peerManager.connectToWebSocket(peer) + } + } + } + + /** + * If we've successfully established both a WebSocket connection and a WebRTC + * connection, close the WebSocket connection + */ + private maintainOneConnectionPerPeer(peer: Peer) { + if ( + peer.state.type === 'CONNECTED' && + peer.state.connections.webRtc?.state.type === 'CONNECTED' && + peer.state.connections.webSocket?.state.type === 'CONNECTED' + ) { + this.logger.debug( + `Upgraded ${peer.displayName} to WebRTC, closing the WebSocket connection`, + ) + peer.state.connections.webSocket.close() + } + } + + /** + * If we've successfully established a WebSocket connection, + * attempt to establish a WebRTC connection + */ + private attemptToEstablishWebRtcConnectionsToWSPeer(peer: Peer) { + if ( + peer.state.type === 'CONNECTED' && + peer.state.connections.webSocket?.state.type === 'CONNECTED' && + this.peerManager.canConnectToWebRTC(peer) + ) { + this.peerManager.connectToWebRTC(peer) + } + } +} diff --git a/ironfish/src/network/peers/peerManager.test.ts b/ironfish/src/network/peers/peerManager.test.ts new file mode 100644 index 0000000000..f8b96c039b --- /dev/null +++ b/ironfish/src/network/peers/peerManager.test.ts @@ -0,0 +1,1525 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import * as encryption from './encryption' + +jest.mock('ws') +jest.mock('wrtc') +jest.mock('./encryption', () => { + const originalModule = jest.requireActual('./encryption') + + return { + ...originalModule, + boxMessage: jest + .fn() + .mockReturnValue({ nonce: 'boxMessageNonce', boxedMessage: 'boxMessageMessage' }), + unboxMessage: jest.fn().mockReturnValue(JSON.stringify({ type: 'offer' })), + } +}) + +import { mocked } from 'ts-jest/utils' +import ws from 'ws' +import { PeerManager } from './peerManager' +import { + getConnectedPeer, + getConnectingPeer, + getSignalingWebRtcPeer, + getWaitingForIdentityPeer, + mockLocalPeer, + mockIdentity, + webRtcCanInitiateIdentity, + webRtcCannotInitiateIdentity, + mockPrivateIdentity, + webRtcLocalIdentity, +} from '../testUtilities' +import { canInitiateWebRTC, privateIdentityToIdentity } from '../identity' +import { + ConnectionDirection, + ConnectionType, + WebRtcConnection, + WebSocketConnection, +} from './connections' +import { + Identify, + InternalMessageType, + Signal, + PeerList, + DisconnectingReason, + DisconnectingMessage, + SignalRequest, +} from '../messages' + +jest.useFakeTimers() + +describe('PeerManager', () => { + it('Throws when creating a peerManager with an invalid version string', () => { + expect(() => new PeerManager(mockLocalPeer({ version: '1' }))).toThrowError() + }) + + describe('Dispose peers', () => { + it('Should not dispose of peers that have a CONNECTED peer', () => { + const pm = new PeerManager(mockLocalPeer()) + const peer1Identity = mockIdentity('peer1') + const peer2Identity = mockIdentity('peer2') + const { peer: peer1 } = getConnectedPeer(pm, peer1Identity) + + const peer2 = pm.getOrCreatePeer(peer2Identity) + peer2.setWebSocketAddress(null, null) + peer1.knownPeers.set(peer2Identity, peer2) + + expect(pm.identifiedPeers.size).toBe(2) + 
expect(pm.peers.length).toBe(2) + + pm['disposePeers']() + + expect(pm.identifiedPeers.size).toBe(2) + expect(pm.peers.length).toBe(2) + }) + + it('Should dispose of two DISCONNECTED peers that have each other in knownPeers', () => { + const pm = new PeerManager(mockLocalPeer()) + const peer1Identity = mockIdentity('peer1') + const peer2Identity = mockIdentity('peer2') + const { peer: peer1 } = getConnectedPeer(pm, peer1Identity) + const { peer: peer2 } = getConnectedPeer(pm, peer2Identity) + + peer1.knownPeers.set(peer2Identity, peer2) + peer2.knownPeers.set(peer1Identity, peer1) + + peer1.close() + peer2.close() + peer1 + .getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + ?.neverRetryConnecting() + peer2 + .getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + ?.neverRetryConnecting() + + expect(peer1.knownPeers.size).toBe(1) + expect(peer2.knownPeers.size).toBe(1) + expect(pm.identifiedPeers.size).toBe(2) + expect(pm.peers.length).toBe(2) + + pm['disposePeers']() + + expect(pm.identifiedPeers.size).toBe(0) + expect(pm.peers.length).toBe(0) + }) + }) + + it('broadcastPeerList sends all connected peers', () => { + const pm = new PeerManager(mockLocalPeer()) + + const { peer: peer1 } = getConnectedPeer(pm, 'peer1') + const { peer: peer2 } = getConnectedPeer(pm, 'peer2') + + expect(pm.identifiedPeers.size).toBe(2) + + const mockBroadcast = jest.spyOn(pm, 'broadcast') + + pm['broadcastPeerList']() + expect(mockBroadcast).toBeCalledTimes(1) + expect(mockBroadcast).toBeCalledWith({ + type: InternalMessageType.peerList, + payload: { + connectedPeers: [ + { address: 'testuri.com', port: 9033, identity: peer1.getIdentityOrThrow() }, + { address: 'testuri.com', port: 9033, identity: peer2.getIdentityOrThrow() }, + ], + }, + }) + }) + + it('should handle duplicate connections from the same peer', () => { + const localPeer = mockLocalPeer({ identity: webRtcLocalIdentity() }) + const peers = new PeerManager(localPeer) + + 
const { peer: peerOut, connection: connectionOut } = getWaitingForIdentityPeer( + peers, + true, + ConnectionDirection.Outbound, + ) + const { peer: peerIn1, connection: connectionIn1 } = getWaitingForIdentityPeer( + peers, + true, + ConnectionDirection.Inbound, + ) + const { peer: peerIn2, connection: connectionIn2 } = getWaitingForIdentityPeer( + peers, + true, + ConnectionDirection.Inbound, + ) + + // Create identity and message for all peers + const identity = webRtcCannotInitiateIdentity() + const message: Identify = { + type: InternalMessageType.identity, + payload: { + identity: identity, + version: 'sdk/1/cli', + port: null, + }, + } + + // Identify peerOut + peerOut.onMessage.emit(message, connectionOut) + // Check PeerManager + expect(peers.identifiedPeers.size).toBe(1) + expect(peers.peers.length).toBe(3) + // Connections + expect(connectionOut.state.type).toEqual('CONNECTED') + expect(connectionIn1.state.type).toEqual('WAITING_FOR_IDENTITY') + expect(connectionIn2.state.type).toEqual('WAITING_FOR_IDENTITY') + // Check Peers + expect(peerOut.state).toMatchObject({ + type: 'CONNECTED', + identity: identity, + connections: { webSocket: connectionOut }, + }) + expect(peerIn1.state).toMatchObject({ + type: 'CONNECTING', + identity: null, + connections: { webSocket: connectionIn1 }, + }) + expect(peerIn2.state).toMatchObject({ + type: 'CONNECTING', + identity: null, + connections: { webSocket: connectionIn2 }, + }) + + // Identify peerIn1 now + peerIn1.onMessage.emit(message, connectionIn1) + // Check PeerManager + expect(peers.identifiedPeers.size).toBe(1) + expect(peers.peers.length).toBe(2) + // Connections + expect(connectionOut.state.type).toEqual('DISCONNECTED') + expect(connectionIn1.state.type).toEqual('CONNECTED') + expect(connectionIn2.state.type).toEqual('WAITING_FOR_IDENTITY') + // Check Peers + expect(peerOut.state).toMatchObject({ + type: 'CONNECTED', + identity: identity, + connections: { webSocket: connectionIn1 }, + }) + 
expect(peerIn1.state).toMatchObject({ + type: 'DISCONNECTED', + identity: identity, + }) + expect(peerIn2.state).toMatchObject({ + type: 'CONNECTING', + identity: null, + connections: { webSocket: connectionIn2 }, + }) + + // Identify peerIn2 now + peerIn2.onMessage.emit(message, connectionIn2) + // Check PeerManager + expect(peers.identifiedPeers.size).toBe(1) + // expect(peers.peers.length).toBe(1) + // Connections + expect(connectionOut.state.type).toEqual('DISCONNECTED') + expect(connectionIn1.state.type).toEqual('CONNECTED') + expect(connectionIn2.state.type).toEqual('DISCONNECTED') + // Check Peers + expect(peerOut.state).toMatchObject({ + type: 'CONNECTED', + identity: identity, + connections: { webSocket: connectionIn1 }, + }) + expect(peerIn1.state).toMatchObject({ + type: 'DISCONNECTED', + identity: identity, + }) + expect(peerIn2.state).toMatchObject({ + type: 'DISCONNECTED', + identity: null, + }) + + // The reason peerIn1 has an identity is because it's identity is taken before + // updatePeerMap() merges into the existing peerOut. peerIn2 has no identity + // because new connections from the same peer have the new connection rejected. + // peerIn2's was never set to connected, so it was never merged into peerOut. 
+ }) + + it('Sends identity when a connection is successfully made', () => { + const localIdentity = mockPrivateIdentity('local') + const pm = new PeerManager(mockLocalPeer({ identity: localIdentity })) + + const { peer, connection } = getConnectingPeer(pm) + + const sendSpy = jest.spyOn(connection, 'send') + + connection.setState({ type: 'WAITING_FOR_IDENTITY' }) + + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: null, + connections: { webSocket: connection }, + }) + expect(sendSpy).toBeCalledWith({ + type: InternalMessageType.identity, + payload: { + identity: privateIdentityToIdentity(localIdentity), + version: 'sdk/1/cli', + port: null, + }, + }) + }) + + it('should disconnect connection on CONNECTED', () => { + const localPeer = mockLocalPeer() + const peers = new PeerManager(localPeer) + + const { peer: peer1, connection: connection1 } = getConnectingPeer(peers) + const { peer: peer2, connection: connection2 } = getWaitingForIdentityPeer(peers) + const { peer: peer3, connection: connection3 } = getConnectedPeer(peers) + + const sendSpyPeer1 = jest.spyOn(connection1, 'send') + const sendSpyPeer2 = jest.spyOn(connection2, 'send') + const sendSpyPeer3 = jest.spyOn(connection3, 'send') + + peers.disconnect(peer1, DisconnectingReason.ShuttingDown, 0) + peers.disconnect(peer2, DisconnectingReason.ShuttingDown, 0) + peers.disconnect(peer3, DisconnectingReason.ShuttingDown, 0) + + expect(sendSpyPeer1).not.toHaveBeenCalled() + expect(sendSpyPeer2).toHaveBeenCalledWith( + expect.objectContaining({ + type: InternalMessageType.disconnecting, + }), + ) + expect(sendSpyPeer3).toHaveBeenCalledWith( + expect.objectContaining({ + type: InternalMessageType.disconnecting, + }), + ) + + expect(peer1.state.type).toEqual('DISCONNECTED') + expect(peer2.state.type).toEqual('DISCONNECTED') + expect(peer3.state.type).toEqual('DISCONNECTED') + }) + + describe('connect', () => { + it('Creates a peer and adds it to unidentifiedConnections', () => { + const pm = new 
PeerManager(mockLocalPeer()) + expect(pm.peers.length).toBe(0) + + const peer = pm.connectToWebSocketAddress('testUri') + + expect(pm.identifiedPeers.size).toBe(0) + expect(pm.peers.length).toBe(1) + expect(peer.state).toEqual({ + type: 'CONNECTING', + connections: { webSocket: expect.any(WebSocketConnection) }, + identity: null, + }) + if (peer.state.type !== 'CONNECTING') throw new Error('Peer state must be CONNECTING') + if (peer.state.connections.webSocket == null) + throw new Error('Peer must have a websocket connection') + expect(peer.state.connections.webSocket.type).toEqual(ConnectionType.WebSocket) + expect(peer.state.connections.webSocket.direction).toEqual(ConnectionDirection.Outbound) + }) + + it('Encrypts signaling data', () => { + const brokeringIdentity = mockIdentity('brokering') + + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + const { connection, brokeringPeer } = getSignalingWebRtcPeer( + pm, + brokeringIdentity, + webRtcCanInitiateIdentity(), + ) + const sendSpy = jest.spyOn(brokeringPeer, 'send') + + connection.onSignal.emit({ + type: 'offer', + }) + + expect(sendSpy).toBeCalledTimes(1) + expect(sendSpy).toBeCalledWith({ + type: InternalMessageType.signal, + payload: { + sourceIdentity: privateIdentityToIdentity(webRtcLocalIdentity()), + destinationIdentity: webRtcCanInitiateIdentity(), + nonce: 'boxMessageNonce', + signal: 'boxMessageMessage', + }, + }) + }) + + it('Attempts to establish a WebSocket connection to a peer with a webSocketAddress', () => { + const peer1Identity = mockIdentity('peer1') + const peer2Identity = mockIdentity('peer2') + const pm = new PeerManager(mockLocalPeer()) + + // Create the peers + const { peer: peer1 } = getConnectedPeer(pm, peer1Identity) + const peer2 = pm.getOrCreatePeer(peer2Identity) + + // Link the peers + peer1.knownPeers.set(peer2Identity, peer2) + peer2.knownPeers.set(peer1Identity, peer1) + + // Verify peer2 is not connected + peer2.setWebSocketAddress('testuri', 
9033) + expect(peer2.state).toEqual({ + type: 'DISCONNECTED', + identity: peer2Identity, + }) + + pm.connectToWebSocket(peer2) + + expect(peer2.state).toEqual({ + type: 'CONNECTING', + connections: { webSocket: expect.any(WebSocketConnection) }, + identity: peer2Identity, + }) + }) + + it('Attempts to establish a WebRTC connection through brokering peer', () => { + const peers = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + + // Create the peers + const { peer: brokeringPeer } = getConnectedPeer(peers) + const targetPeer = peers.getOrCreatePeer(webRtcCanInitiateIdentity()) + expect(targetPeer.state.type).toEqual('DISCONNECTED') + + // Link the peers + brokeringPeer.knownPeers.set(targetPeer.getIdentityOrThrow(), targetPeer) + targetPeer.knownPeers.set(brokeringPeer.getIdentityOrThrow(), brokeringPeer) + + peers.connectToWebRTC(targetPeer) + + expect(targetPeer.state).toMatchObject({ + type: 'CONNECTING', + connections: { webRtc: expect.any(WebRtcConnection) }, + }) + }) + + it('Can establish a WebRTC connection to a peer using an existing WebSocket connection to the same peer', () => { + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + + const { peer, connection } = getConnectedPeer(pm, webRtcCanInitiateIdentity()) + + expect(canInitiateWebRTC(pm.localPeer.publicIdentity, peer.getIdentityOrThrow())).toBe( + true, + ) + + // Call connect() on the same peer to initiate a WebRTC connection + pm.connectToWebRTC(peer) + + expect(peer.state).toEqual({ + type: 'CONNECTED', + connections: { webSocket: connection, webRtc: expect.any(WebRtcConnection) }, + identity: peer.getIdentityOrThrow(), + }) + + if (peer.state.type !== 'CONNECTED') { + throw new Error('Peer should be in state CONNECTED') + } + if (!peer.state.connections.webRtc) { + throw new Error('Peer should have a WebRTC connection') + } + + // Emitting new signal data should trigger a send on the WS connection + expect(pm.identifiedPeers.size).toBe(1) + 
expect(pm.peers).toHaveLength(1) + const sendSpy = jest.spyOn(connection, 'send') + peer.state.connections.webRtc.onSignal.emit({ type: 'offer' }) + expect(sendSpy).toBeCalledTimes(1) + }) + + it('Attempts to request WebRTC signaling through brokering peer', () => { + const peers = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + + // Create the peer to broker the connection through + const { peer: brokeringPeer } = getConnectedPeer(peers) + const brokerPeerSendMock = jest.fn() + brokeringPeer.send = brokerPeerSendMock + + // Create the peer to connect to WebRTC through + const targetPeer = peers.getOrCreatePeer(webRtcCannotInitiateIdentity()) + expect(targetPeer.state.type).toEqual('DISCONNECTED') + + // Link the peers + brokeringPeer.knownPeers.set(targetPeer.getIdentityOrThrow(), targetPeer) + targetPeer.knownPeers.set(brokeringPeer.getIdentityOrThrow(), brokeringPeer) + + peers.connectToWebRTC(targetPeer) + expect(targetPeer.state).toMatchObject({ + type: 'CONNECTING', + connections: { + webRtc: { + state: { + type: 'REQUEST_SIGNALING', + }, + }, + }, + }) + expect(brokerPeerSendMock).toBeCalledWith({ + type: InternalMessageType.signalRequest, + payload: { + sourceIdentity: peers.localPeer.publicIdentity, + destinationIdentity: targetPeer.getIdentityOrThrow(), + }, + }) + }) + + it('Does not create a connection if Peer has disconnectUntil set', () => { + const pm = new PeerManager(mockLocalPeer()) + const { peer } = getConnectedPeer(pm, 'peer') + peer.close() + + // Mock the logger + pm['logger'].mockTypes(() => jest.fn()) + + // Verify that we could otherwise create a connection + pm.connectToWebSocket(peer) + expect(peer.state.type).toBe('CONNECTING') + peer.close() + + // Set disconnectUntil and verify that we can't create a connection + peer.peerRequestedDisconnectUntil = Number.MAX_SAFE_INTEGER + pm.connectToWebSocket(peer) + expect(peer.state.type).toBe('DISCONNECTED') + }) + + it('Sets disconnectUntil to null if current time is after 
disconnectUntil', () => { + const pm = new PeerManager(mockLocalPeer()) + const { peer } = getConnectedPeer(pm, 'peer') + peer.close() + + // Try websockets first + peer.peerRequestedDisconnectUntil = 1 + pm.connectToWebSocket(peer) + expect(peer.state.type).toBe('CONNECTING') + expect(peer.peerRequestedDisconnectUntil).toBeNull() + + // Try websockets first + peer.peerRequestedDisconnectUntil = 1 + pm.connectToWebRTC(peer) + expect(peer.state.type).toBe('CONNECTING') + expect(peer.peerRequestedDisconnectUntil).toBeNull() + }) + + it('Does not create a connection to a disconnected Peer above targetPeers', () => { + const pm = new PeerManager(mockLocalPeer(), undefined, undefined, 50, 1) + + // Add one connected peer + getConnectedPeer(pm, 'peer1') + + // Add a second peer that's disconnected + const peer2Identity = mockIdentity('peer2') + const peer2 = pm.getOrCreatePeer(peer2Identity) + peer2.setWebSocketAddress('testuri.com', 9033) + + // Mock the logger + pm['logger'].mockTypes(() => jest.fn()) + + const result = pm.connectToWebSocket(peer2) + + expect(result).toBe(false) + expect(peer2.state).toEqual({ + type: 'DISCONNECTED', + identity: peer2Identity, + }) + }) + }) + + describe('create peers', () => { + it('Returns the same peer when calling createPeer twice with the same identity', () => { + const peerIdentity = mockIdentity('peer') + const pm = new PeerManager(mockLocalPeer()) + + const peer1 = pm.getOrCreatePeer(peerIdentity) + const peer1Again = pm.getOrCreatePeer(peerIdentity) + expect(peer1).toBe(peer1Again) + expect(pm.peers.length).toBe(1) + expect(pm.identifiedPeers.size).toBe(1) + expect(pm.identifiedPeers.get(peerIdentity)).toBe(peer1) + }) + + it('Merges peers when an unidentified peer connects with the same identity as an identified webrtc peer', () => { + const brokerIdentity = mockIdentity('brokering') + const peerIdentity = webRtcCanInitiateIdentity() + const pm = new PeerManager(mockLocalPeer()) + + const { peer } = getSignalingWebRtcPeer(pm, 
brokerIdentity, peerIdentity) + + if (peer.state.type === 'DISCONNECTED') throw new Error('Peer should not be DISCONNECTED') + if (!peer.state.connections.webRtc) + throw new Error('Peer should have a WebRTC connection') + const webRtcConnection = peer.state.connections.webRtc + webRtcConnection.setState({ + type: 'CONNECTED', + identity: peerIdentity, + }) + + expect(pm.peers.length).toBe(2) + expect(pm.identifiedPeers.size).toBe(2) + expect(pm.identifiedPeers.get(peerIdentity)).toBe(peer) + + const unidentifiedPeer = pm.getOrCreatePeer(null) + const unidentifiedConnection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Inbound, + peer.logger, + ) + unidentifiedPeer.setWebSocketConnection(unidentifiedConnection) + + expect(pm.peers.length).toBe(3) + expect(pm.identifiedPeers.size).toBe(2) + + // Connect the unidentified connection to trigger a merge + unidentifiedConnection.setState({ + type: 'CONNECTED', + identity: peerIdentity, + }) + + expect(pm.peers.length).toBe(2) + expect(pm.identifiedPeers.size).toBe(2) + expect(pm.identifiedPeers.get(peerIdentity)).toBe(peer) + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity: peerIdentity, + connections: { + webSocket: unidentifiedConnection, + webRtc: webRtcConnection, + }, + }) + expect(unidentifiedPeer.state).toEqual({ + type: 'DISCONNECTED', + identity: peerIdentity, + }) + }) + + it('Merges peers when an unidentified peer connects with the same identity as an identified websocket peer', () => { + const peerIdentity = webRtcCanInitiateIdentity() + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + + const { peer, connection } = getConnectedPeer(pm, peerIdentity) + + if (peer.state.type === 'DISCONNECTED') throw new Error('Peer should not be DISCONNECTED') + if (!peer.state.connections.webSocket) + throw new Error('Peer should have a WebRTC connection') + + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity: peerIdentity, + connections: { + webSocket: 
connection, + }, + }) + + expect(pm.peers.length).toBe(1) + expect(pm.identifiedPeers.size).toBe(1) + expect(pm.identifiedPeers.get(peerIdentity)).toBe(peer) + + const unidentifiedPeer = pm.getOrCreatePeer(null) + const unidentifiedConnection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Inbound, + peer.logger, + ) + unidentifiedPeer.setWebSocketConnection(unidentifiedConnection) + + expect(pm.peers.length).toBe(2) + expect(pm.identifiedPeers.size).toBe(1) + + // Connect the unidentified connection to trigger a merge + unidentifiedConnection.setState({ + type: 'CONNECTED', + identity: peerIdentity, + }) + + expect(pm.peers.length).toBe(1) + expect(pm.identifiedPeers.size).toBe(1) + expect(pm.identifiedPeers.get(peerIdentity)).toBe(peer) + expect(peer.state).toEqual({ + type: 'CONNECTED', + identity: peerIdentity, + connections: { + webSocket: unidentifiedConnection, + }, + }) + expect(connection.state).toEqual({ + type: 'DISCONNECTED', + }) + }) + }) + + it('Emits onConnectedPeersChanged when a peer enters CONNECTED or DISCONNECTED', () => { + const pm = new PeerManager(mockLocalPeer()) + const onConnectedPeersChangedMock = jest.fn() + pm.onConnectedPeersChanged.on(onConnectedPeersChangedMock) + + const { peer: connecting } = getConnectingPeer(pm) + const { peer: waiting } = getWaitingForIdentityPeer(pm) + const { peer: connected } = getConnectedPeer(pm, 'peer') + + expect(onConnectedPeersChangedMock).toBeCalledTimes(1) + + // Disconnect all of the peers + connecting.close() + waiting.close() + connected.close() + + expect(onConnectedPeersChangedMock).toBeCalledTimes(2) + }) + + describe('Message: Identity', () => { + it('Adds the peer to identifiedPeers after receiving a valid identity message', () => { + const other = mockIdentity('other') + const pm = new PeerManager(mockLocalPeer()) + + expect(pm.identifiedPeers.size).toBe(0) + expect(pm.peers.length).toBe(0) + + const { peer, connection } = getWaitingForIdentityPeer(pm) + + const identify: 
Identify = { + type: InternalMessageType.identity, + payload: { + identity: other, + port: peer.port, + version: 'sdk/1/cli', + }, + } + peer.onMessage.emit(identify, connection) + + expect(pm.identifiedPeers.size).toBe(1) + expect(pm.peers.length).toBe(1) + expect(connection.state).toEqual({ + type: 'CONNECTED', + identity: other, + }) + expect(peer.state).toEqual({ + type: 'CONNECTED', + connections: { webSocket: connection }, + identity: other, + }) + }) + + it('Closes the connection when versions do not match', () => { + const other = mockPrivateIdentity('other') + const pm = new PeerManager(mockLocalPeer()) + + const { peer, connection } = getWaitingForIdentityPeer(pm) + + expect(pm.peers.length).toBe(1) + const closeSpy = jest.spyOn(connection, 'close') + const retry = peer.getConnectionRetry( + ConnectionType.WebSocket, + ConnectionDirection.Outbound, + ) + if (retry == null) throw new Error('Retry must not be null') + const failSpy = jest.spyOn(retry, 'failedConnection') + + const identify: Identify = { + type: InternalMessageType.identity, + payload: { + identity: privateIdentityToIdentity(other), + version: 'sdk/2/cli', + port: peer.port, + }, + } + peer.onMessage.emit(identify, connection) + + expect(closeSpy).toBeCalled() + expect(failSpy).toBeCalledTimes(1) + expect(pm.peers.length).toBe(1) + expect(pm.identifiedPeers.size).toBe(0) + }) + + it('Closes the connection when an identity message with an invalid public key is sent', () => { + const pm = new PeerManager(mockLocalPeer()) + + const { peer, connection } = getWaitingForIdentityPeer(pm) + + expect(pm.peers.length).toBe(1) + const closeSpy = jest.spyOn(connection, 'close') + const retry = peer.getConnectionRetry( + ConnectionType.WebSocket, + ConnectionDirection.Outbound, + ) + if (retry == null) throw new Error('Retry must not be null') + const failSpy = jest.spyOn(retry, 'failedConnection') + + const identify: Identify = { + type: InternalMessageType.identity, + payload: { + identity: 'test', + 
version: 'sdk/1/cli', + port: peer.port, + }, + } + peer.onMessage.emit(identify, connection) + expect(closeSpy).toBeCalled() + expect(failSpy).toBeCalledTimes(1) + expect(pm.peers.length).toBe(1) + expect(pm.identifiedPeers.size).toBe(0) + }) + + it('Closes the connection if an unidentified peer returns the local identity', () => { + const localIdentity = mockPrivateIdentity('local') + const pm = new PeerManager(mockLocalPeer({ identity: localIdentity })) + + expect(pm.identifiedPeers.size).toBe(0) + expect(pm.peers.length).toBe(0) + + const { connection } = getWaitingForIdentityPeer(pm) + + const identify: Identify = { + type: InternalMessageType.identity, + payload: { + identity: privateIdentityToIdentity(localIdentity), + port: 9033, + version: 'sdk/1/cli', + }, + } + connection.onMessage.emit(identify) + + expect(connection.state).toEqual({ + type: 'DISCONNECTED', + }) + + expect(pm.peers.length).toBe(0) + expect(pm.identifiedPeers.size).toBe(0) + }) + + it('Closes the connection if an identified peer returns the local identity', () => { + const localIdentity = mockPrivateIdentity('local') + const pm = new PeerManager(mockLocalPeer({ identity: localIdentity })) + + const { peer: peer1 } = getConnectedPeer(pm, 'peer1') + + peer1.close() + + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + peer1.logger, + ) + connection.setState({ type: 'WAITING_FOR_IDENTITY' }) + peer1.setWebSocketConnection(connection) + expect(peer1.state.identity).toBe(peer1.getIdentityOrThrow()) + + // Spy on connectionRetry.failedConnection + const retry = peer1.getConnectionRetry( + ConnectionType.WebSocket, + ConnectionDirection.Outbound, + ) + if (retry === null) throw new Error('Retry must exist') + + const identify: Identify = { + type: InternalMessageType.identity, + payload: { + identity: privateIdentityToIdentity(localIdentity), + port: 9033, + version: 'sdk/1/cli', + }, + } + connection.onMessage.emit(identify) + + // Peer 1 should be 
disconnected and WS connection info removed + expect(connection.state).toEqual({ + type: 'DISCONNECTED', + }) + expect(peer1.state).toEqual({ + type: 'DISCONNECTED', + identity: peer1.getIdentityOrThrow(), + }) + expect(peer1.port).toBeNull() + expect(peer1.address).toBeNull() + expect(retry.willNeverRetryConnecting).toBe(true) + + // The peer should be disposed, since there's no alternative way to connect to it + expect(pm.identifiedPeers.size).toBe(0) + expect(pm.peers.length).toBe(0) + }) + + it('Moves the connection to another peer if it returns a different identity', () => { + const peer1Identity = mockIdentity('peer1') + const peer2Identity = mockIdentity('peer2') + const pm = new PeerManager(mockLocalPeer()) + + const { peer: peer1 } = getConnectedPeer(pm, peer1Identity) + + peer1.close() + + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Outbound, + peer1.logger, + ) + connection.setState({ type: 'WAITING_FOR_IDENTITY' }) + peer1.setWebSocketConnection(connection) + + const identify: Identify = { + type: InternalMessageType.identity, + payload: { + identity: peer2Identity, + port: peer1.port, + version: 'sdk/1/cli', + }, + } + connection.onMessage.emit(identify) + + // Should have 2 verified peers + expect(pm.identifiedPeers.size).toBe(2) + expect(pm.peers.length).toBe(2) + + // Peer 1 should be disconnected and WS connection info removed + expect(peer1.state).toEqual({ + type: 'DISCONNECTED', + identity: peer1Identity, + }) + expect(peer1.port).toBeNull() + expect(peer1.address).toBeNull() + expect( + peer1.getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + ?.willNeverRetryConnecting, + ).toBe(true) + + const peer2 = pm.getPeer(peer2Identity) + expect(peer2?.address).toBe('testuri.com') + expect(peer2?.port).toBe(9033) + expect(peer2?.state).toEqual({ + type: 'CONNECTED', + connections: { webSocket: connection }, + identity: peer2Identity, + }) + }) + + it('Closes the connection if the peer has 
disconnectUntil set', () => { + const localIdentity = mockPrivateIdentity('local') + const peerIdentity = mockIdentity('peer') + const pm = new PeerManager(mockLocalPeer({ identity: localIdentity })) + + const { peer } = getConnectedPeer(pm, peerIdentity) + peer.close() + expect(peer.state).toEqual({ type: 'DISCONNECTED', identity: peerIdentity }) + peer.localRequestedDisconnectUntil = Number.MAX_SAFE_INTEGER + + const { connection } = getWaitingForIdentityPeer(pm) + + const sendSpy = jest.spyOn(connection, 'send') + const id: Identify = { + type: InternalMessageType.identity, + payload: { + identity: peerIdentity, + version: 'sdk/1/cli', + port: 9033, + }, + } + connection.onMessage.emit(id) + + const response: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + sourceIdentity: privateIdentityToIdentity(localIdentity), + destinationIdentity: peerIdentity, + reason: DisconnectingReason.Congested, + disconnectUntil: peer.localRequestedDisconnectUntil, + }, + } + expect(sendSpy).toBeCalledWith(response) + + expect(connection.state).toEqual({ + type: 'DISCONNECTED', + }) + }) + }) + + describe('Message: SignalRequest', () => { + it('Forwards SignalRequest message intended for another peer', () => { + const pm = new PeerManager(mockLocalPeer()) + + const { peer: destinationPeer } = getConnectedPeer(pm, webRtcCannotInitiateIdentity()) + const { connection: sourcePeerConnection, peer: sourcePeer } = getConnectedPeer( + pm, + webRtcCanInitiateIdentity(), + ) + + expect( + canInitiateWebRTC( + sourcePeer.getIdentityOrThrow(), + destinationPeer.getIdentityOrThrow(), + ), + ).toBe(false) + + const signal: SignalRequest = { + type: InternalMessageType.signalRequest, + payload: { + sourceIdentity: sourcePeer.getIdentityOrThrow(), + destinationIdentity: destinationPeer.getIdentityOrThrow(), + }, + } + + const sendSpy = jest.spyOn(destinationPeer, 'send') + sourcePeer.onMessage.emit(signal, sourcePeerConnection) + 
expect(sendSpy).toBeCalledWith(signal) + }) + + it('Drops SignalRequest message originating from an different peer than sourceIdentity', () => { + const pm = new PeerManager(mockLocalPeer()) + + const { peer: peer1 } = getConnectedPeer(pm) + const { peer: peer2 } = getConnectedPeer(pm) + const { connection: peer3Connection, peer: peer3 } = getConnectedPeer(pm) + + const signal: SignalRequest = { + type: InternalMessageType.signalRequest, + payload: { + sourceIdentity: peer1.getIdentityOrThrow(), + destinationIdentity: peer2.getIdentityOrThrow(), + }, + } + + const sendSpy1 = jest.spyOn(peer1, 'send') + const sendSpy2 = jest.spyOn(peer2, 'send') + + peer3.onMessage.emit(signal, peer3Connection) + expect(sendSpy1).not.toBeCalled() + expect(sendSpy2).not.toBeCalled() + }) + + it('reject SignalRequest when source peer should initiate', () => { + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + const initWebRtcConnectionMock = jest.fn() + pm['initWebRtcConnection'] = initWebRtcConnectionMock + + const { peer, connection } = getConnectedPeer(pm, webRtcCannotInitiateIdentity()) + + expect(canInitiateWebRTC(peer.getIdentityOrThrow(), pm.localPeer.publicIdentity)).toBe( + true, + ) + + // Emit the signaling message + const message: SignalRequest = { + type: InternalMessageType.signalRequest, + payload: { + sourceIdentity: peer.getIdentityOrThrow(), + destinationIdentity: pm.localPeer.publicIdentity, + }, + } + + peer.onMessage.emit(message, connection) + expect(initWebRtcConnectionMock).toBeCalledTimes(0) + }) + + it('Initiates webRTC connection when request intended for local peer', () => { + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + const initWebRtcConnectionMock = jest.fn() + pm['initWebRtcConnection'] = initWebRtcConnectionMock + + const { peer, connection } = getConnectedPeer(pm, webRtcCanInitiateIdentity()) + + expect(canInitiateWebRTC(peer.getIdentityOrThrow(), pm.localPeer.publicIdentity)).toBe( 
+ false, + ) + + // Emit the signaling message + const message: SignalRequest = { + type: InternalMessageType.signalRequest, + payload: { + sourceIdentity: peer.getIdentityOrThrow(), + destinationIdentity: pm.localPeer.publicIdentity, + }, + } + + peer.onMessage.emit(message, connection) + expect(initWebRtcConnectionMock).toBeCalledTimes(1) + expect(initWebRtcConnectionMock).toBeCalledWith(peer, peer, true) + }) + + it('Sends a disconnect message if we are at max peers', () => { + const pm = new PeerManager( + mockLocalPeer({ identity: webRtcLocalIdentity() }), + undefined, + undefined, + 1, + ) + + const { connection: peer1Connection, peer: peer1 } = getConnectedPeer(pm, 'peer') + + const message: SignalRequest = { + type: InternalMessageType.signalRequest, + payload: { + sourceIdentity: webRtcCanInitiateIdentity(), + destinationIdentity: pm.localPeer.publicIdentity, + }, + } + + const sendSpy = jest.spyOn(peer1, 'send') + + peer1.onMessage.emit(message, peer1Connection) + + const reply: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + disconnectUntil: expect.any(Number), + reason: DisconnectingReason.Congested, + sourceIdentity: pm.localPeer.publicIdentity, + destinationIdentity: webRtcCanInitiateIdentity(), + }, + } + + expect(sendSpy).toBeCalledWith(reply) + }) + + it('Does not send a disconnect message if we are at max peers but we have an existing connection to the peer', () => { + const pm = new PeerManager( + mockLocalPeer({ identity: webRtcLocalIdentity() }), + undefined, + undefined, + 1, + ) + + const { connection: peer1Connection, peer: peer1 } = getConnectedPeer(pm, 'peer') + getConnectedPeer(pm, webRtcCanInitiateIdentity()) + + const message: SignalRequest = { + type: InternalMessageType.signalRequest, + payload: { + sourceIdentity: webRtcCanInitiateIdentity(), + destinationIdentity: pm.localPeer.publicIdentity, + }, + } + + const sendSpy = jest.spyOn(peer1, 'send') + + peer1.onMessage.emit(message, peer1Connection) + 
+ expect(sendSpy).not.toBeCalled() + }) + }) + + describe('Message: Signal', () => { + it('Forwards signaling messages intended for another peer', () => { + const peer1Identity = mockIdentity('peer1') + const peer2Identity = mockIdentity('peer2') + const pm = new PeerManager(mockLocalPeer()) + + const { connection: peer1Connection, peer: peer1 } = getConnectedPeer(pm, peer1Identity) + const { peer: peer2 } = getConnectedPeer(pm, peer2Identity) + + const signal: Signal = { + type: InternalMessageType.signal, + payload: { + sourceIdentity: peer1Identity, + destinationIdentity: peer2Identity, + nonce: '', + signal: '', + }, + } + + const sendSpy = jest.spyOn(peer2, 'send') + peer1.onMessage.emit(signal, peer1Connection) + expect(sendSpy).toBeCalledWith(signal) + }) + + it('Drops signaling messages originating from an different peer than sourceIdentity', () => { + const peer1Identity = mockIdentity('peer1') + const peer2Identity = mockIdentity('peer2') + const peer3Identity = mockIdentity('peer3') + const pm = new PeerManager(mockLocalPeer()) + + const { peer: peer1 } = getConnectedPeer(pm, peer1Identity) + const { peer: peer2 } = getConnectedPeer(pm, peer2Identity) + const { connection: peer3Connection, peer: peer3 } = getConnectedPeer(pm, peer3Identity) + + const signal: Signal = { + type: InternalMessageType.signal, + payload: { + sourceIdentity: peer1Identity, + destinationIdentity: peer2Identity, + nonce: '', + signal: '', + }, + } + + const sendSpy1 = jest.spyOn(peer1, 'send') + const sendSpy2 = jest.spyOn(peer2, 'send') + peer3.onMessage.emit(signal, peer3Connection) + expect(sendSpy1).not.toBeCalled() + expect(sendSpy2).not.toBeCalled() + }) + + it('Sends a disconnect message if we are at max peers', () => { + const pm = new PeerManager( + mockLocalPeer({ identity: webRtcLocalIdentity() }), + undefined, + undefined, + 1, + ) + + const { connection: peer1Connection, peer: peer1 } = getConnectedPeer(pm, 'peer') + + const message: Signal = { + type: 
InternalMessageType.signal, + payload: { + sourceIdentity: webRtcCannotInitiateIdentity(), + destinationIdentity: pm.localPeer.publicIdentity, + nonce: '', + signal: '', + }, + } + + const sendSpy = jest.spyOn(peer1, 'send') + + peer1.onMessage.emit(message, peer1Connection) + + const reply: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + disconnectUntil: expect.any(Number), + reason: DisconnectingReason.Congested, + sourceIdentity: pm.localPeer.publicIdentity, + destinationIdentity: webRtcCannotInitiateIdentity(), + }, + } + + expect(sendSpy).toBeCalledWith(reply) + }) + + it('Does not send a disconnect message if we are at max peers but we have an existing connection to the peer', () => { + const pm = new PeerManager( + mockLocalPeer({ identity: webRtcLocalIdentity() }), + undefined, + undefined, + 1, + ) + + const { connection: peer1Connection, peer: peer1 } = getConnectedPeer(pm, 'peer') + getConnectedPeer(pm, webRtcCannotInitiateIdentity()) + + const message: Signal = { + type: InternalMessageType.signal, + payload: { + sourceIdentity: webRtcCannotInitiateIdentity(), + destinationIdentity: pm.localPeer.publicIdentity, + nonce: '', + signal: '', + }, + } + + const sendSpy = jest.spyOn(peer1, 'send') + + peer1.onMessage.emit(message, peer1Connection) + + expect(sendSpy).not.toBeCalled() + }) + + it('Decrypts signaling data intended for local peer', () => { + const brokeringPeerIdentity = mockPrivateIdentity('brokering') + + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + + const { connection, brokeringConnection, brokeringPeer } = getSignalingWebRtcPeer( + pm, + privateIdentityToIdentity(brokeringPeerIdentity), + webRtcCanInitiateIdentity(), + ) + + const signalSpy = jest.spyOn(connection, 'signal') + + // Emit the signaling message + const signal: Signal = { + type: InternalMessageType.signal, + payload: { + sourceIdentity: webRtcCanInitiateIdentity(), + destinationIdentity: 
privateIdentityToIdentity(webRtcLocalIdentity()), + nonce: 'boxMessageNonce', + signal: 'boxMessageMessage', + }, + } + brokeringPeer.onMessage.emit(signal, brokeringConnection) + + expect(signalSpy).toBeCalledTimes(1) + expect(signalSpy).toBeCalledWith({ + type: 'offer', + }) + }) + + it('Disconnects if decrypting signaling data fails', () => { + const brokeringPeerIdentity = mockIdentity('brokering') + + // Return null from the unboxMessage function + mocked(encryption.unboxMessage).mockReturnValueOnce(null) + + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + const { connection, brokeringConnection, brokeringPeer } = getSignalingWebRtcPeer( + pm, + brokeringPeerIdentity, + webRtcCanInitiateIdentity(), + ) + + const signalSpy = jest.spyOn(connection, 'signal') + const closeSpy = jest.spyOn(connection, 'close') + + // Emit the signaling message + const signal: Signal = { + type: InternalMessageType.signal, + payload: { + sourceIdentity: webRtcCanInitiateIdentity(), + destinationIdentity: privateIdentityToIdentity(webRtcLocalIdentity()), + nonce: 'boxMessageNonce', + signal: 'boxMessageMessage', + }, + } + brokeringPeer.onMessage.emit(signal, brokeringConnection) + + expect(signalSpy).not.toBeCalled() + expect(closeSpy).toBeCalled() + }) + + it('Disconnects if decoding signaling data fails', () => { + const brokeringPeerIdentity = mockIdentity('brokering') + + // Return something that's not JSON from the unboxMessage function + mocked(encryption.unboxMessage).mockReturnValueOnce('test') + + const pm = new PeerManager(mockLocalPeer({ identity: webRtcLocalIdentity() })) + const { connection, brokeringConnection, brokeringPeer } = getSignalingWebRtcPeer( + pm, + brokeringPeerIdentity, + webRtcCanInitiateIdentity(), + ) + + const signalSpy = jest.spyOn(connection, 'signal') + const closeSpy = jest.spyOn(connection, 'close') + + // Emit the signaling message + const signal: Signal = { + type: InternalMessageType.signal, + payload: { + 
sourceIdentity: webRtcCanInitiateIdentity(), + destinationIdentity: privateIdentityToIdentity(webRtcLocalIdentity()), + nonce: 'boxMessageNonce', + signal: 'boxMessageMessage', + }, + } + brokeringPeer.onMessage.emit(signal, brokeringConnection) + + expect(signalSpy).not.toBeCalled() + expect(closeSpy).toBeCalled() + }) + }) + + describe('Message: PeerList', () => { + it('Does not add local identity to knownPeers', () => { + const localIdentity = mockPrivateIdentity('local') + const peerIdentity = mockIdentity('peer') + + const pm = new PeerManager(mockLocalPeer({ identity: localIdentity })) + + const { connection, peer } = getConnectedPeer(pm, peerIdentity) + + expect(peer.knownPeers.size).toBe(0) + + const peerList: PeerList = { + type: InternalMessageType.peerList, + payload: { + connectedPeers: [ + { + identity: privateIdentityToIdentity(localIdentity), + address: peer.address, + port: peer.port, + }, + ], + }, + } + peer.onMessage.emit(peerList, connection) + expect(peer.knownPeers.size).toBe(0) + }) + + it('Does not emit onKnownPeersChanged when peer list stays the same', () => { + const peerIdentity = mockIdentity('peer') + const newPeerIdentity = mockIdentity('new') + + const pm = new PeerManager(mockLocalPeer()) + + const { connection, peer } = getConnectedPeer(pm, peerIdentity) + + expect(pm.peers.length).toBe(1) + expect(pm.identifiedPeers.size).toBe(1) + expect(peer.knownPeers.size).toBe(0) + + const peerList: PeerList = { + type: InternalMessageType.peerList, + payload: { + connectedPeers: [ + { + identity: newPeerIdentity, + address: peer.address, + port: peer.port, + }, + ], + }, + } + const onKnownPeersChangedSpy = jest.spyOn(peer.onKnownPeersChanged, 'emit') + peer.onMessage.emit(peerList, connection) + peer.onMessage.emit(peerList, connection) + + expect(onKnownPeersChangedSpy).toBeCalledTimes(1) + }) + + it('Links peers when adding a new known peer', () => { + const peerIdentity = mockIdentity('peer') + const newPeerIdentity = mockIdentity('new') 
+ + const pm = new PeerManager(mockLocalPeer()) + + const { connection, peer } = getConnectedPeer(pm, peerIdentity) + + expect(pm.peers.length).toBe(1) + expect(pm.identifiedPeers.size).toBe(1) + expect(peer.knownPeers.size).toBe(0) + + // Clear onKnownPeersChanged handlers to avoid any side effects + pm.onKnownPeersChanged.clear() + + const peerList: PeerList = { + type: InternalMessageType.peerList, + payload: { + connectedPeers: [ + { + identity: newPeerIdentity, + address: peer.address, + port: peer.port, + }, + ], + }, + } + peer.onMessage.emit(peerList, connection) + + expect(peer.knownPeers.size).toBe(1) + expect(pm.peers.length).toBe(2) + expect(pm.identifiedPeers.size).toBe(2) + + const newPeer = peer.knownPeers.get(newPeerIdentity) + expect(newPeer).toBeDefined() + if (!newPeer) throw new Error('Peer must be defined') + expect(newPeer.state).toEqual({ + type: 'DISCONNECTED', + identity: newPeerIdentity, + }) + expect(newPeer.knownPeers.size).toBe(1) + expect(newPeer.knownPeers.get(peerIdentity)).toBe(peer) + + expect(pm.identifiedPeers.size).toBe(2) + expect(pm.identifiedPeers.get(peerIdentity)).toBe(peer) + expect(pm.identifiedPeers.get(newPeerIdentity)).toBe(newPeer) + }) + + it(`Disposes of peers if they are no longer linked to the network`, () => { + const peerIdentity = mockIdentity('peer') + const newPeerIdentity = mockIdentity('new') + + const pm = new PeerManager(mockLocalPeer()) + + const { connection, peer } = getConnectedPeer(pm, peerIdentity) + + expect(pm.peers.length).toBe(1) + expect(pm.identifiedPeers.size).toBe(1) + expect(peer.knownPeers.size).toBe(0) + + // Clear onKnownPeersChanged handlers to avoid any side effects + pm.onKnownPeersChanged.clear() + + const peerList: PeerList = { + type: InternalMessageType.peerList, + payload: { + connectedPeers: [ + { + identity: newPeerIdentity, + address: peer.address, + port: peer.port, + }, + ], + }, + } + peer.onMessage.emit(peerList, connection) + + expect(peer.knownPeers.size).toBe(1) + 
expect(pm.peers.length).toBe(2) + expect(pm.identifiedPeers.size).toBe(2) + + // Indicate that we can't initiate a WebSocket connection to the new peer + const newPeer = pm.getPeerOrThrow(newPeerIdentity) + newPeer + .getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + ?.neverRetryConnecting() + + // Send another peer list without that peer + const newPeerList: PeerList = { + type: InternalMessageType.peerList, + payload: { + connectedPeers: [], + }, + } + peer.onMessage.emit(newPeerList, connection) + + // newPeer should be disposed + expect(pm.peers).toHaveLength(1) + expect(pm.identifiedPeers.size).toBe(1) + expect(pm.identifiedPeers.get(peerIdentity)).toBe(peer) + expect(pm.identifiedPeers.get(newPeerIdentity)).toBeUndefined() + }) + }) + + describe('Message: Disconnect', () => { + it('Should set peerRequestedDisconnectUntil on unidentified Peer', () => { + const localPeer = mockLocalPeer() + const pm = new PeerManager(localPeer) + const peerIdentity = mockIdentity('peer') + const { peer, connection } = getConnectingPeer(pm) + expect(peer.peerRequestedDisconnectUntil).toBeNull() + + const disconnectMessage: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + sourceIdentity: peerIdentity, + destinationIdentity: localPeer.publicIdentity, + disconnectUntil: Number.MAX_SAFE_INTEGER, + reason: DisconnectingReason.ShuttingDown, + }, + } + + connection.onMessage.emit(disconnectMessage) + + // Even though identity is included in the message, it shouldn't be set on the + // peer before an Identity message is received. 
+ expect(peer.state.identity).toBeNull() + + expect(peer.peerRequestedDisconnectUntil).toBe(Number.MAX_SAFE_INTEGER) + expect(peer.state.type).toEqual('DISCONNECTED') + }) + + it('Should set peerRequestedDisconnectUntil on CONNECTED Peer', () => { + const localPeer = mockLocalPeer() + const pm = new PeerManager(localPeer) + const peerIdentity = mockIdentity('peer') + const { peer, connection } = getConnectedPeer(pm, peerIdentity) + expect(peer.peerRequestedDisconnectUntil).toBeNull() + + const disconnectMessage: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + sourceIdentity: peerIdentity, + destinationIdentity: localPeer.publicIdentity, + disconnectUntil: Number.MAX_SAFE_INTEGER, + reason: DisconnectingReason.ShuttingDown, + }, + } + + connection.onMessage.emit(disconnectMessage) + + expect(peer.peerRequestedDisconnectUntil).toBe(Number.MAX_SAFE_INTEGER) + expect(peer.state.type).toEqual('DISCONNECTED') + }) + }) +}) diff --git a/ironfish/src/network/peers/peerManager.ts b/ironfish/src/network/peers/peerManager.ts new file mode 100644 index 0000000000..b72fb17687 --- /dev/null +++ b/ironfish/src/network/peers/peerManager.ts @@ -0,0 +1,1310 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import type { SignalData } from 'simple-peer' +import WSWebSocket from 'ws' + +import { Event } from '../../event' +import { createRootLogger, Logger } from '../../logger' +import { MetricsMonitor } from '../../metrics' +import { + canInitiateWebRTC, + canKeepDuplicateConnection, + Identity, + isIdentity, +} from '../identity' +import { + DisconnectingMessage, + DisconnectingReason, + IncomingPeerMessage, + InternalMessageType, + isDisconnectingMessage, + isIdentify, + isMessage, + isPeerList, + isSignal, + isSignalRequest, + LooseMessage, + PeerList, + Signal, + SignalRequest, +} from '../messages' +import { + WebRtcConnection, + WebSocketConnection, + Connection, + ConnectionDirection, + ConnectionType, + NetworkError, +} from './connections' +import { LocalPeer } from './localPeer' +import { Peer } from './peer' +import { parseUrl } from '../utils' +import { ArrayUtils } from '../../utils' +import { parseVersion, renderVersion, versionsAreCompatible } from '../version' + +/** + * PeerManager keeps the state of Peers and their underlying connections up to date, + * determines how to establish a connection to a given Peer, and provides an event + * bus for Peers, e.g. for listening to incoming messages from all connected peers. + */ +export class PeerManager { + private readonly logger: Logger + private readonly metrics: MetricsMonitor + + /** + * Stores data related to the user's peer, like the identity and version + */ + public readonly localPeer: LocalPeer + + /** + * Map of identities to peers for every known identified peer in the network. + */ + readonly identifiedPeers: Map = new Map() + + /** + * List of all peers, including both unidentified and identified. 
+ */ + peers: Array = [] + + /** + * setInterval handle for broadcastPeerList, which sends out the peer list to all + * connected peers + */ + private broadcastPeerListHandle: ReturnType | undefined + + /** + * setInterval handle for peer disposal, which removes peers from the list that we + * no longer care about + */ + private disposePeersHandle: ReturnType | undefined + + /** + * Event fired when a new connection is successfully opened. Sends some identifying + * information about the peer. + * + * This event is fired regardless of whether or not we initiated the connection. + */ + readonly onConnect: Event<[Peer]> = new Event() + + /** + * Event fired when an identified peer is disconnected for some reason. + */ + readonly onDisconnect: Event<[Peer]> = new Event() + + /** + * Event fired for every new incoming message that needs to be processed + * by the application layer. + * + * Note that the `Peer` is the peer that sent it to us, + * not necessarily the original source. + */ + readonly onMessage: Event<[Peer, IncomingPeerMessage]> = new Event() + + /** + * Event fired when a peer's knownPeers list changes. + */ + readonly onKnownPeersChanged: Event<[Peer]> = new Event() + + /** + * Event fired when a peer enters or leaves the CONNECTED state. + */ + readonly onConnectedPeersChanged: Event<[]> = new Event() + + /** + * The maximum number of peers allowed to be in the CONNECTED or CONNECTING state. + */ + readonly maxPeers: number + + /** + * Stops establishing connections to DISCONNECTED peers when at or above this number. + */ + readonly targetPeers: number + + constructor( + localPeer: LocalPeer, + logger: Logger = createRootLogger(), + metrics?: MetricsMonitor, + maxPeers = 10000, + targetPeers = 50, + ) { + this.logger = logger.withTag('peermanager') + this.metrics = metrics || new MetricsMonitor(this.logger) + this.localPeer = localPeer + this.maxPeers = maxPeers + this.targetPeers = targetPeers + } + + /** + * Connect to a websocket by its uri. 
Establish a connection and solicit + * the server's Identity. + */ + connectToWebSocketAddress(uri: string, isWhitelisted = false): Peer { + const url = parseUrl(uri) + + if (!url.hostname) { + throw new Error(`Could not connect to ${uri} because hostname was not parseable`) + } + + const peer = this.getOrCreatePeer(null) + peer.setWebSocketAddress(url.hostname, url.port) + peer.isWhitelisted = isWhitelisted + this.connectToWebSocket(peer) + return peer + } + + /** + * Connect to a peer using WebSockets + * */ + connectToWebSocket(peer: Peer): boolean { + if (!this.canConnectToWebSocket(peer)) return false + + // If we're trying to connect to the peer, we don't care about limiting the peer's connections to us + peer.localRequestedDisconnectUntil = null + peer.localRequestedDisconnectReason = null + + // Clear out peerRequestedDisconnect if we passed it + peer.peerRequestedDisconnectUntil = null + peer.peerRequestedDisconnectReason = null + + const address = peer.getWebSocketAddress() + if (!address) { + peer + .getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + .failedConnection(peer.isWhitelisted) + + return false + } + + this.initWebSocketConnection( + peer, + new this.localPeer.webSocket(address), + ConnectionDirection.Outbound, + peer.address, + peer.port, + ) + + return true + } + + /** + * Connect to a peer using WebRTC through another peer + * */ + connectToWebRTC(peer: Peer): boolean { + if (!this.canConnectToWebRTC(peer)) return false + + // If we're trying to connect to the peer, we don't care about limiting the peer's connections to us + peer.localRequestedDisconnectUntil = null + peer.localRequestedDisconnectReason = null + + // Clear out peerRequestedDisconnect if we passed it + peer.peerRequestedDisconnectUntil = null + peer.peerRequestedDisconnectReason = null + + if (peer.state.identity === null) { + peer + .getConnectionRetry(ConnectionType.WebRtc, ConnectionDirection.Outbound) + .failedConnection(peer.isWhitelisted) + + 
return false + } + + const brokeringPeer = this.getBrokeringPeer(peer) + + if (brokeringPeer === null) { + this.logger.debug( + `Attempted to establish a WebRTC connection to ${peer.displayName}, but couldn't find a peer to broker the connection.`, + ) + + peer + .getConnectionRetry(ConnectionType.WebRtc, ConnectionDirection.Outbound) + .failedConnection(peer.isWhitelisted) + + // If we don't have any brokering peers try disposing the peers + this.tryDisposePeer(peer) + return false + } + + if (canInitiateWebRTC(this.localPeer.publicIdentity, peer.state.identity)) { + this.initWebRtcConnection(brokeringPeer, peer, true) + return true + } + + const signal: SignalRequest = { + type: InternalMessageType.signalRequest, + payload: { + sourceIdentity: this.localPeer.publicIdentity, + destinationIdentity: peer.state.identity, + }, + } + + const connection = this.initWebRtcConnection(brokeringPeer, peer, false) + connection.setState({ type: 'REQUEST_SIGNALING' }) + brokeringPeer.send(signal) + return true + } + + createPeerFromInboundWebSocketConnection( + webSocket: WebSocket | WSWebSocket, + address: string | null, + ): Peer { + const peer = this.getOrCreatePeer(null) + + let hostname: string | null = null + let port: number | null = null + + if (address) { + const url = parseUrl(address) + if (url.hostname) { + hostname = url.hostname + port = url.port + } + } + + this.initWebSocketConnection(peer, webSocket, ConnectionDirection.Inbound, hostname, port) + + return peer + } + + /** + * Perform WebSocket-specific connection setup. 
+ */ + private initWebSocketConnection( + peer: Peer, + ws: WebSocket | WSWebSocket, + direction: ConnectionDirection, + hostname: string | null, + port: number | null, + ): WebSocketConnection { + const connection = new WebSocketConnection(ws, direction, this.logger, this.metrics, { + simulateLatency: this.localPeer.simulateLatency, + hostname: hostname || undefined, + port: port || undefined, + }) + + this.initConnectionHandlers(peer, connection) + peer.setWebSocketConnection(connection) + + return connection + } + + /** + * Perform WebRTC-specific connection setup + * @param brokeringPeer The peer used to exchange signaling messages between us and `peer` + * @param peer The peer to establish a connection with + * @param initiator Set to true if we are initiating a connection with `peer` + */ + private initWebRtcConnection( + brokeringPeer: Peer, + peer: Peer, + initiator: boolean, + ): WebRtcConnection { + const connection = new WebRtcConnection( + initiator, + this.localPeer.webRtc, + this.logger, + this.metrics, + { simulateLatency: this.localPeer.simulateLatency }, + ) + + connection.onSignal.on((data) => { + if (peer.state.identity === null) { + const message = 'Cannot establish a WebRTC connection without a peer identity' + this.logger.debug(message) + connection.close(new NetworkError(message)) + return + } + const { nonce, boxedMessage } = this.localPeer.boxMessage( + JSON.stringify(data), + peer.state.identity, + ) + const signal: Signal = { + type: InternalMessageType.signal, + payload: { + sourceIdentity: this.localPeer.publicIdentity, + destinationIdentity: peer.state.identity, + nonce: nonce, + signal: boxedMessage, + }, + } + brokeringPeer.send(signal) + }) + + this.initConnectionHandlers(peer, connection) + peer.setWebRtcConnection(connection) + + return connection + } + + /** + * Set up event handlers that are common among all connection types. + * @param connection An instance of a Connection. 
+ */ + private initConnectionHandlers(peer: Peer, connection: Connection) { + if (connection.state.type === 'WAITING_FOR_IDENTITY') { + connection.send(this.localPeer.getIdentifyMessage()) + } else { + const handler = () => { + if (connection.state.type === 'WAITING_FOR_IDENTITY') { + connection.send(this.localPeer.getIdentifyMessage()) + connection.onStateChanged.off(handler) + } + } + connection.onStateChanged.on(handler) + } + } + + canConnectToWebSocket(peer: Peer, now = Date.now()): boolean { + const canEstablishNewConnection = + peer.state.type !== 'DISCONNECTED' || + this.getPeersWithConnection().length < this.targetPeers + + const disconnectOk = + peer.peerRequestedDisconnectUntil === null || now >= peer.peerRequestedDisconnectUntil + + const hasNoConnection = + peer.state.type === 'DISCONNECTED' || peer.state.connections.webSocket == null + + const retryOk = + peer.getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + ?.canConnect || false + + return ( + canEstablishNewConnection && + disconnectOk && + hasNoConnection && + retryOk && + peer.address != null + ) + } + + canConnectToWebRTC(peer: Peer, now = Date.now()): boolean { + const canEstablishNewConnection = + peer.state.type !== 'DISCONNECTED' || + this.getPeersWithConnection().length < this.targetPeers + + const disconnectOk = + peer.peerRequestedDisconnectUntil === null || now >= peer.peerRequestedDisconnectUntil + + const hasNoConnection = + peer.state.type === 'DISCONNECTED' || peer.state.connections.webRtc == null + + const retryOk = + peer.getConnectionRetry(ConnectionType.WebRtc, ConnectionDirection.Outbound) + ?.canConnect || false + + return ( + canEstablishNewConnection && + disconnectOk && + hasNoConnection && + retryOk && + peer.state.identity != null + ) + } + + /** + * Initiate a disconnection from another peer. 
+ * @param peer The peer to disconnect from + * @param reason The reason for disconnecting from the peer + * @param until Stay disconnected from the peer until after this timestamp + */ + disconnect(peer: Peer, reason: DisconnectingReason, until: number): void { + peer.localRequestedDisconnectReason = reason + peer.localRequestedDisconnectUntil = until + + if (peer.state.type === 'DISCONNECTED') { + return + } + + const message: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + sourceIdentity: this.localPeer.publicIdentity, + destinationIdentity: peer.state.identity, + reason, + disconnectUntil: until, + }, + } + + const canSend = (connection: Connection): boolean => { + return ( + connection.state.type === 'WAITING_FOR_IDENTITY' || + connection.state.type === 'CONNECTED' + ) + } + + if (peer.state.connections.webRtc && canSend(peer.state.connections.webRtc)) { + peer.state.connections.webRtc.send(message) + } + + if (peer.state.connections.webSocket && canSend(peer.state.connections.webSocket)) { + peer.state.connections.webSocket.send(message) + } + + peer.close() + } + + getPeersWithConnection(): ReadonlyArray { + return this.peers.filter((p) => p.state.type !== 'DISCONNECTED') + } + + getConnectedPeers(): ReadonlyArray { + return [...this.identifiedPeers.values()].filter((p) => { + return p.state.type === 'CONNECTED' + }) + } + + /** + * True if we should reject connections from disconnected Peers. 
+ */ + shouldRejectDisconnectedPeers(): boolean { + return this.getPeersWithConnection().length >= this.maxPeers + } + + /** For a given peer, try to find a peer that's connected to that peer + * including itself to broker a WebRTC connection to it + * */ + private getBrokeringPeer(peer: Peer): Peer | null { + if (peer.state.type === 'CONNECTED') { + // Use the existing connection to the peer to broker the connection + return peer + } + + if (peer.state.identity === null) { + // Cannot find a brokering peer of an unidentified peer + return null + } + + // Find another peer to broker the connection + const candidates = [] + + // The peer should know of any brokering peer candidates + for (const [_, candidate] of peer.knownPeers) { + if ( + // The brokering peer candidate should be connected to the local peer + candidate.state.type === 'CONNECTED' && + // the brokering peer candidate should also know of the peer + candidate.knownPeers.has(peer.state.identity) + ) { + candidates.push(candidate) + } + } + + if (candidates.length === 0) { + return null + } + + return ArrayUtils.sampleOrThrow(candidates) + } + + /** + * This function puts a peer in the identified peers map and should be called once + * a peer is connected, meaning it has a connection tht has received an identity + */ + private updateIdentifiedPeerMap(peer: Peer): void { + if (peer.state.identity == null) { + this.logger.warn('updateIdentifiedPeerMap called with a Peer with null identity') + return + } + + // If we don't have a Peer in the Map for this identity, set it and be done + const existingPeer = this.identifiedPeers.get(peer.state.identity) + if (!existingPeer || peer === existingPeer) { + this.identifiedPeers.set(peer.state.identity, peer) + return + } + + // Merge the connections from the new peer onto the existing peer. 
We want to keep + // the existing peer since someone may be holding a reference + if (peer.state.type === 'DISCONNECTED') { + this.logger.debug(`Trying to dispose disconnected peer ${peer.displayName}`) + peer.close() + this.tryDisposePeer(peer) + return + } + + if (peer.state.connections.webRtc?.state.type === 'CONNECTED') { + if (existingPeer.state.type !== 'DISCONNECTED' && existingPeer.state.connections.webRtc) { + const error = `Replacing duplicate WebRTC connection on ${existingPeer.displayName}` + this.logger.debug(new NetworkError(error)) + existingPeer + .removeConnection(existingPeer.state.connections.webRtc) + .close(new NetworkError(error)) + } + existingPeer.setWebRtcConnection(peer.state.connections.webRtc) + peer.removeConnection(peer.state.connections.webRtc) + } + + if (peer.state.connections.webSocket?.state.type === 'CONNECTED') { + if ( + existingPeer.state.type !== 'DISCONNECTED' && + existingPeer.state.connections.webSocket + ) { + const error = `Replacing duplicate WebSocket connection on ${existingPeer.displayName}` + this.logger.debug(error) + existingPeer + .removeConnection(existingPeer.state.connections.webSocket) + .close(new NetworkError(error)) + } + existingPeer.setWebSocketConnection(peer.state.connections.webSocket) + peer.removeConnection(peer.state.connections.webSocket) + } + + // Clean up data so that the duplicate peer can be disposed + peer + .getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + ?.neverRetryConnecting() + + this.tryDisposePeer(peer) + } + + /** + * Given an identity, returns the Peer corresponding to that identity, + * or null if no Peer for that identity exists. + * @param identity A peer identity. + */ + getPeer(identity: Identity): Peer | null { + return this.identifiedPeers.get(identity) || null + } + + /** + * Given an identity, fetch a Peer with that identity or throw an error + * @param identity A peer identity. 
+ */ + getPeerOrThrow(identity: Identity): Peer { + const peer = this.identifiedPeers.get(identity) + if (peer != null) { + return peer + } + throw new Error(`No peer found with identity ${identity}`) + } + + /** + * If a null identity is passed, creates a new Peer. If an identity is passed, returns the Peer + * if we already have one with that identity, else creates a new Peer with that identity. + * @param identity The identity of the peer to create, or null if the peer does not yet have one. + */ + getOrCreatePeer(identity: Identity | null): Peer { + // If we already have a Peer with this identity, return it + if (identity !== null) { + const identifiedPeer = this.identifiedPeers.get(identity) + if (identifiedPeer != null) { + return identifiedPeer + } + } + + // Create the new peer + const peer = new Peer(identity, { logger: this.logger }) + + // Add the peer to peers. It's new, so it shouldn't exist there already + this.peers.push(peer) + + // If the peer hasn't been identified, add it to identifiedPeers when the + // peer connects, else do it now + if (peer.state.identity === null) { + const handler = () => { + if (peer.state.type === 'CONNECTED') { + this.updateIdentifiedPeerMap(peer) + peer.onStateChanged.off(handler) + } + } + peer.onStateChanged.on(handler) + } else { + this.updateIdentifiedPeerMap(peer) + } + + // Bind Peer events to PeerManager events + peer.onMessage.on((message, connection) => { + this.handleMessage(peer, connection, message) + }) + + peer.onKnownPeersChanged.on(() => { + this.onKnownPeersChanged.emit(peer) + }) + + peer.onStateChanged.on(({ prevState }) => { + if (prevState.type !== 'CONNECTED' && peer.state.type === 'CONNECTED') { + this.onConnect.emit(peer) + this.onConnectedPeersChanged.emit() + } + if (prevState.type === 'CONNECTED' && peer.state.type !== 'CONNECTED') { + this.onDisconnect.emit(peer) + this.onConnectedPeersChanged.emit() + this.tryDisposePeer(peer) + } + }) + + return peer + } + + /** + * Send a message to a 
peer, dropping the message if unable. + * @param peer The peer identity to send a message to. + * @param message The message to send. + */ + sendTo(peer: Peer, message: LooseMessage): Connection | null { + return peer.send(message) + } + + /** + * Send a message to all connected peers. + */ + broadcast(message: LooseMessage): void { + for (const peer of this.identifiedPeers.values()) { + if (peer.state.type === 'CONNECTED') { + peer.send(message) + } + } + } + + start(): void { + this.broadcastPeerListHandle = setInterval(() => this.broadcastPeerList(), 5000) + this.disposePeersHandle = setInterval(() => this.disposePeers(), 2000) + } + + /** + * Call when shutting down the PeerManager to clean up + * outstanding connections. + */ + stop(): void { + this.broadcastPeerListHandle && clearInterval(this.broadcastPeerListHandle) + this.disposePeersHandle && clearInterval(this.disposePeersHandle) + for (const peer of this.peers) { + this.disconnect(peer, DisconnectingReason.ShuttingDown, 0) + } + } + + /** + * Send the list of peer IDs I am connected to to each of those peers. + * This is expected to be called periodically, both as a keep-alive and + * to help peers keep their view of the network up-to-date. 
+ */ + private broadcastPeerList() { + const connectedPeers = [] + + for (const p of this.identifiedPeers.values()) { + if (p.state.type !== 'CONNECTED') continue + + // Worker nodes are nodes that should not be broadcast because they are + // meant to connect to a single node and perform one function + if (p.isWorker && !this.localPeer.broadcastWorkers) continue + + connectedPeers.push({ + identity: p.state.identity, + name: p.name || undefined, + address: p.address, + port: p.port, + }) + } + + const peerList: PeerList = { + type: InternalMessageType.peerList, + payload: { connectedPeers }, + } + + this.broadcast(peerList) + } + + private disposePeers(): void { + for (const p of this.peers) { + this.tryDisposePeer(p) + } + } + + /** + * Returns true if we successfully cleaned up the Peer and removed it from PeerManager, + * else returns false and does nothing. + * @param peer The peer to evaluate + */ + private tryDisposePeer(peer: Peer) { + const hasAConnectedPeer = [...peer.knownPeers.values()].some( + (p) => p.state.type === 'CONNECTED', + ) + + if ( + peer.state.type === 'DISCONNECTED' && + !hasAConnectedPeer && + peer.getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + ?.willNeverRetryConnecting + ) { + this.logger.debug( + `Disposing of peer with identity ${String(peer.state.identity)} (may be a duplicate)`, + ) + + peer.dispose() + if (peer.state.identity && this.identifiedPeers.get(peer.state.identity) === peer) { + this.identifiedPeers.delete(peer.state.identity) + } + this.peers = this.peers.filter((p) => p !== peer) + + return true + } + return false + } + + /** + * Handler fired whenever we receive any message from a peer. + * + * If it is a signal message we need to forward it to the appropriate + * webrtc peer. + * + * Note that the identity on IncomingPeerMessage is the identity of the + * peer that sent it to us, not the original source. 
+ */ + private handleMessage(peer: Peer, connection: Connection, message: LooseMessage) { + if (isDisconnectingMessage(message)) { + this.handleDisconnectingMessage(peer, message) + } else if (connection.state.type === 'WAITING_FOR_IDENTITY') { + this.handleWaitingForIdentityMessage(peer, connection, message) + } else if (isIdentify(message)) { + this.logger.debug( + `Closing connection to ${peer.displayName} that sent identity ${message.payload.identity} while connection is in state ${connection.state.type}`, + ) + } else if (isSignalRequest(message)) { + this.handleSignalRequestMessage(peer, message) + } else if (isSignal(message)) { + this.handleSignalMessage(peer, message) + } else if (isPeerList(message)) { + this.handlePeerListMessage(message, peer) + } else { + if (peer.state.identity == null) { + const messageType = isMessage(message) ? message.type : 'Unknown' + this.logger.debug( + `Closing connection to unidentified peer that sent an unexpected message: ${messageType}`, + ) + peer.close() + return + } + this.onMessage.emit(peer, { peerIdentity: peer.state.identity, message: message }) + } + } + + private handleDisconnectingMessage(messageSender: Peer, message: DisconnectingMessage) { + if ( + message.payload.destinationIdentity !== this.localPeer.publicIdentity && + message.payload.destinationIdentity !== null + ) { + // Only forward it if the message was received from the same peer as it originated from + if (message.payload.sourceIdentity !== messageSender.state.identity) { + this.logger.debug( + `not forwarding disconnect from ${ + messageSender.displayName + } because the message's source identity (${ + message.payload.sourceIdentity + }) doesn't match the sender's identity (${String(messageSender.state.identity)})`, + ) + return + } + + const destinationPeer = this.getPeer(message.payload.destinationIdentity) + + if (!destinationPeer) { + this.logger.debug( + 'not forwarding disconnect from', + messageSender.displayName, + 'due to unknown peer', + 
message.payload.destinationIdentity, + ) + return + } + + this.sendTo(destinationPeer, message) + return + } + + messageSender.peerRequestedDisconnectReason = message.payload.reason + messageSender.peerRequestedDisconnectUntil = message.payload.disconnectUntil + this.logger.debug( + `${messageSender.displayName} requested we disconnect until ${ + message.payload.disconnectUntil + }. Current time is ${Date.now()}`, + ) + messageSender.close() + } + + /** + * Handle messages received when the peer is in the WAITING_FOR_IDENTITY state. + * + * @param message The message received. + * @param peer The Peer the message was received from. + * @param connection The Connection the message was received from. + */ + private handleWaitingForIdentityMessage( + peer: Peer, + connection: Connection, + message: LooseMessage, + ): void { + // If we receive any message other than an Identity message, close the connection + if (!isIdentify(message)) { + this.logger.debug( + `Disconnecting from ${peer.displayName} - Sent unexpected message ${message.type} while waiting for identity`, + ) + peer.close() + return + } + + const identity = message.payload.identity + const version = parseVersion(message.payload.version) + const port = message.payload.port + const name = message.payload.name || null + + if (!isIdentity(identity)) { + this.logger.debug( + `Disconnecting from ${identity} - Identity does not match expected format`, + ) + peer + .getConnectionRetry(connection.type, connection.direction) + ?.failedConnection(peer.isWhitelisted) + peer.close(new Error(`Identity ${identity} does not match expected format`)) + return + } + + if (!versionsAreCompatible(this.localPeer.version, version)) { + const error = `Peer version ${ + message.payload.version + } is not compatible to ours: ${renderVersion(this.localPeer.version)}` + + this.logger.debug(`Disconnecting from ${identity} - ${error}`) + peer + .getConnectionRetry(connection.type, connection.direction) + 
?.failedConnection(peer.isWhitelisted) + peer.close(new Error(error)) + return + } + + if (name && name.length > 32) { + this.logger.debug( + `Disconnecting from ${identity} - Peer name length exceeds 32: ${name.length}}`, + ) + peer + .getConnectionRetry(connection.type, connection.direction) + ?.failedConnection(peer.isWhitelisted) + peer.close(new Error(`Peer name length exceeds 32: ${name.length}}`)) + return + } + + // If we've connected to ourselves, get rid of the connection and take the address and port off the Peer. + // This can happen if a node stops and starts with a different identity + if (identity === this.localPeer.publicIdentity) { + peer.removeConnection(connection) + peer.getConnectionRetry(connection.type, connection.direction)?.neverRetryConnecting() + + if ( + connection.type === ConnectionType.WebSocket && + connection.direction === ConnectionDirection.Outbound + ) { + peer.setWebSocketAddress(null, null) + } + + const error = `Closing ${connection.type} connection from our own identity` + this.logger.debug(error) + connection.close(new NetworkError(error)) + this.tryDisposePeer(peer) + return + } + + // If we already know the peer's identity and the new identity doesn't match, move the connection + // to a Peer with the new identity. 
+ if (peer.state.identity != null && peer.state.identity !== identity) { + this.logger.debug( + `${peer.displayName} sent identity ${identity}, but already has identity ${peer.state.identity}`, + ) + + peer.removeConnection(connection) + peer.getConnectionRetry(connection.type, connection.direction)?.neverRetryConnecting() + + const originalPeer = peer + peer = this.getOrCreatePeer(identity) + + if (connection instanceof WebRtcConnection) { + peer.setWebRtcConnection(connection) + } else if (connection instanceof WebSocketConnection) { + if ( + connection.type === ConnectionType.WebSocket && + connection.direction === ConnectionDirection.Outbound && + originalPeer.address !== null + ) { + peer.setWebSocketAddress(originalPeer.address, originalPeer.port) + originalPeer.setWebSocketAddress(null, null) + } + peer.setWebSocketConnection(connection) + } + } + + const existingPeer = this.getPeer(identity) + + // Check if already have a duplicate websocket connection from this peer + // + // This probably happened because either we connected to each other at the same time, + // or the other side is trying to establish multiple connections to us which is invalid + // behaviour. 
We should kill the peer / connection that was initiated by the peer with + // the lower identity + if ( + existingPeer !== null && + existingPeer.state.type === 'CONNECTED' && + existingPeer.state.connections.webSocket && + connection.type === ConnectionType.WebSocket + ) { + const existingConnection = existingPeer.state.connections.webSocket + let connectionToClose = connection + + // We keep the other persons outbound connection + if (canKeepDuplicateConnection(identity, this.localPeer.publicIdentity)) { + if (connection.direction === ConnectionDirection.Outbound) { + connectionToClose = connection + } else if (existingConnection.direction === ConnectionDirection.Outbound) { + connectionToClose = existingConnection + } + } + + // We keep our outbound connection + if (canKeepDuplicateConnection(this.localPeer.publicIdentity, identity)) { + if (connection.direction === ConnectionDirection.Inbound) { + connectionToClose = connection + } else if (existingConnection.direction === ConnectionDirection.Inbound) { + connectionToClose = existingConnection + } + } + + const error = `Closing duplicate ${connectionToClose.type} connection with direction ${connectionToClose.direction}` + this.logger.debug(error) + connectionToClose.close(new NetworkError(error)) + + if (connectionToClose === connection) return + } + + // Inbound WebSocket connections come with an address but no port, so we need to + // pull the port from the identity message onto the connection. In cases where we + // attempt to establish an outbound WebSocket connection, we should have received + // the port via the peer list or user input, so we can ignore it. 
+ if ( + connection instanceof WebSocketConnection && + connection.direction === ConnectionDirection.Inbound + ) { + connection.port = port || undefined + } + + peer.name = name + peer.isWorker = message.payload.isWorker || false + peer.version = version + + // If we've told the peer to stay disconnected, repeat + // the disconnection time before closing the connection + if ( + existingPeer !== null && + existingPeer.localRequestedDisconnectUntil !== null && + Date.now() < existingPeer.localRequestedDisconnectUntil + ) { + const disconnectMessage: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + sourceIdentity: this.localPeer.publicIdentity, + destinationIdentity: identity, + reason: existingPeer.localRequestedDisconnectReason || DisconnectingReason.Congested, + disconnectUntil: existingPeer.localRequestedDisconnectUntil, + }, + } + connection.send(disconnectMessage) + + const error = `Closing connection from ${ + existingPeer.displayName + } because they connected at ${Date.now()}, but we told them to disconnect until ${ + existingPeer.localRequestedDisconnectUntil + }` + this.logger.debug(error) + connection.close(new NetworkError(error)) + return + } + + // Identity has been successfully validated, update the peer's state + connection.setState({ type: 'CONNECTED', identity: identity }) + } + + /** + * Handle a signal request message relayed by another peer. + * @param message An incoming SignalRequest message from a peer. 
+ */ + private handleSignalRequestMessage(messageSender: Peer, message: SignalRequest) { + if ( + canInitiateWebRTC(message.payload.sourceIdentity, message.payload.destinationIdentity) + ) { + this.logger.debug( + 'not handling signal request from', + message.payload.sourceIdentity, + 'to', + message.payload.destinationIdentity, + 'because source peer should have initiated', + ) + return + } + + // Forward the message if it's not destined for us + if (message.payload.destinationIdentity !== this.localPeer.publicIdentity) { + // Only forward it if the message was received from the same peer as it originated from + if (message.payload.sourceIdentity !== messageSender.state.identity) { + this.logger.debug( + `not forwarding signal request from ${ + messageSender.displayName + } because the message's source identity (${ + message.payload.sourceIdentity + }) doesn't match the sender's identity (${String(messageSender.state.identity)})`, + ) + return + } + + const destinationPeer = this.getPeer(message.payload.destinationIdentity) + + if (!destinationPeer) { + this.logger.debug( + 'not forwarding signal request from', + messageSender.displayName, + 'due to unknown peer', + message.payload.destinationIdentity, + ) + return + } + + this.sendTo(destinationPeer, message) + return + } + + // Ignore the request if we're at max peers and don't have an existing connection + if (this.shouldRejectDisconnectedPeers()) { + const peer = this.getPeer(message.payload.sourceIdentity) + if (!peer || peer.state.type !== 'CONNECTED') { + const disconnectingMessage: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + sourceIdentity: this.localPeer.publicIdentity, + destinationIdentity: message.payload.sourceIdentity, + reason: DisconnectingReason.Congested, + disconnectUntil: 1000 * 60 * 5, + }, + } + messageSender.send(disconnectingMessage) + this.logger.debug( + `Ignoring signaling request from ${message.payload.sourceIdentity}, at max peers`, + ) + return + 
} + } + + const targetPeer = this.getOrCreatePeer(message.payload.sourceIdentity) + this.addKnownPeerTo(targetPeer, messageSender) + + if (targetPeer.state.type !== 'DISCONNECTED' && targetPeer.state.connections.webRtc) { + this.logger.debug( + `Ignoring signaling request from ${targetPeer.displayName} because we already have a connection`, + ) + return + } + + this.initWebRtcConnection(messageSender, targetPeer, true) + } + + /** + * Handle a signal message relayed by another peer. + * @param message An incoming Signal message from a peer. + */ + private handleSignalMessage(messageSender: Peer, message: Signal) { + // Forward the message if it's not destined for us + if (message.payload.destinationIdentity !== this.localPeer.publicIdentity) { + // Only forward it if the message was received from the same peer as it originated from + if (message.payload.sourceIdentity !== messageSender.state.identity) { + this.logger.debug( + `not forwarding signal from ${ + messageSender.displayName + } because the message's source identity (${ + message.payload.sourceIdentity + }) doesn't match the sender's identity (${String(messageSender.state.identity)})`, + ) + return + } + + const destinationPeer = this.getPeer(message.payload.destinationIdentity) + + if (!destinationPeer) { + this.logger.debug( + 'not forwarding signal from', + messageSender.displayName, + 'due to unknown peer', + message.payload.destinationIdentity, + ) + return + } + + this.sendTo(destinationPeer, message) + return + } + + // Ignore the request if we're at max peers and don't have an existing connection + if (this.shouldRejectDisconnectedPeers()) { + const peer = this.getPeer(message.payload.sourceIdentity) + if (!peer || peer.state.type !== 'CONNECTED') { + const disconnectingMessage: DisconnectingMessage = { + type: InternalMessageType.disconnecting, + payload: { + sourceIdentity: this.localPeer.publicIdentity, + destinationIdentity: message.payload.sourceIdentity, + reason: 
DisconnectingReason.Congested, + disconnectUntil: 1000 * 60 * 5, + }, + } + messageSender.send(disconnectingMessage) + this.logger.debug( + `Ignoring signaling request from ${message.payload.sourceIdentity}, at max peers`, + ) + return + } + } + + // Get or create a WebRTC connection for the signaling peer. + const signalingPeer = this.getOrCreatePeer(message.payload.sourceIdentity) + this.addKnownPeerTo(signalingPeer, messageSender) + + let connection: WebRtcConnection + + if ( + signalingPeer.state.type === 'DISCONNECTED' || + signalingPeer.state.connections.webRtc == null + ) { + if (signalingPeer.state.identity == null) { + this.logger.log('Peer must have an identity to begin signaling') + return + } + + if ( + !canInitiateWebRTC(signalingPeer.state.identity, message.payload.destinationIdentity) + ) { + this.logger.debug( + 'not handling signal message from', + signalingPeer.name, + 'because source peer should have requested signaling', + ) + return + } + + connection = this.initWebRtcConnection(messageSender, signalingPeer, false) + } else { + connection = signalingPeer.state.connections.webRtc + } + + // Try decrypting the message + const result = this.localPeer.unboxMessage( + message.payload.signal, + message.payload.nonce, + message.payload.sourceIdentity, + ) + + // Close the connection if decrypting fails + if (result == null) { + const error = `Failed to decrypt signaling data from ${signalingPeer.displayName}` + this.logger.debug(error) + connection.close(new NetworkError(error)) + return + } + + // Try JSON.parsing the decrypted message + let signalData: SignalData + try { + signalData = JSON.parse(result) as SignalData + } catch { + const error = `Failed to decode signaling data from ${signalingPeer.displayName}` + this.logger.debug(error) + connection.close(new NetworkError(error)) + return + } + + // We have the signaling data, so pass it on to the connection + connection.signal(signalData) + } + + private handlePeerListMessage(peerList: PeerList, 
peer: Peer) { + if (peer.state.type !== 'CONNECTED') { + this.logger.warn('Should not handle the peer list message unless peer is connected') + return + } + + // Workers don't try connect to other peers, so if localPeer is a worker, + // we can ignore this message + if (this.localPeer.isWorker) { + return + } + + let changed = false + + const newPeerSet = peerList.payload.connectedPeers.reduce( + (memo, peer) => { + memo.set(peer.identity, peer) + return memo + }, + new Map< + Identity, + { + identity: Identity + name?: string + address: string | null + port: number | null + } + >(), + ) + + // Don't include the local peer in the peer graph + newPeerSet.delete(this.localPeer.publicIdentity) + + // Remove peer edges that are no longer in the peer list. + for (const [otherIdentity, otherPeer] of peer.knownPeers) { + if (!newPeerSet.has(otherIdentity)) { + peer.knownPeers.delete(otherIdentity) + // Optimistically update the edges. + // This could result in pinging back and forth if peers don't agree whether they're connected + otherPeer.knownPeers.delete(peer.state.identity) + // See if removing edges from either peer caused it to be disposable + this.tryDisposePeer(peer) + this.tryDisposePeer(otherPeer) + changed = true + } + } + + // Add peer edges that are new to the peer list + for (const newPeer of newPeerSet.values()) { + if (!peer.knownPeers.has(newPeer.identity)) { + const knownPeer = this.getOrCreatePeer(newPeer.identity) + knownPeer.setWebSocketAddress(newPeer.address, newPeer.port) + knownPeer.name = newPeer.name || null + this.addKnownPeerTo(knownPeer, peer, false) + changed = true + } + } + + if (changed) { + peer.onKnownPeersChanged.emit() + } + } + + /** + * This is used for adding a peer to a peers known list. It also handles adding it bi-directionally + * and emits peer.onKnownPeersChanged by default. 
+ * @param peer The peer to put into `addTo's` knownPeers + * @param addTo The peer to add `peer` to + * @param emitKnownPeersChanged Set this to false if you are adding known peers in bulk and you know you want to emit this yourself + */ + addKnownPeerTo(peer: Peer, addTo: Peer, emitKnownPeersChanged = true): void { + if (!peer.state.identity || !addTo.state.identity) return + if (peer.state.identity === addTo.state.identity) return + + if (!addTo.knownPeers.has(peer.state.identity)) { + addTo.knownPeers.set(peer.state.identity, peer) + + if (emitKnownPeersChanged) { + addTo.onKnownPeersChanged.emit() + } + } + + // Optimistically update the edges. This could result in pinging back and forth if peers don't agree whether they're connected + if (!peer.knownPeers.has(addTo.state.identity)) { + this.addKnownPeerTo(addTo, peer) + } + } +} diff --git a/ironfish/src/network/testUtilities/helpers.ts b/ironfish/src/network/testUtilities/helpers.ts new file mode 100644 index 0000000000..b18de58d21 --- /dev/null +++ b/ironfish/src/network/testUtilities/helpers.ts @@ -0,0 +1,143 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { Identity, isIdentity } from '../identity' +import { PeerManager } from '../peers/peerManager' +import { Peer } from '../peers/peer' +import { + Connection, + ConnectionDirection, + ConnectionType, + WebRtcConnection, + WebSocketConnection, +} from '../peers/connections' +import { mockIdentity } from './mockIdentity' +import ws from 'ws' + +export function getConnectingPeer( + pm: PeerManager, + disposable = true, + direction = ConnectionDirection.Outbound, +): { peer: Peer; connection: WebSocketConnection } { + let peer: Peer | null = null + + if (direction === ConnectionDirection.Outbound) { + peer = pm.connectToWebSocketAddress('ws://testuri.com:9033') + } else { + peer = pm.getOrCreatePeer(null) + + const connection = new WebSocketConnection( + new ws(''), + ConnectionDirection.Inbound, + peer.logger, + ) + + peer.setWebSocketConnection(connection) + } + + if (disposable) { + peer + .getConnectionRetry(ConnectionType.WebSocket, ConnectionDirection.Outbound) + ?.neverRetryConnecting() + } + + expect(peer.state).toEqual({ + type: 'CONNECTING', + identity: peer.state.identity, + connections: { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + webSocket: expect.any(WebSocketConnection), + }, + }) + + if (peer.state.type !== 'CONNECTING') { + throw new Error('state should be CONNECTING') + } + if (peer.state.connections.webSocket == null) { + throw new Error('WebSocket connection should be defined') + } + + jest.spyOn(peer.state.connections.webSocket, 'send').mockImplementation() + + return { peer, connection: peer.state.connections.webSocket } +} + +export function getWaitingForIdentityPeer( + pm: PeerManager, + disposable = true, + direction = ConnectionDirection.Outbound, +): { peer: Peer; connection: WebSocketConnection } { + const { peer, connection } = getConnectingPeer(pm, disposable, direction) + connection.setState({ type: 'WAITING_FOR_IDENTITY' }) + + expect(peer.state.type).toBe('CONNECTING') + return { peer, 
connection: connection } +} + +export function getConnectedPeer( + pm: PeerManager, + identity?: string | Identity, +): { peer: Peer; connection: WebSocketConnection } { + const { peer, connection } = getConnectingPeer(pm) + + if (!identity) { + identity = jest.requireActual('uuid').v4() + } + + if (!isIdentity(identity)) { + identity = mockIdentity(identity) + } + + connection.setState({ type: 'CONNECTED', identity }) + + return { peer, connection: connection } +} + +export function getSignalingWebRtcPeer( + pm: PeerManager, + brokeringPeerIdentity: Identity, + peerIdentity: Identity, +): { + peer: Peer + connection: WebRtcConnection + brokeringPeer: Peer + brokeringConnection: Connection +} { + // Create the peers + const { peer: brokeringPeer, connection: brokeringConnection } = getConnectedPeer( + pm, + brokeringPeerIdentity, + ) + const peer = pm.getOrCreatePeer(peerIdentity) + + // Link the peers + brokeringPeer.knownPeers.set(peerIdentity, peer) + peer.knownPeers.set(brokeringPeerIdentity, brokeringPeer) + + // Verify peer2 is not connected + expect(peer.address).toBeNull() + expect(peer.state).toEqual({ + type: 'DISCONNECTED', + identity: peerIdentity, + }) + + pm.connectToWebRTC(peer) + + expect(peer.state).toEqual({ + type: 'CONNECTING', + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + connections: { webRtc: expect.any(WebRtcConnection) }, + identity: peer.state.identity, + }) + + if (peer.state.type !== 'CONNECTING') throw new Error('Peer state should be CONNECTING') + const connection = peer.state.connections.webRtc + + // Send a signal to trigger the connection into a SIGNALING state + connection?.signal({}) + expect(connection?.state.type).toBe('SIGNALING') + if (connection?.state.type !== 'SIGNALING') throw new Error('Connection') + + return { peer, connection: connection, brokeringPeer, brokeringConnection } +} diff --git a/ironfish/src/network/testUtilities/index.ts b/ironfish/src/network/testUtilities/index.ts new file mode 
100644 index 0000000000..46d66330b9 --- /dev/null +++ b/ironfish/src/network/testUtilities/index.ts @@ -0,0 +1,9 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import './matchers' +export * from './mockIdentity' +export * from './mockLocalPeer' +export * from './mockPrivateIdentity' +export * from './helpers' diff --git a/ironfish/src/network/testUtilities/matchers.ts b/ironfish/src/network/testUtilities/matchers.ts new file mode 100644 index 0000000000..9cfc736474 --- /dev/null +++ b/ironfish/src/network/testUtilities/matchers.ts @@ -0,0 +1,73 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export {} + +declare global { + namespace jest { + interface Matchers { + toThrowErrorInstance(errorClass: Constructor): R + toRejectErrorInstance(errorClass: Constructor): Promise + } + } +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +type Constructor = new (...args: any[]) => T + +function makeResult(pass: boolean, message: string): { pass: boolean; message: () => string } { + return { + pass: pass, + message: () => message, + } +} + +function toThrowErrorInstance( + received: () => unknown, + errorClass: Constructor, +): jest.CustomMatcherResult { + try { + received() + + return makeResult( + false, + `expected function to throw ${String(errorClass)} but did not throw`, + ) + } catch (e: unknown) { + if (e instanceof errorClass) { + return makeResult(true, `expect function ${received.name} to throw ${String(errorClass)}`) + } + + return makeResult( + false, + `expected function to throw ${String(errorClass)} but threw ${String(e)}`, + ) + } +} + +async function toRejectErrorInstance( + received: Promise, + errorClass: 
Constructor, +): Promise { + try { + await received + return makeResult( + false, + `expected function to throw ${String(errorClass)} but did not throw`, + ) + } catch (e: unknown) { + if (e instanceof errorClass) { + return makeResult(true, `expect promise to reject with ${String(errorClass)}`) + } + + return makeResult( + false, + `expected function to throw ${String(errorClass)} but threw ${String(e)}`, + ) + } +} + +expect.extend({ + toThrowErrorInstance, + toRejectErrorInstance, +}) diff --git a/ironfish/src/network/testUtilities/mockIdentity.test.ts b/ironfish/src/network/testUtilities/mockIdentity.test.ts new file mode 100644 index 0000000000..864e940a34 --- /dev/null +++ b/ironfish/src/network/testUtilities/mockIdentity.test.ts @@ -0,0 +1,32 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { canInitiateWebRTC, privateIdentityToIdentity } from '../identity' +import { webRtcCanInitiateIdentity, webRtcCannotInitiateIdentity } from './mockIdentity' +import { webRtcLocalIdentity } from './mockPrivateIdentity' + +describe('mockIdentity', () => { + it('should have identity that can initiate WebRTC', () => { + const can = webRtcCanInitiateIdentity() + const local = privateIdentityToIdentity(webRtcLocalIdentity()) + const cannot = webRtcCannotInitiateIdentity() + + // local is in the middle of the others + expect(canInitiateWebRTC(local, can)).toBe(true) + expect(canInitiateWebRTC(local, cannot)).toBe(false) + + // can is lower than the others + expect(canInitiateWebRTC(can, local)).toBe(false) + expect(canInitiateWebRTC(can, cannot)).toBe(false) + + // cannot is greater than the others + expect(canInitiateWebRTC(cannot, local)).toBe(true) + expect(canInitiateWebRTC(cannot, can)).toBe(true) + + // NO identity can initiate to itself + expect(canInitiateWebRTC(can, can)).toBe(false) + 
expect(canInitiateWebRTC(local, local)).toBe(false) + expect(canInitiateWebRTC(cannot, cannot)).toBe(false) + }) +}) diff --git a/ironfish/src/network/testUtilities/mockIdentity.ts b/ironfish/src/network/testUtilities/mockIdentity.ts new file mode 100644 index 0000000000..2f31d9f020 --- /dev/null +++ b/ironfish/src/network/testUtilities/mockIdentity.ts @@ -0,0 +1,30 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Identity, privateIdentityToIdentity } from '..' +import { + mockPrivateIdentity, + webRtcCanInitiateIdentityPrivate, + webRtcCannotInitiateIdentityPrivate, +} from './mockPrivateIdentity' + +// The identities here are in order of: +// Lowest: webRtcCanInitiateIdentity +// Middle: webRtcLocalIdentity +// Greatest: webRtcCannotInitiateIdentity + +/** webRtcCannotInitiateIdentity as a base64 string is greater than webRtcLocalIdentity */ +export const webRtcCannotInitiateIdentity = (): Identity => + privateIdentityToIdentity(webRtcCannotInitiateIdentityPrivate()) + +/** webRtcCanInitiateIdentity as a base64 string is less than webRtcLocalIdentity */ +export const webRtcCanInitiateIdentity = (): Identity => + privateIdentityToIdentity(webRtcCanInitiateIdentityPrivate()) + +/** + * Utility to mock a public-facing identity. + */ +export function mockIdentity(identity: string): Identity { + return privateIdentityToIdentity(mockPrivateIdentity(identity)) +} diff --git a/ironfish/src/network/testUtilities/mockLocalPeer.ts b/ironfish/src/network/testUtilities/mockLocalPeer.ts new file mode 100644 index 0000000000..b25d02824b --- /dev/null +++ b/ironfish/src/network/testUtilities/mockLocalPeer.ts @@ -0,0 +1,22 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import ws from 'ws' +import wrtc from 'wrtc' +import { PrivateIdentity } from '../identity' +import { LocalPeer } from '../peers/localPeer' +import { mockPrivateIdentity } from './mockPrivateIdentity' + +/** + * Utility to create a fake "keypair" for testing the network layer + */ +export function mockLocalPeer({ + identity = mockPrivateIdentity('local'), + version = 'sdk/1/cli', +}: { + identity?: PrivateIdentity + version?: string +} = {}): LocalPeer { + return new LocalPeer(identity, version, ws, wrtc) +} diff --git a/ironfish/src/network/testUtilities/mockPrivateIdentity.ts b/ironfish/src/network/testUtilities/mockPrivateIdentity.ts new file mode 100644 index 0000000000..2b8426a347 --- /dev/null +++ b/ironfish/src/network/testUtilities/mockPrivateIdentity.ts @@ -0,0 +1,29 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { PrivateIdentity, identityLength } from '..' 
+ +// The identities here are in order of: +// Lowest: webRtcCanInitiateIdentityPrivate +// Middle: webRtcLocalIdentity +// Greatest: webRtcCannotInitiateIdentityPrivate + +/** webRtcCanInitiateIdentity as a base64 string is less than webRtcLocalIdentity */ +export const webRtcCanInitiateIdentityPrivate = (): PrivateIdentity => mockPrivateIdentity('k') + +/** webRtcLocalIdentity as a base64 string is between webRtcCannotInitiateIdentity and webRtcCanInitiateIdentity */ +export const webRtcLocalIdentity = (): PrivateIdentity => mockPrivateIdentity('l') + +/** webRtcCannotInitiateIdentity as a base64 string is greater than webRtcLocalIdentity */ +export const webRtcCannotInitiateIdentityPrivate = (): PrivateIdentity => + mockPrivateIdentity('m') +/** + * Utility to create a fake "keypair" for testing the network layer + */ +export function mockPrivateIdentity(identity: string): PrivateIdentity { + return { + publicKey: Buffer.alloc(identityLength, identity, 'utf8'), + secretKey: Buffer.alloc(identityLength, identity, 'utf8'), + } +} diff --git a/ironfish/src/network/types.ts b/ironfish/src/network/types.ts new file mode 100644 index 0000000000..21cd43e9b0 --- /dev/null +++ b/ironfish/src/network/types.ts @@ -0,0 +1,31 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import type WSWebSocket from 'ws' +import { ErrorEvent as WSErrorEvent } from 'ws' + +export type IsomorphicWebSocketConstructor = typeof WebSocket | typeof WSWebSocket +export type IsomorphicWebSocket = WebSocket | WSWebSocket +export type IsomorphicWebSocketErrorEvent = WSErrorEvent + +type WebRtcInterface = { + MediaStream: MediaStream + MediaStreamTrack: MediaStreamTrack + RTCDataChannel: RTCDataChannel + RTCDataChannelEvent: RTCDataChannelEvent + RTCDtlsTransport: RTCDtlsTransport + RTCIceCandidate: RTCIceCandidate + RTCIceTransport: RTCIceTransport + RTCPeerConnection: RTCPeerConnection + RTCPeerConnectionIceEvent: RTCPeerConnectionIceEvent + RTCRtpReceiver: RTCRtpReceiver + RTCRtpSender: RTCRtpSender + RTCRtpTransceiver: RTCRtpTransceiver + RTCSctpTransport: RTCSctpTransport + RTCSessionDescription: RTCSessionDescription + getUserMedia: (constraints?: MediaStreamConstraints) => Promise + mediaDevices: MediaDevices +} + +// if wrtc is undefined, simple-peer will use browser functions +export type IsomorphicWebRtc = WebRtcInterface | undefined diff --git a/ironfish/src/network/utils/__snapshots__/evictingSet.test.ts.snap b/ironfish/src/network/utils/__snapshots__/evictingSet.test.ts.snap new file mode 100644 index 0000000000..03488f15a8 --- /dev/null +++ b/ironfish/src/network/utils/__snapshots__/evictingSet.test.ts.snap @@ -0,0 +1,30 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Adds an element to an EvictingSet 1`] = ` +EvictingSet { + "items": Set { + "a", + }, + "max_size": 5, +} +`; + +exports[`Constructs an EvictingSet with no elements 1`] = ` +EvictingSet { + "items": Set {}, + "max_size": 5, +} +`; + +exports[`Removes an element once max size is reached 1`] = ` +EvictingSet { + "items": Set { + "g", + "h", + "i", + "j", + "k", + }, + "max_size": 5, +} +`; diff --git a/ironfish/src/network/utils/evictingSet.test.ts b/ironfish/src/network/utils/evictingSet.test.ts new file mode 100644 index 0000000000..06a4cdc55e --- /dev/null +++ 
b/ironfish/src/network/utils/evictingSet.test.ts @@ -0,0 +1,32 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import EvictingSet from './evictingSet' + +it('Constructs an EvictingSet with no elements', () => { + const set = new EvictingSet(5) + expect(set).toMatchSnapshot() +}) + +it('Adds an element to an EvictingSet', () => { + const set = new EvictingSet(5) + set.add('a') + expect(set).toMatchSnapshot() +}) + +it('Removes an element once max size is reached', () => { + const set = new EvictingSet(5) + set.add('a') + set.add('b') + set.add('c') + set.add('d') + set.add('e') + set.add('f') + set.add('g') + set.add('h') + set.add('i') + set.add('j') + set.add('k') + expect(set).toMatchSnapshot() +}) diff --git a/ironfish/src/network/utils/evictingSet.ts b/ironfish/src/network/utils/evictingSet.ts new file mode 100644 index 0000000000..44ac346c1c --- /dev/null +++ b/ironfish/src/network/utils/evictingSet.ts @@ -0,0 +1,47 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +// TODO: This belongs in its own package. I'm surprised I couldn't find one + +/** + * Wrapper around a set that guarantees the set does not grow without bounds. + * When the set reaches a maximum size, every call to `add()` results + * in the oldest item being removed. + * + * NOTE: This relies on behaviour described in the MDN docs that + * iterating a set in order always yields values in insertion order. + * If a JS implementation does not follow this documented behaviour, + * the elemnet that gets removed is undefined. 
+ */ +export default class EvictingSet { + private items: Set + private max_size: number + + constructor(max_size: number) { + this.max_size = max_size + this.items = new Set() + } + + /** + * Add an item to the set. If the size of the set is too large, + * the oldest item will be removed. + */ + add(item: T): void { + if (this.items.size >= this.max_size) { + const nextVal = this.items.keys().next() + if (!nextVal.done) { + const oldest_item = nextVal.value + this.items.delete(oldest_item) + } + } + this.items.add(item) + } + + /** + * Check if the item is in the set + */ + has(item: T): boolean { + return this.items.has(item) + } +} diff --git a/ironfish/src/network/utils/index.ts b/ironfish/src/network/utils/index.ts new file mode 100644 index 0000000000..f043225d46 --- /dev/null +++ b/ironfish/src/network/utils/index.ts @@ -0,0 +1,4 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './parseUrl' diff --git a/ironfish/src/network/utils/parseUrl.test.ts b/ironfish/src/network/utils/parseUrl.test.ts new file mode 100644 index 0000000000..e800e5e206 --- /dev/null +++ b/ironfish/src/network/utils/parseUrl.test.ts @@ -0,0 +1,59 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { parseUrl } from './parseUrl' + +describe('parseUrl', () => { + it('should parse urls', () => { + // all components + expect(parseUrl('http://foo.bar:9033')).toMatchObject({ + protocol: 'http', + hostname: 'foo.bar', + port: 9033, + }) + + // Hostname + expect(parseUrl('foo.bar')).toMatchObject({ + protocol: null, + hostname: 'foo.bar', + port: null, + }) + + // Port only + expect(parseUrl(':9')).toMatchObject({ + protocol: null, + hostname: null, + port: 9, + }) + + // Protocol and port + expect(parseUrl('http://:1')).toMatchObject({ + protocol: 'http', + hostname: null, + port: 1, + }) + }) + + it('should handle spaces', () => { + expect(parseUrl(' foo ')).toMatchObject({ + protocol: null, + hostname: 'foo', + port: null, + }) + + expect(parseUrl(' http://foo : 9033 ')).toMatchObject({ + protocol: 'http', + hostname: 'foo', + port: 9033, + }) + }) + + it('should handle invalid port', () => { + expect(parseUrl('http://foo:notanumber')).toMatchObject({ + protocol: 'http', + hostname: 'foo', + port: null, + }) + }) +}) diff --git a/ironfish/src/network/utils/parseUrl.ts b/ironfish/src/network/utils/parseUrl.ts new file mode 100644 index 0000000000..6b2314883d --- /dev/null +++ b/ironfish/src/network/utils/parseUrl.ts @@ -0,0 +1,43 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +const PROTOCOL_SEPARATOR = '://' +const PORT_SEPARATOR = ':' + +/** + * Liberally parses a URL into its components and returns + * null for a component if it is not present or invalid + */ +export function parseUrl( + url: string, +): { + protocol: string | null + hostname: string | null + port: number | null +} { + url = url.trim() + + let protocol = null + let hostname = null + let port = null + + const protocolSepIndex = url.indexOf(PROTOCOL_SEPARATOR) + if (protocolSepIndex !== -1) { + protocol = url.slice(0, protocolSepIndex) + url = url.slice(protocolSepIndex + PROTOCOL_SEPARATOR.length).trim() + } + + const portSepIndex = url.indexOf(PORT_SEPARATOR) + if (portSepIndex !== -1) { + const value = Number(url.slice(portSepIndex + PORT_SEPARATOR.length).trim()) + url = url.slice(0, portSepIndex).trim() + if (!isNaN(value)) port = value + } + + if (url) { + hostname = url + } + + return { protocol, hostname, port } +} diff --git a/ironfish/src/network/version.ts b/ironfish/src/network/version.ts new file mode 100644 index 0000000000..4dc01b6d49 --- /dev/null +++ b/ironfish/src/network/version.ts @@ -0,0 +1,66 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export const VERSION_SEPARATOR = '/' + +/** + * A peer version and its components + */ +export type Version = { + product: string | null + agent: string | null + version: string | null + code: string | null +} + +/** + * Returns true if `otherVersion` is compatible with `localVersion`. + * "Compatible" means the peers can connect to each other. + * @param otherVersion Another version string. 
+ */ +export function versionsAreCompatible(localVersion: Version, otherVersion: Version): boolean { + if (localVersion.product === null) return false + if (otherVersion.product === null) return false + if (localVersion.version === null) return false + if (otherVersion.version === null) return false + if (localVersion.agent === null) return false + if (otherVersion.agent === null) return false + return localVersion.version === otherVersion.version +} + +/** + * Returns the parsed version string components + * @param peerVersion a peer version string + */ +export function parseVersion(peerVersion: string): Version { + let ironfish: string | null = null + let version: string | null = null + let agent: string | null = null + let code: string | null = null + + const split = peerVersion.split('/') + if (split.length >= 1) ironfish = split[0] + if (split.length >= 2) version = split[1] + if (split.length >= 3) agent = split[2] + if (split.length >= 4) code = split[3] + + return { product: ironfish, agent, version, code } +} + +/** + * Return version in string format in the form [product]/[version]/[agent]/[code] + * Example: sdk/1/cli/eb4d5d3 + */ +export function renderVersion(version: Version): string { + let rendered = + `${version.product || ''}` + + `${VERSION_SEPARATOR}${version.version || ''}` + + `${VERSION_SEPARATOR}${version.agent || ''}` + + if (version.code) { + rendered += `${VERSION_SEPARATOR}${version.code}` + } + + return rendered +} diff --git a/ironfish/src/network/webSocketServer.ts b/ironfish/src/network/webSocketServer.ts new file mode 100644 index 0000000000..85a3cf88c7 --- /dev/null +++ b/ironfish/src/network/webSocketServer.ts @@ -0,0 +1,32 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import WSWebSocket from 'ws' +import http from 'http' + +export class WebSocketServer { + // The server instance + readonly server: WSWebSocket.Server + + constructor(ctor: typeof WSWebSocket.Server, port: number) { + this.server = new ctor({ port }) + } + + /** + * Fired when the server is ready to accept connections. Callback will only + * be executed once. + * @param cb Callback function to be executed. + */ + onStart(cb: (ws: WSWebSocket) => void): void { + this.server.once('listening', cb) + } + + onConnection(cb: (ws: WSWebSocket, req: http.IncomingMessage) => void): void { + this.server.on('connection', cb) + } + + close(): void { + this.server.close() + } +} diff --git a/ironfish/src/networkBridge.test.ts b/ironfish/src/networkBridge.test.ts new file mode 100644 index 0000000000..bd545557fd --- /dev/null +++ b/ironfish/src/networkBridge.test.ts @@ -0,0 +1,249 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { MessageType } from './captain/messages' +import { CannotSatisfyRequestError, Gossip, IncomingPeerMessage, PeerNetwork } from './network' +import { IronfishSdk } from './sdk' +import { makeDbName } from './captain/testUtilities' +import { getConnectedPeer, mockPrivateIdentity } from './network/testUtilities' +import { StringUtils } from './utils' + +jest.mock('ws') + +describe('Node requests a block from other nodes', () => { + it('calls error handler if the message is formatted wrong', async () => { + const dataDir = `./testdbs/${makeDbName()}` + + const sdk = await IronfishSdk.init({ + dataDir: dataDir, + }) + + const node = await sdk.node() + + const peerNetwork = new PeerNetwork(mockPrivateIdentity(''), 'sdk/1/cli', require('ws')) + const { peer } = getConnectedPeer(peerNetwork.peerManager) + + const request = jest.spyOn(peerNetwork, 'request').mockImplementation(() => { + return Promise.resolve({ + peerIdentity: peer.getIdentityOrThrow(), + message: { + type: MessageType.Blocks, + payload: { block: { this_is: 'NOT A BLOCK' } }, + }, + }) + }) + const handleBlockRequestError = jest.spyOn( + node.captain.blockSyncer, + 'handleBlockRequestError', + ) + node.networkBridge.attachPeerNetwork(peerNetwork) + + node.captain.requestBlocks(Buffer.from(StringUtils.hash('blockyoudonthave')), true) + expect(request).toBeCalled() + // Wait for the promises to finish up + await new Promise((resolve) => setImmediate(() => resolve())) + expect(handleBlockRequestError).toBeCalled() + + peerNetwork.stop() + await node.shutdown() + }) + + it('calls error handler if the request promise rejects', async () => { + const dataDir = `./testdbs/${makeDbName()}` + + const sdk = await IronfishSdk.init({ + dataDir: dataDir, + }) + + const node = await sdk.node() + + const peerNetwork = new PeerNetwork(mockPrivateIdentity(''), 'sdk/1/cli', require('ws')) + const request = jest.spyOn(peerNetwork, 'request').mockImplementation(() => { + return Promise.reject(new 
CannotSatisfyRequestError('bad request')) + }) + const handleBlockRequestError = jest.spyOn( + node.captain.blockSyncer, + 'handleBlockRequestError', + ) + node.networkBridge.attachPeerNetwork(peerNetwork) + + node.captain.requestBlocks(Buffer.from(StringUtils.hash('blockyoudonthave')), true) + expect(request).toBeCalled() + // Wait for the promises to finish up + await new Promise((resolve) => setImmediate(() => resolve())) + expect(handleBlockRequestError).toBeCalled() + + peerNetwork.stop() + await node.shutdown() + }) +}) + +describe('Node receives a proposed transaction from another node', () => { + it('discards the transaction if it does not verify', async () => { + const dataDir = `./testdbs/${makeDbName()}` + + const sdk = await IronfishSdk.init({ + dataDir: dataDir, + }) + + const node = await sdk.node() + + const peerNetwork = new PeerNetwork(mockPrivateIdentity(''), 'sdk/1/cli', require('ws')) + const { peer } = getConnectedPeer(peerNetwork.peerManager) + + const acceptTransaction = jest.spyOn(node.memPool, 'acceptTransaction') + const verifyNewTransaction = jest.spyOn(node.captain.chain.verifier, 'verifyNewTransaction') + node.networkBridge.attachPeerNetwork(peerNetwork) + + const message: IncomingPeerMessage< + Gossip + > = { + peerIdentity: peer.getIdentityOrThrow(), + message: { + type: MessageType.NewTransaction, + nonce: 'asdf', + payload: { + transaction: Buffer.from([]), + }, + }, + } + + await peerNetwork.peerManager.onMessage.emitAsync(peer, message) + + expect(verifyNewTransaction).toBeCalledTimes(1) + expect(acceptTransaction).not.toBeCalled() + + peerNetwork.stop() + await node.shutdown() + }) + + it('passes along the transaction if it verifies', async () => { + const dataDir = `./testdbs/${makeDbName()}` + const sdk = await IronfishSdk.init({ dataDir: dataDir }) + const node = await sdk.node() + + const peerNetwork = new PeerNetwork(mockPrivateIdentity(''), 'sdk/1/cli', require('ws')) + const { peer } = 
getConnectedPeer(peerNetwork.peerManager) + + const acceptTransaction = jest + .spyOn(node.memPool, 'acceptTransaction') + .mockReturnValue(true) + + const syncTransaction = jest + .spyOn(node.accounts, 'syncTransaction') + .mockReturnValue(Promise.resolve()) + + const verifyNewTransaction = jest + .spyOn(node.captain.chain.verifier, 'verifyNewTransaction') + // @ts-expect-error Returning some irrelevant data + .mockImplementation((t) => { + return { serializedTransaction: t, transaction: t } + }) + + node.networkBridge.attachPeerNetwork(peerNetwork) + + const message: IncomingPeerMessage< + Gossip + > = { + peerIdentity: peer.getIdentityOrThrow(), + message: { + type: MessageType.NewTransaction, + nonce: 'asdf', + payload: { + transaction: Buffer.from([]), + }, + }, + } + + await peerNetwork.peerManager.onMessage.emitAsync(peer, message) + + expect(verifyNewTransaction).toBeCalledTimes(1) + expect(acceptTransaction).toBeCalledTimes(1) + expect(syncTransaction).toBeCalledTimes(1) + + peerNetwork.stop() + await node.shutdown() + }) +}) + +describe('Node receives a new block from another node', () => { + it('discards the block if it does not verify', async () => { + const dataDir = `./testdbs/${makeDbName()}` + + const sdk = await IronfishSdk.init({ + dataDir: dataDir, + }) + + const node = await sdk.node() + + const peerNetwork = new PeerNetwork(mockPrivateIdentity(''), 'sdk/1/cli', require('ws')) + const { peer } = getConnectedPeer(peerNetwork.peerManager) + + // @ts-expect-error Spying on a private method + const onNewBlock = jest.spyOn(node.networkBridge, 'onNewBlock') + const verifyNewBlock = jest.spyOn(node.captain.chain.verifier, 'verifyNewBlock') + node.networkBridge.attachPeerNetwork(peerNetwork) + + const message: IncomingPeerMessage> = { + peerIdentity: peer.getIdentityOrThrow(), + message: { + type: MessageType.NewBlock, + nonce: 'asdf', + payload: { + block: Buffer.from([]), + }, + }, + } + + await peerNetwork.peerManager.onMessage.emitAsync(peer, 
message) + + expect(verifyNewBlock).toBeCalledTimes(1) + expect(onNewBlock).not.toBeCalled() + + peerNetwork.stop() + await node.shutdown() + }) + + it('passes along the block if it verifies', async () => { + const dataDir = `./testdbs/${makeDbName()}` + + const sdk = await IronfishSdk.init({ + dataDir: dataDir, + }) + + const node = await sdk.node() + + const peerNetwork = new PeerNetwork(mockPrivateIdentity(''), 'sdk/1/cli', require('ws')) + const { peer } = getConnectedPeer(peerNetwork.peerManager) + + // @ts-expect-error Spying on a private method + const onNewBlock = jest.spyOn(node.networkBridge, 'onNewBlock').mockImplementation(() => {}) + const verifyNewBlock = jest + .spyOn(node.captain.chain.verifier, 'verifyNewBlock') + // @ts-expect-error Returning some irrelevant data + .mockImplementation((b) => { + return { serializedBlock: b, block: b } + }) + node.networkBridge.attachPeerNetwork(peerNetwork) + + const message: IncomingPeerMessage> = { + peerIdentity: peer.getIdentityOrThrow(), + message: { + type: MessageType.NewBlock, + nonce: 'asdf', + payload: { + block: Buffer.from([]), + }, + }, + } + + await peerNetwork.peerManager.onMessage.emitAsync(peer, message) + + expect(verifyNewBlock).toBeCalledTimes(1) + expect(onNewBlock).toBeCalledTimes(1) + + peerNetwork.stop() + await node.shutdown() + }) +}) diff --git a/ironfish/src/networkBridge.ts b/ironfish/src/networkBridge.ts new file mode 100644 index 0000000000..7e83de94f9 --- /dev/null +++ b/ironfish/src/networkBridge.ts @@ -0,0 +1,196 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { + MessageType, + BlockRequest, + isBlockRequestPayload, + isBlocksResponse, +} from './captain/messages' +import { Assert } from './assert' +import { IncomingPeerMessage, PeerNetwork, RoutingStyle } from './network' +import { IronfishNode } from './node' +import { + BlockRequestMessage, + BlocksResponseMessage, + NewBlockMessage, + NewTransactionMessage, +} from './messages' +import { SerializedTransaction, SerializedWasmNoteEncrypted } from './strategy' +import { BlockHash } from './captain' +import { NetworkBlockType } from './captain/blockSyncer' + +export class NetworkBridge { + node: IronfishNode | null = null + peerNetwork: PeerNetwork | null = null + + /** Attach to the message handlers of a PeerNetwork and forward them toward an IronfishNode */ + attachPeerNetwork(peerNetwork: PeerNetwork): void { + Assert.isNull(this.peerNetwork) + this.peerNetwork = peerNetwork + + peerNetwork.registerHandler( + MessageType.Blocks, + RoutingStyle.globalRPC, + (p) => (isBlockRequestPayload(p) ? 
Promise.resolve(p) : Promise.reject('Invalid format')), + (message) => this.onBlockRequest(message), + ) + + peerNetwork.registerHandler( + MessageType.NewBlock, + RoutingStyle.gossip, + (p) => { + Assert.isNotNull(this.node, 'No attached node') + Assert.isNotNull(this.node.captain, 'No attached node') + + return this.node.captain.chain.verifier.verifyNewBlock(p) + }, + (message) => this.onNewBlock(message), + ) + + peerNetwork.registerHandler( + MessageType.NewTransaction, + RoutingStyle.gossip, + (p) => { + Assert.isNotNull(this.node, 'No attached node') + Assert.isNotNull(this.node.captain, 'No attached node') + return this.node.captain.chain.verifier.verifyNewTransaction(p) + }, + async (message) => await this.onNewTransaction(message), + ) + + peerNetwork.onIsReadyChanged.on((isReady) => this.onPeerNetworkReadyChanged(isReady)) + this.onPeerNetworkReadyChanged(peerNetwork.isReady) + } + + /** Attach to the events of an IronfishNode and forward them toward a PeerNetwork */ + attachNode(node: IronfishNode): void { + Assert.isNull(this.node) + this.node = node + + Assert.isNotNull(this.node.captain) + + this.node.captain.onNewBlock.on((block) => { + Assert.isNotNull(this.node) + Assert.isNotNull(this.node.captain) + Assert.isNotNull(this.peerNetwork) + + const serializedBlock = this.node.captain.blockSerde.serialize(block) + + this.peerNetwork.gossip({ + type: MessageType.NewBlock, + payload: { + block: serializedBlock, + }, + }) + }) + + this.node.accounts.onBroadcastTransaction.on((transaction) => { + if (this.peerNetwork === null) return + + Assert.isNotNull(this.node) + Assert.isNotNull(this.node.captain) + + const serializedTransaction = this.node.captain.strategy + .transactionSerde() + .serialize(transaction) + + this.peerNetwork.gossip({ + type: MessageType.NewTransaction, + payload: { transaction: serializedTransaction }, + }) + }) + + this.node.captain.onRequestBlocks.on((hash: BlockHash, nextBlockDirection: boolean) => { + Assert.isNotNull(this.node) + 
Assert.isNotNull(this.node.captain) + Assert.isNotNull(this.peerNetwork) + Assert.isNotNull(this.node) + + const serializedHash = this.node.captain.chain.blockHashSerde.serialize(hash) + + const request: BlockRequest = { + type: MessageType.Blocks, + payload: { + hash: serializedHash, + nextBlockDirection: nextBlockDirection, + }, + } + + this.peerNetwork + .request(request) + .then((c) => { + if ( + !c || + !isBlocksResponse(c.message) + ) { + throw new Error('Invalid format') + } + this.onBlockResponses( + { + ...c, + message: c.message, + }, + request, + ) + }) + .catch((err) => { + this.node?.captain?.blockSyncer.handleBlockRequestError(request, err) + }) + }) + } + + /** Attach to the events of a WebWorker and forward them to/from an IronfishNode */ + attachFromWebWorker(): void { + throw new Error(`Not implemented yet`) + } + + /** Attach to the events of a WebWorker and forward them to/from a PeerNetwork */ + attachToWebWorker(): void { + throw new Error(`Not implemented yet`) + } + + private onBlockRequest(message: IncomingPeerMessage) { + Assert.isNotNull(this.node) + Assert.isNotNull(this.node.captain) + return this.node.captain.blockSyncer.handleBlockRequest(message) + } + + private onBlockResponses( + message: IncomingPeerMessage, + originalRequest: BlockRequest, + ) { + Assert.isNotNull(this.node) + Assert.isNotNull(this.node.captain) + return this.node.captain.blockSyncer.handleBlockResponse(message, originalRequest) + } + + private onNewBlock(message: IncomingPeerMessage) { + Assert.isNotNull(this.node) + Assert.isNotNull(this.node.captain) + const block = message.message.payload.block + return this.node.captain.blockSyncer.addBlockToProcess(block, NetworkBlockType.GOSSIP) + } + + private async onNewTransaction( + message: IncomingPeerMessage, + ): Promise { + Assert.isNotNull(this.node) + const transaction = message.message.payload.transaction + + if (this.node.memPool.acceptTransaction(transaction)) { + await 
this.node.accounts.syncTransaction(transaction, {}) + } + + await Promise.resolve() + } + + private onPeerNetworkReadyChanged(isReady: boolean): void { + if (isReady) { + this.node?.onPeerNetworkReady() + } else { + this.node?.onPeerNetworkNotReady() + } + } +} diff --git a/ironfish/src/node-ipc.d.ts b/ironfish/src/node-ipc.d.ts new file mode 100644 index 0000000000..cec7c1f587 --- /dev/null +++ b/ironfish/src/node-ipc.d.ts @@ -0,0 +1,154 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +declare module 'event-pubsub' { + export class EventPubSub { + // eslint-disable-next-line @typescript-eslint/ban-types + on(type: string, handler: Function, once: boolean): EventPubSub + // eslint-disable-next-line @typescript-eslint/ban-types + once(type: string, handler: Function): EventPubSub + // eslint-disable-next-line @typescript-eslint/ban-types + off(type: string, handler: Function): EventPubSub + emit(type: string): EventPubSub + emit$(type: string, args: unknown[]): EventPubSub + } + + export default EventPubSub +} + +declare module 'node-ipc' { + import EventPubSub from 'event-pubsub' + + export type UdpSocket = unknown + export type IpcSocketId = string + + export type IpcSocket = { + id: IpcSocketId | undefined + ipcBuffer: string | undefined + + emit(event: name, data: unknown) + write(data: unknown) + setEncoding(encoding: string) + + on(name: 'connect', callback: () => void): void + on(name: 'close', callback: (socket: UdpSocket | false) => void): void + on(name: 'error', callback: (err: unknown) => void): void + on(name: 'data', callback: (data: unknown, socket: UdpSocket) => void): void + on(name: 'message', callback: (message: unknown, rinfo?: unknown) => void): void + + // eslint-disable-next-line @typescript-eslint/ban-types + off(name: string, callback: Function): void + } + + export class 
IpcServer extends EventPubSub { + sockets: IpcSocket[] + start(): void + stop(): void + + on(name: 'start', callback: (socket: IpcSocket) => void): void + on(name: 'data', callback: (data: unknown, socket: IpcSocket) => void): void + on(name: 'error', callback: (error: unknown) => void): void + on(name: 'connect', callback: (socket: IpcSocket) => void): void + on( + name: 'socket.disconnected', + callback: (socket: IpcSocket, destroyedSocketId: IpcSocketId | false) => void, + ): void + on(name: string, callback: (data: unknown, socket: IpcSocket) => void): void + + emit(socket: IpcSocket, event: name, data: unknown): void + broadcast(event: name, data: unknown): void + } + + export type IpcClient = { + id: IpcSocketId | undefined + path: string | undefined + port?: number + + config: IpcConfig + queue: Queue + socket: IpcSocket | false + log: unknown + retriesRemaining: number + explicitlyDisconnected: boolean + + connect(): void + emit(name: string, data: unknown): void + + on(event: 'connect', callback: () => void) + on(event: 'error', callback: (error: unknown) => void) + on(event: 'disconnect', callback: () => void) + on(event: 'destroy', callback: () => void) + on(event: 'data', callback: (data: Buffer) => void) + on(event: string, callback: (data: unknown) => void) + + // eslint-disable-next-line @typescript-eslint/ban-types + off(name: string | '*', handler: Function | '*'): void + } + + export type IpcUdpType = 'udp4' | 'udp6' + + export class IPC { + config: IpcConfig + + // TODO: serveNet actually allows all combination of + // parameters as long as they are in the same order + // which cannot be typed by typescript very easily so + // we only type the most commonly used variations here + serveNet(host?: string, port?: number, udpType?: IpcUdpType, callback?: () => void): void + serveNet(host?: string, port?: number, callback?: () => void): void + serveNet(port?: number, udpType?: IpcUdpType, callback?: () => void): void + serveNet(udpType?: IpcUdpType, 
callback?: () => void): void + serveNet(callback?: () => void): void + + serve(path?: string, callback?: () => void): void + serve(callback?: () => void): void + + connectTo(id: string, path?: string, callback?: () => void): void + connectTo(id: string, callback?: () => void): void + + connectToNet(id: string, host?: string, port?: number, callback?: () => void): void + connectToNet(id: string, callback?: () => void): void + + disconnect(id: IpcSocketId): void + + of: Record + server: IpcServer + } + + export type IpcConfig = { + appSpace: string + socketRoot: string + id: string + + encoding: string + rawBuffer: boolean + sync: boolean + unlink: boolean + + delimiter: string + + silent: boolean + logDepth: number + logInColor: boolean + logger: typeof console.log + + maxConnections: number + retry: number + maxRetries: number + stopRetrying: boolean + + IPType: string + tls: boolean + networkHost: '::1' | '127.0.0.1' + networkPort: number + + interface: { + localAddress: boolean + localPort: boolean + family: boolean + hints: boolean + lookup: boolean + } + } +} diff --git a/ironfish/src/node.ts b/ironfish/src/node.ts new file mode 100644 index 0000000000..bef968cb52 --- /dev/null +++ b/ironfish/src/node.ts @@ -0,0 +1,308 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Config, ConfigOptions, InternalStore } from './fileStores' +import { FileSystem } from './fileSystems' +import { IDatabase } from './storage' +import { IJSON } from './serde' +import { + IronfishCaptain, + IronfishMiningDirector, + IronfishStrategy, + IronfishMemPool, + IronfishVerifier, +} from './strategy' +import { NetworkBridge } from './networkBridge' +import Captain, { SerializedBlock } from './captain' +import { createRootLogger, Logger } from './logger' +import { genesisBlockData } from './genesis' +import { RpcServer } from './rpc/server' +import { MiningDirector } from './mining' +import { submitMetric, startCollecting, stopCollecting } from './telemetry' +import { MetricsMonitor } from './metrics' +import { AsyncTransactionWorkerPool } from './strategy/asyncTransactionWorkerPool' +import { Accounts, Account, AccountsDB } from './account' +import { MemPool } from './memPool' + +export class IronfishNode { + database: IDatabase + captain: IronfishCaptain + strategy: IronfishStrategy + config: Config + internal: InternalStore + networkBridge: NetworkBridge + accounts: Accounts + logger: Logger + miningDirector: IronfishMiningDirector + metrics: MetricsMonitor + memPool: IronfishMemPool + shutdownPromise: Promise | null = null + shutdownResolve: (() => void) | null = null + files: FileSystem + rpc: RpcServer + + private constructor({ + database, + files, + config, + internal, + accounts, + captain, + strategy, + metrics, + miningDirector, + memPool, + logger, + }: { + database: IDatabase + files: FileSystem + config: Config + internal: InternalStore + accounts: Accounts + captain: IronfishCaptain + strategy: IronfishStrategy + metrics: MetricsMonitor + miningDirector: IronfishMiningDirector + memPool: IronfishMemPool + logger: Logger + }) { + this.database = database + this.files = files + this.config = config + this.internal = internal + this.accounts = accounts + this.networkBridge = new NetworkBridge() + this.captain = captain + this.strategy 
= strategy + this.metrics = metrics + this.miningDirector = miningDirector + this.memPool = memPool + this.rpc = new RpcServer(this) + this.logger = logger + + this.networkBridge.attachNode(this) + this.config.onConfigChange.on((key, value) => this.onConfigChange(key, value)) + this.accounts.onDefaultAccountChange.on(this.onDefaultAccountChange) + } + + static async init({ + databaseName, + dataDir, + config, + internal, + logger = createRootLogger(), + metrics, + makeDatabase, + files, + verifierClass, + strategyClass, + }: { + dataDir?: string + config?: Config + internal?: InternalStore + databaseName?: string + logger?: Logger + metrics?: MetricsMonitor + makeDatabase: (path: string) => Promise + files: FileSystem + verifierClass: typeof IronfishVerifier | null + strategyClass: typeof IronfishStrategy | null + }): Promise { + logger = logger.withTag('ironfishnode') + metrics = metrics || new MetricsMonitor(logger) + + if (!config) { + config = new Config(files, dataDir) + await config.load() + } + + if (!internal) { + internal = new InternalStore(files, dataDir) + await internal.load() + } + + if (databaseName) { + config.setOverride('databaseName', databaseName) + } + + const chainDatabasePath = files.join( + config.storage.dataDir, + 'databases', + config.get('databaseName'), + ) + + const chainDatabase = await makeDatabase(chainDatabasePath) + + strategyClass = strategyClass || IronfishStrategy + const strategy = new strategyClass(verifierClass) + + const captain = await Captain.new(chainDatabase, strategy, undefined, undefined, metrics) + const memPool = new MemPool(captain, logger) + + const accountDatabasePath = files.join( + config.storage.dataDir, + 'accounts', + config.get('accountName'), + ) + + const accountDatabase = await makeDatabase(accountDatabasePath) + const accountDB = new AccountsDB({ database: accountDatabase }) + const accounts = new Accounts({ database: accountDB }) + + const miningDirector = new MiningDirector(captain, memPool, logger) + 
miningDirector.setBlockGraffiti(config.get('blockGraffiti')) + + return new IronfishNode({ + database: chainDatabase, + captain, + strategy, + files, + config, + internal, + accounts, + metrics, + miningDirector, + memPool, + logger, + }) + } + + async openDB(): Promise { + try { + await this.database.open() + await this.accounts.database.open() + } catch (e) { + await this.database.close() + await this.accounts.database.close() + throw e + } + + await this.accounts.load() + + const defaultAccount = this.accounts.getDefaultAccount() + this.miningDirector.setMinerAccount(defaultAccount) + } + + async closeDB(): Promise { + await this.database.close() + await this.accounts.database.close() + } + + async start(): Promise { + this.shutdownPromise = new Promise((r) => (this.shutdownResolve = r)) + + // Work in the transaction pool happens concurrently, + // so we should start it as soon as possible + AsyncTransactionWorkerPool.start() + + if (this.config.get('enableTelemetry')) { + startCollecting(this.config.get('telemetryApi')) + } + + if (this.config.get('enableMetrics')) { + this.metrics.start() + } + + this.accounts.start(this) + + const promises = [this.captain.start()] + + if (this.config.get('enableRpc')) { + promises.push(this.rpc.start()) + } + + await Promise.all(promises) + + submitMetric({ + name: 'started', + fields: [{ name: 'online', type: 'boolean', value: true }], + }) + + // this.captain.blockSyncer.onTreesSynced.on((treesSynced) => { + // if (treesSynced) { + // this.onTreesSynced() + // } else { + // this.onTreesOutOfSync() + // } + // }) + } + + async waitForShutdown(): Promise { + await this.shutdownPromise + } + + async shutdown(): Promise { + await Promise.all([ + this.accounts.stop(), + this.captain.shutdown(), + this.rpc.stop(), + stopCollecting(), + this.metrics.stop(), + AsyncTransactionWorkerPool.stop(), + this.miningDirector.shutdown(), + ]) + + if (this.shutdownResolve) this.shutdownResolve() + } + + async seed(): Promise { + const result 
= IJSON.parse(genesisBlockData) as SerializedBlock + const block = this.strategy._blockSerde.deserialize(result) + const blockAddedResult = await this.captain.chain.addBlock(block) + return blockAddedResult.isAdded + } + + onPeerNetworkReady(): void { + this.captain.onPeerNetworkReady() + + // this.captain.blockSyncer.treesSynced && + if (this.config.get('enableMiningDirector')) { + void this.miningDirector.start() + } + } + + onPeerNetworkNotReady(): void { + this.captain.onPeerNetworkNotReady() + + if (this.config.get('enableMiningDirector')) { + this.miningDirector.shutdown() + } + } + + onDefaultAccountChange = (account: Account | null): void => { + this.miningDirector.setMinerAccount(account) + } + + async onConfigChange( + key: Key, + newValue: ConfigOptions[Key], + ): Promise { + switch (key) { + case 'blockGraffiti': { + this.miningDirector.setBlockGraffiti(this.config.get('blockGraffiti')) + break + } + case 'enableTelemetry': { + if (newValue) startCollecting(this.config.get('telemetryApi')) + else await stopCollecting() + break + } + case 'enableMetrics': { + if (newValue) this.metrics.start() + else this.metrics.stop() + break + } + case 'enableRpc': { + if (newValue) await this.rpc.start() + else await this.rpc.stop() + break + } + case 'enableMiningDirector': { + if (newValue && this.networkBridge.peerNetwork?.isReady) + void this.miningDirector.start() + else this.miningDirector.shutdown() + break + } + } + } +} diff --git a/ironfish/src/parse-json.d.ts b/ironfish/src/parse-json.d.ts new file mode 100644 index 0000000000..643ed0c09a --- /dev/null +++ b/ironfish/src/parse-json.d.ts @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +/* eslint-disable @typescript-eslint/no-explicit-any */ + +declare module 'parse-json' { + type Reviver = (this: any, key: string, value: any) => any + + function parse(string: string, filename: string): any + function parse(string: string, reviver: Reviver, filename: string): any + + export class JSONError extends Error { + fileName: string + codeFrame: string + } + + export default parse +} diff --git a/ironfish/src/rpc/adapters/adapter.ts b/ironfish/src/rpc/adapters/adapter.ts new file mode 100644 index 0000000000..49e40aa1e2 --- /dev/null +++ b/ironfish/src/rpc/adapters/adapter.ts @@ -0,0 +1,38 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { RpcServer } from '../server' + +/** + * An adapter represents a network transport that accepts incoming requests + * and routes them into the router. + */ +export interface IAdapter { + /** + * Called when the adapter has been added to an RpcServer. + * This lets you get access to both the RpcServer, and the + * node on the server if you want to access anything like + * configuration. + */ + attach(server: RpcServer): Promise | void + + /** + * Called when the adapter has been removed from an RpcServer. + * This lets you clean up state you stored in attach() + */ + unattach(): Promise | void + + /** + * Called when the adapter should start serving requests to the router + * This is when an adapter would normally listen on a port for data and + * create {@link Request } for the routing layer. + * + * For example, when an + * HTTP server starts listening, or an IPC layer opens an IPC socket. + */ + start(): Promise + + /** Called when the adapter should stop serving requests to the routing layer. 
*/ + stop(): Promise +} diff --git a/ironfish/src/rpc/adapters/errors.ts b/ironfish/src/rpc/adapters/errors.ts new file mode 100644 index 0000000000..f3798620bd --- /dev/null +++ b/ironfish/src/rpc/adapters/errors.ts @@ -0,0 +1,49 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/** All the known error codes for APIs that can be sent back from all APIs */ +export enum ERROR_CODES { + ACCOUNT_EXISTS = 'account-exists', + ERROR = 'error', + ROUTE_NOT_FOUND = 'route-not-found', + VALIDATION = 'validation', +} + +/** + * Thrown by any part of the RPC server side networking stack to + * indicate that the request should be ended and an error should be + * sent back to the client. Any implementer of {@link IAdapter} should + * catch this before feeding the {@link Request} into the {@link Router}, + * handle it, and render a response to the requester appropriately. + * + * @note Look at the {@link IPCAdapter} implementation for an example + */ +export class ResponseError extends Error { + status: number + code: string + error: Error | null = null + + constructor(message: string, code?: string, status?: number) + constructor(error: Error, code?: string, status?: number) + constructor(messageOrError: string | Error, code = ERROR_CODES.ERROR, status = 400) { + super(messageOrError instanceof Error ? 
messageOrError.message : messageOrError) + + if (messageOrError instanceof Error) { + this.error = messageOrError + } + + this.status = status + this.code = code + } +} + +/** + * A convenience error to throw inside of routes when you want to indicate + * a 400 error to the user based on validation + */ +export class ValidationError extends ResponseError { + constructor(message: string, status = 400, code = ERROR_CODES.VALIDATION) { + super(message, code, status) + } +} diff --git a/ironfish/src/rpc/adapters/index.ts b/ironfish/src/rpc/adapters/index.ts new file mode 100644 index 0000000000..c7c8100db0 --- /dev/null +++ b/ironfish/src/rpc/adapters/index.ts @@ -0,0 +1,8 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './adapter' +export * from './errors' +export * from './ipcAdapter' +export * from './memoryAdapter' diff --git a/ironfish/src/rpc/adapters/ipcAdapter.test.ts b/ironfish/src/rpc/adapters/ipcAdapter.test.ts new file mode 100644 index 0000000000..fe6b5e62e8 --- /dev/null +++ b/ironfish/src/rpc/adapters/ipcAdapter.test.ts @@ -0,0 +1,126 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +/* eslint-disable jest/no-try-expect */ +/* eslint-disable jest/no-conditional-expect */ +import { ALL_API_NAMESPACES } from '../routes' +import { ERROR_CODES, ValidationError } from './errors' +import { IpcAdapter } from './ipcAdapter' +import { IronfishIpcClient, RequestError } from '../clients' +import { IronfishSdk } from '../../sdk' +import * as yup from 'yup' +import os from 'os' + +describe('IpcAdapter', () => { + let ipc: IpcAdapter + let sdk: IronfishSdk + let client: IronfishIpcClient + + beforeEach(async () => { + const dataDir = os.tmpdir() + + sdk = await IronfishSdk.init({ dataDir }) + sdk.config.setOverride('enableRpc', false) + sdk.config.setOverride('enableRpcIpc', false) + + const node = await sdk.node() + ipc = new IpcAdapter(ALL_API_NAMESPACES, { + mode: 'ipc', + socketPath: sdk.config.get('ipcPath'), + }) + + await node.rpc.mount(ipc) + + client = sdk.client + }) + + afterEach(async () => { + client.disconnect() + await ipc.stop() + }) + + it('should start and stop', async () => { + expect(ipc.started).toBe(false) + + await ipc.start() + expect(ipc.started).toBe(true) + + await ipc.stop() + expect(ipc.started).toBe(true) + }) + + it('should send and receive message', async () => { + ipc.router?.register('foo/bar', yup.string(), (request) => { + request.end(request.data) + }) + + await ipc.start() + await client.connect() + + const response = await client.request('foo/bar', 'hello world').waitForEnd() + expect(response.content).toBe('hello world') + }) + + it('should stream message', async () => { + ipc.router?.register('foo/bar', yup.object({}), (request) => { + request.stream('hello 1') + request.stream('hello 2') + request.end() + }) + + await ipc.start() + await client.connect() + + const response = client.request('foo/bar') + expect((await response.contentStream().next()).value).toBe('hello 1') + expect((await response.contentStream().next()).value).toBe('hello 2') + + await response.waitForEnd() + 
expect(response.content).toBe(undefined) + }) + + it('should handle errors', async () => { + ipc.router?.register('foo/bar', yup.object({}), () => { + throw new ValidationError('hello error', 402, 'hello-error' as ERROR_CODES) + }) + + await ipc.start() + await client.connect() + + const response = client.request('foo/bar') + + try { + expect.assertions(3) + await response.waitForEnd() + } catch (error: unknown) { + if (!(error instanceof RequestError)) throw error + expect(error.status).toBe(402) + expect(error.code).toBe('hello-error') + expect(error.codeMessage).toBe('hello error') + } + }) + + it('should handle request errors', async () => { + // Requires this + const schema = yup.string().defined() + // But send this instead + const body = undefined + + ipc.router?.register('foo/bar', schema, (res) => res.end()) + + await ipc.start() + await client.connect() + + const response = client.request('foo/bar', body) + + try { + expect.assertions(3) + await response.waitForEnd() + } catch (error: unknown) { + if (!(error instanceof RequestError)) throw error + expect(error.status).toBe(400) + expect(error.code).toBe(ERROR_CODES.VALIDATION) + expect(error.codeMessage).toContain('must be defined') + } + }) +}) diff --git a/ironfish/src/rpc/adapters/ipcAdapter.ts b/ironfish/src/rpc/adapters/ipcAdapter.ts new file mode 100644 index 0000000000..5d24247368 --- /dev/null +++ b/ironfish/src/rpc/adapters/ipcAdapter.ts @@ -0,0 +1,305 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { RpcServer } from '../server' +import { Assert } from '../../assert' +import { Logger, createRootLogger } from '../../logger' +import { IPC, IpcServer, IpcSocket, IpcSocketId } from 'node-ipc' +import { IronfishNode } from '../../node' +import { IAdapter } from './adapter' +import { Request } from '../request' +import { Router, ApiNamespace } from '../routes' +import { v4 as uuid } from 'uuid' +import { YupUtils } from '../../utils/yup' +import * as yup from 'yup' +import { ResponseError, ERROR_CODES } from './errors' + +export type IpcRequest = { + mid: number + type: string + data: unknown | undefined +} + +export type IpcResponse = { + id: number + status: number + data: unknown | undefined +} + +export type IpcStream = { + id: number + data: unknown | undefined +} + +export type IpcError = { + code: string + message: string + stack?: string +} + +export const IpcErrorSchema: yup.ObjectSchema = yup + .object({ + code: yup.string().defined(), + message: yup.string().defined(), + stack: yup.string().notRequired(), + }) + .defined() + +export const IpcRequestSchema: yup.ObjectSchema = yup + .object({ + mid: yup.number().required(), + type: yup.string().required(), + data: yup.mixed().notRequired(), + }) + .required() + +export const IpcResponseSchema: yup.ObjectSchema = yup + .object({ + id: yup.number().defined(), + status: yup.number().defined(), + data: yup.mixed().notRequired(), + }) + .defined() + +export const IpcStreamSchema: yup.ObjectSchema = yup + .object({ + id: yup.number().defined(), + data: yup.mixed().notRequired(), + }) + .defined() + +export type IpcAdapterConnectionInfo = + | { + mode: 'ipc' + socketPath: string + } + | { + mode: 'tcp' + host: string + port: number + } + +export class IpcAdapter implements IAdapter { + node: IronfishNode | null = null + router: Router | null = null + ipc: IPC | null = null + server: IpcServer | null = null + namespaces: ApiNamespace[] + logger: Logger + pending = new Map() + started = false + 
connection: IpcAdapterConnectionInfo + + constructor( + namespaces: ApiNamespace[], + connection: IpcAdapterConnectionInfo, + logger: Logger = createRootLogger(), + ) { + this.namespaces = namespaces + this.connection = connection + this.logger = logger.withTag('ipcadapter') + } + + async start(): Promise { + if (this.started) return + this.started = true + + const { IPC } = await import('node-ipc') + const ipc = new IPC() + ipc.config.silent = true + ipc.config.rawBuffer = false + this.ipc = ipc + + return new Promise((resolve, reject) => { + const onServed = () => { + const server = ipc.server + this.server = server + + server.off('error', onError) + + server.on('connect', (socket: IpcSocket) => { + this.onConnect(socket) + }) + + server.on('socket.disconnected', (socket) => { + this.onDisconnect(socket, socket.id || null) + }) + + server.on('message', (data: unknown, socket: IpcSocket): void => { + this.onMessage(socket, data).catch((err) => this.logger.error(err)) + }) + + resolve() + } + + const onError = (error?: unknown) => { + ipc.server.off('error', onError) + reject(error) + } + + if (this.connection.mode === 'ipc') { + this.logger.debug(`Serving RPC on IPC ${this.connection.socketPath}`) + ipc.serve(this.connection.socketPath, onServed) + } else if (this.connection.mode === 'tcp') { + this.logger.debug(`Serving RPC on TCP ${this.connection.host}:${this.connection.port}`) + ipc.serveNet(this.connection.host, this.connection.port, onServed) + } + + ipc.server.on('error', onError) + ipc.server.start() + }) + } + + async stop(): Promise { + if (this.started && this.ipc) { + this.ipc.server.stop() + await this.waitForAllToDisconnect() + } + + return Promise.resolve() + } + + async waitForAllToDisconnect(): Promise { + if (!this.server) return + + const promises = [] + + for (const socket of this.server.sockets) { + const promise = new Promise((resolve) => { + const onClose = () => { + resolve() + socket.off('close', onClose) + } + socket.on('close', onClose) 
+ }) + + promises.push(promise) + } + + await Promise.all(promises) + } + + attach(server: RpcServer): void { + this.node = server.node + this.router = server.getRouter(this.namespaces) + } + + unattach(): void { + this.node = null + this.router = null + } + + onConnect(socket: IpcSocket): void { + if (!socket.id) socket.id = uuid() + this.logger.debug(`IPC client connected: ${socket.id}`) + } + + onDisconnect(socket: IpcSocket, socketId: IpcSocketId | null): void { + this.logger.debug(`IPC client disconnected: ${socketId ? socketId : 'unknown'}`) + + if (socketId !== null) { + const pending = this.pending.get(socketId) + + if (pending) { + for (const request of pending) request.close() + this.pending.delete(socketId) + } + } + } + + async onMessage(socket: IpcSocket, data: unknown): Promise { + if (!socket.id) return + + const result = await YupUtils.tryValidate(IpcRequestSchema, data) + + if (result.error) { + this.handleMalformedRequest(socket, data) + return + } + + const message = result.result + const node = this.node + const router = this.router + const server = this.server + + Assert.isNotNull(node) + Assert.isNotNull(router) + Assert.isNotNull(server) + + const request = new Request( + message.data, + node, + (status: number, data?: unknown) => { + this.emitResponse(socket, message.mid, status, data) + }, + (data: unknown) => { + this.emitStream(socket, message.mid, data) + }, + ) + + let pending = this.pending.get(socket.id) + if (!pending) { + pending = [] + this.pending.set(socket.id, pending) + } + + pending.push(request) + + try { + await router.route(message.type, request) + } catch (error: unknown) { + if (error instanceof ResponseError) { + this.emitResponse(socket, message.mid, error.status, this.renderError(error)) + } else throw error + } + } + + emitResponse(socket: IpcSocket, messageId: number, status: number, data: unknown): void { + Assert.isNotNull(this.server) + this.server.emit(socket, 'message', { id: messageId, status: status, data: 
data })
+  }
+
+  /** Sends one streaming payload for request `messageId` over the socket's 'stream' channel. */
+  emitStream(socket: IpcSocket, messageId: number, data: unknown): void {
+    Assert.isNotNull(this.server)
+    this.server.emit(socket, 'stream', { id: messageId, data: data })
+  }
+
+  /**
+   * Converts an arbitrary thrown value into the IpcError wire shape. Uses the
+   * Error's message/stack when available and the ResponseError's code when
+   * present; otherwise falls back to a generic message and ERROR_CODES.ERROR.
+   */
+  renderError(error: unknown): IpcError {
+    let message = 'An error has occurred'
+    let stack = undefined
+    let code: string = ERROR_CODES.ERROR
+
+    if (error instanceof Error) {
+      message = error.message
+      stack = error.stack
+    }
+
+    if (error instanceof ResponseError) {
+      code = error.code
+    }
+
+    return {
+      code: code,
+      message: message,
+      stack: stack,
+    }
+  }
+
+  /**
+   * Replies to a payload that failed IpcRequestSchema validation. If the raw
+   * payload still carries a numeric `id`, a 500 response is emitted for that
+   * id; otherwise a socket-level 'malformedRequest' event is emitted.
+   * NOTE(review): requests are keyed by `mid`, not `id` (see IpcRequestSchema)
+   * — confirm this lookup should not be 'mid'.
+   */
+  handleMalformedRequest(socket: IpcSocket, data: unknown): void {
+    Assert.isNotNull(this.server)
+    const error = this.renderError(new Error(`Malformed request rejected`))
+
+    if (
+      typeof data === 'object' &&
+      data !== null &&
+      'id' in data &&
+      typeof (data as { id: unknown })['id'] === 'number'
+    ) {
+      const id = (data as { id: unknown })['id'] as number
+      this.emitResponse(socket, id, 500, error)
+      return
+    }
+
+    this.server.emit(socket, 'malformedRequest', error)
+  }
+}
diff --git a/ironfish/src/rpc/adapters/memoryAdapter.ts b/ironfish/src/rpc/adapters/memoryAdapter.ts
new file mode 100644
index 0000000000..c2c8e2dab1
--- /dev/null
+++ b/ironfish/src/rpc/adapters/memoryAdapter.ts
@@ -0,0 +1,116 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/ +import { Assert } from '../../assert' +import { IAdapter } from './adapter' +import { Request } from '../request' +import { Response, ResponseEnded } from '../response' +import { Router, ALL_API_NAMESPACES } from '../routes' +import { RpcServer } from '../server' +import { PromiseUtils, SetTimeoutToken } from '../../utils' +import { Stream } from '../stream' + +/** + * This class provides a way to route requests directly against the routing layer + * return a response from the route The two methods are `request` and `requestStream` + * + * This is useful any time you want to make requests without hitting an IO layer. + */ +export class MemoryAdapter implements IAdapter { + server: RpcServer | null = null + router: Router | null = null + + start(): Promise { + return Promise.resolve() + } + + stop(): Promise { + return Promise.resolve() + } + + attach(server: RpcServer): void { + this.server = server + this.router = server.getRouter(ALL_API_NAMESPACES) + } + + unattach(): void { + this.server = null + this.router = null + } + + /** + * Makes a request against the routing layer with a given route, and data and waits + * for the response to end. 
This is used if you want to make a request against a route
+   * that starts and ends and doesn't stream forever
+   */
+  async request<TEnd = unknown, TStream = unknown>(
+    route: string,
+    data?: unknown,
+  ): Promise<ResponseEnded<TEnd>> {
+    return this.requestStream<TEnd, TStream>(route, data).waitForEnd()
+  }
+
+  /**
+   * Makes a request against the routing layer with a given route, and data and returns
+   * a response for you to accumulate the streaming results, or wait for a response
+   */
+  requestStream<TEnd = unknown, TStream = unknown>(
+    route: string,
+    data?: unknown,
+  ): MemoryResponse<TEnd, TStream> {
+    const router = this.router
+    const server = this.server
+
+    Assert.isNotNull(router)
+    Assert.isNotNull(server)
+
+    const [promise, resolve, reject] = PromiseUtils.split<TEnd>()
+    const stream = new Stream<TStream>()
+    const response = new MemoryResponse<TEnd, TStream>(promise, stream, null)
+
+    // The route's end callback closes the stream and settles the promise so
+    // both the streaming and the awaited-end consumption styles see completion.
+    const request = new Request(
+      data,
+      server.node,
+      (status: number, data?: unknown) => {
+        response.status = status
+        stream.close()
+        resolve(data as TEnd)
+      },
+      (data: unknown) => {
+        stream.write(data as TStream)
+      },
+    )
+
+    response.request = request
+
+    response.routePromise = router.route(route, request).catch((e) => {
+      stream.close()
+      reject(e)
+    })
+
+    return response
+  }
+}
+
+/**
+ * A Response whose request was routed in-process by MemoryAdapter. Exposes the
+ * underlying Request and the routing promise so callers can end the request
+ * directly or wait for routing to settle.
+ */
+export class MemoryResponse<TEnd, TStream> extends Response<TEnd, TStream> {
+  request: Request | null = null
+  routePromise: Promise<void> | null = null
+
+  constructor(
+    promise: Promise<TEnd>,
+    stream: Stream<TStream>,
+    timeout: SetTimeoutToken | null,
+  ) {
+    super(promise, stream, timeout)
+  }
+
+  end(...args: Parameters<Request['end']>): ReturnType<Request['end']> {
+    Assert.isNotNull(this.request)
+    // Spread the tuple so Request.end receives (data, status) positionally —
+    // passing the array itself would send the tuple as the response data.
+    return this.request.end(...args)
+  }
+
+  /** Waits for the routing promise (if any) to settle, then returns this response. */
+  async waitForRoute(): Promise<MemoryResponse<TEnd, TStream>> {
+    if (this.routePromise) {
+      await this.routePromise
+    }
+
+    return this
+  }
+}
diff --git a/ironfish/src/rpc/clients/errors.ts b/ironfish/src/rpc/clients/errors.ts
new file mode 100644
index 0000000000..2bbc718a0c
--- /dev/null
+++ b/ironfish/src/rpc/clients/errors.ts
@@ -0,0 +1,62 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0.
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Response } from '../response' + +/* + The errors in this file are to be used by RPC client implementations + to provide a common error interface to consumers of the Ironfish RPC + interface. Try to throw these errors when you are developing a client + so developers can rely on these errors. +*/ + +/** + * The base class for a connection related error. In case someone wants + * to log and handle any connection related issues. + */ +export abstract class ConnectionError extends Error {} + +/** + * Thrown when the connection attempt has failed for any reason. Most + * likely because the server is not running, the server is unreachable, + * the server is running on a different port, etc... + */ +export class ConnectionRefusedError extends ConnectionError {} + +/** Thrown when the connection is lost after you've successfully connected. + * + * @note In a stateless connection like HTTP this should happen after the request was sent out, but before the response has been returned. + * @note In a stateful connection like websockets or IPC, this should be thrown any time after you've connected when the connection has been disconnected unexpectly. 
*/ +export class ConnectionLostError extends ConnectionError {} + +/** Thrown when a response comes back with a code that is between 400 to 500 */ +export class RequestError extends Error { + response?: Response = undefined + status: number + code: string + codeMessage: string + codeStack: string | null + + constructor( + response: Response, + code: string, + codeMessage: string, + codeStack?: string, + ) { + super(`Request failed (${response.status}) ${code}: ${codeMessage}`) + + this.response = response + this.status = response.status + this.code = code + this.codeMessage = codeMessage + this.codeStack = codeStack || null + } +} + +/** Thrown when the request timeout has been exceeded and the request has been aborted */ +export class RequestTimeoutError extends RequestError { + constructor(response: Response, timeoutMs: number, route: string) { + super(response, 'request-timeout', `Timeout of ${timeoutMs} exceeded to ${route}`) + } +} diff --git a/ironfish/src/rpc/clients/index.ts b/ironfish/src/rpc/clients/index.ts new file mode 100644 index 0000000000..0d1577654a --- /dev/null +++ b/ironfish/src/rpc/clients/index.ts @@ -0,0 +1,7 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './errors' +export * from './ipcClient' +export * from './memoryClient' +export * from './rpcClient' diff --git a/ironfish/src/rpc/clients/ipcClient.ts b/ironfish/src/rpc/clients/ipcClient.ts new file mode 100644 index 0000000000..53faca9d72 --- /dev/null +++ b/ironfish/src/rpc/clients/ipcClient.ts @@ -0,0 +1,314 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Assert } from '../../assert' +import { IPC, IpcClient } from 'node-ipc' +import { IpcErrorSchema, IpcResponseSchema, IpcStreamSchema, IpcRequest } from '../adapters' +import { isResponseError, Response } from '../response' +import { PromiseUtils, SetTimeoutToken, YupUtils } from '../../utils' +import { + ConnectionLostError, + ConnectionRefusedError, + RequestTimeoutError, + RequestError, + ConnectionError, +} from './errors' +import { Stream } from '../stream' +import { Event } from '../../event' +import { createRootLogger, Logger } from '../../logger' +import { IronfishRpcClient } from './rpcClient' + +const CONNECT_RETRY_MS = 2000 +const REQUEST_TIMEOUT_MS = null + +function isConnectRefusedError(error: unknown): boolean { + return error instanceof Error && 'code' in error && error['code'] === 'ECONNREFUSED' +} +function isNoEntityError(error: unknown): boolean { + return error instanceof Error && 'code' in error && error['code'] === 'ENOENT' +} + +export type IpcClientConnectionInfo = + | { + mode: 'ipc' + socketPath: string + } + | { + mode: 'tcp' + host: string + port: number + } + +export class IronfishIpcClient extends IronfishRpcClient { + ipc: IPC | null = null + ipcPath: string | null = null + client: IpcClient | null = null + isConnecting = false + isConnected = false + messageIds = 0 + timeoutMs: number | null = REQUEST_TIMEOUT_MS + connection: Partial + retryConnect: boolean + + onError = new Event<[error: unknown]>() + + pending = new Map< + number, + { + response: Response + stream: Stream + timeout: SetTimeoutToken | null + resolve: (message: unknown) => void + reject: (error?: unknown) => void + type: string + } + >() + + constructor( + connection: Partial = {}, + logger: Logger = createRootLogger(), + retryConnect = false, + ) { + super(logger.withTag('ipcclient')) + this.connection = connection + this.retryConnect = retryConnect + } + + async connect( + { retryConnect = this.retryConnect }: { retryConnect?: boolean } = {}, + 
connection: Partial<IpcClientConnectionInfo> = {},
+  ): Promise<void> {
+    // NOTE(review): constructor-supplied connection fields take precedence
+    // over the per-call `connection` argument here — confirm this ordering
+    // is intended.
+    connection = { ...connection, ...this.connection }
+
+    if (connection.mode === 'ipc' && !connection.socketPath) {
+      throw new Error('No IPC socket path given to connect to.')
+    }
+
+    if (connection.mode === 'tcp' && (!connection.host || !connection.port)) {
+      throw new Error('No TCP host and port given to connect to.')
+    }
+
+    const { IPC } = await import('node-ipc')
+    const ipc = new IPC()
+    ipc.config.silent = true
+    ipc.config.stopRetrying = !retryConnect
+    ipc.config.retry = CONNECT_RETRY_MS
+    this.ipc = ipc
+
+    return new Promise((resolve, reject) => {
+      this.isConnecting = true
+
+      const onConnectTo = () => {
+        const client = ipc.of.server
+        this.client = client
+
+        const onConnect = () => {
+          client.off('error', onError)
+          client.off('connect', onConnect)
+          this.isConnected = true
+          this.isConnecting = false
+          this.onConnect()
+          resolve()
+        }
+
+        const onError = (error: unknown) => {
+          // node-ipc fires an error per failed attempt; keep waiting while
+          // retries remain and retrying has not been stopped.
+          if (client.retriesRemaining > 0 && !client.config.stopRetrying) {
+            return
+          }
+
+          this.isConnecting = false
+          client.off('error', onError)
+          client.off('connect', onConnect)
+
+          // ENOENT (missing socket file) is treated like ECONNREFUSED: the
+          // server simply isn't there.
+          if (isConnectRefusedError(error)) {
+            reject(new ConnectionRefusedError())
+          } else if (isNoEntityError(error)) {
+            reject(new ConnectionRefusedError())
+          } else {
+            reject(error)
+          }
+        }
+
+        client.on('connect', onConnect)
+        client.on('error', onError)
+      }
+
+      if (connection.mode === 'ipc') {
+        this.logger.debug(`Connecting to ${String(connection.socketPath)}`)
+        ipc.connectTo('server', connection.socketPath, onConnectTo)
+      } else if (connection.mode === 'tcp') {
+        this.logger.debug(`Connecting to ${String(connection.host)}:${String(connection.port)}`)
+        ipc.connectToNet('server', connection.host, connection.port, onConnectTo)
+      }
+    })
+  }
+
+  /** Like IpcClient.connect but doesn't throw an error if we cannot connect */
+  async tryConnect(): Promise<boolean> {
+    return this.connect({ retryConnect: false })
+      .then(() => true)
+      .catch((e: unknown) => {
+        if (e
instanceof ConnectionError) return false + throw e + }) + } + + disconnect(): void { + if (this.isConnected) { + this.ipc?.disconnect('server') + this.ipc = null + this.isConnected = false + } + } + + request( + route: string, + data?: unknown, + options: { + timeoutMs?: number | null + } = {}, + ): Response { + Assert.isNotNull(this.client) + + const [promise, resolve, reject] = PromiseUtils.split() + const messageId = ++this.messageIds + const stream = new Stream() + const timeoutMs = options.timeoutMs === undefined ? this.timeoutMs : options.timeoutMs + + let timeout: SetTimeoutToken | null = null + let response: Response | null = null + + if (timeoutMs !== null) { + timeout = setTimeout(() => { + const message = this.pending.get(messageId) + + if (message && response) { + message.reject(new RequestTimeoutError(response, timeoutMs, route)) + } + }, timeoutMs) + } + + const resolveRequest = (...args: Parameters): void => { + this.pending.delete(messageId) + if (timeout) clearTimeout(timeout) + stream.close() + resolve(...args) + } + + const rejectRequest = (...args: Parameters): void => { + this.pending.delete(messageId) + if (timeout) clearTimeout(timeout) + stream.close() + reject(...args) + } + + response = new Response(promise, stream, timeout) + + const pending = { + resolve: resolveRequest as (value: unknown) => void, + reject: rejectRequest, + timeout: timeout, + response: response as Response, + stream: stream as Stream, + type: route, + } + + this.pending.set(messageId, pending) + + const message: IpcRequest = { + mid: messageId, + type: route, + data: data, + } + + this.client.emit('message', message) + + return response + } + + protected onConnect(): void { + Assert.isNotNull(this.client) + this.client.on('disconnect', this.onDisconnect) + this.client.on('message', this.onMessage) + this.client.on('malformedRequest', this.onMalformedRequest) + this.client.on('stream', this.onStream) + this.client.on('error', this.onClientError) + } + + protected 
onDisconnect = (): void => { + Assert.isNotNull(this.client) + + this.isConnected = false + this.client.off('disconnect', this.onDisconnect) + this.client.off('message', this.onMessage) + this.client.off('malformedRequest', this.onMalformedRequest) + this.client.off('stream', this.onStream) + this.client.off('error', this.onClientError) + this.client = null + + for (const request of this.pending.values()) + request.reject(new ConnectionLostError(request.type)) + this.pending.clear() + } + + protected onClientError = (error: unknown): void => { + this.onError.emit(error) + } + + protected onMessage = (data: unknown): void => { + this.handleEnd(data).catch((e) => this.onError.emit(e)) + } + + protected onStream = (data: unknown): void => { + this.handleStream(data).catch((e) => this.onError.emit(e)) + } + + protected onMalformedRequest = (error: unknown): void => { + this.onError.emit(error) + } + + protected handleStream = async (data: unknown): Promise => { + const { result, error } = await YupUtils.tryValidate(IpcStreamSchema, data) + if (!result) throw error + + const pending = this.pending.get(result.id) + if (!pending) return + + pending.stream.write(result.data) + } + + protected handleEnd = async (data: unknown): Promise => { + const { result, error } = await YupUtils.tryValidate(IpcResponseSchema, data) + if (!result) throw error + + const pending = this.pending.get(result.id) + if (!pending) return + + pending.response.status = result.status + + if (isResponseError(pending.response)) { + const { result: errorBody, error: errorError } = await YupUtils.tryValidate( + IpcErrorSchema, + result.data, + ) + + if (errorBody) { + pending.reject( + new RequestError( + pending.response, + errorBody.code, + errorBody.message, + errorBody.stack, + ), + ) + } else if (errorError) { + pending.reject(errorError) + } else { + pending.reject(data) + } + return + } + + pending.resolve(result.data) + } +} diff --git a/ironfish/src/rpc/clients/memoryClient.ts 
b/ironfish/src/rpc/clients/memoryClient.ts new file mode 100644 index 0000000000..7ed2f27374 --- /dev/null +++ b/ironfish/src/rpc/clients/memoryClient.ts @@ -0,0 +1,41 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { createRootLogger, Logger } from '../../logger' +import { IronfishNode } from '../../node' +import { IronfishRpcClient } from './rpcClient' +import { MemoryAdapter } from '../adapters' +import { Response } from '../response' + +export class IronfishMemoryClient extends IronfishRpcClient { + node: IronfishNode | null = null + adapter: MemoryAdapter + + constructor(logger: Logger = createRootLogger()) { + super(logger.withTag('memoryclient')) + this.adapter = new MemoryAdapter() + } + + async connect(node: IronfishNode): Promise { + if (node === this.node) return + this.node = node + await node.rpc.mount(this.adapter) + } + + async disconnect(): Promise { + if (this.node) { + await this.node.rpc.unmount(this.adapter) + } + } + + request( + route: string, + data?: unknown, + options: { + timeoutMs?: number | null + } = {}, + ): Response { + if (options.timeoutMs) throw new Error(`MemoryAdapter does not support timeoutMs`) + return this.adapter.requestStream(route, data) + } +} diff --git a/ironfish/src/rpc/clients/rpcClient.ts b/ironfish/src/rpc/clients/rpcClient.ts new file mode 100644 index 0000000000..37d1e9514e --- /dev/null +++ b/ironfish/src/rpc/clients/rpcClient.ts @@ -0,0 +1,215 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Response, ResponseEnded } from '../response' +import { Logger } from '../../logger' +import { + GetAccountsRequest, + GetAccountsResponse, + GetBalanceRequest, + GetBalanceResponse, + GetPublicKeyRequest, + GetPublicKeyResponse, + GetBlockRequest, + GetBlockResponse, + GetChainInfoRequest, + GetChainInfoResponse, + GetPeersRequest, + GetPeersResponse, + GetStatusRequest, + GetStatusResponse, + NewBlocksStreamRequest, + NewBlocksStreamResponse, + SuccessfullyMinedRequest, + SuccessfullyMinedResponse, + SendTransactionRequest, + SendTransactionResponse, + CreateAccountRequest, + CreateAccountResponse, + GiveMeRequest, + GiveMeResponse, + GetLogStreamResponse, + StopNodeResponse, + GetChainRequest, + GetChainResponse, + GetConfigRequest, + GetConfigResponse, + SetConfigRequest, + SetConfigResponse, + UseAccountRequest, + UseAccountResponse, + UploadConfigRequest, + UploadConfigResponse, + GetDefaultAccountRequest, + GetDefaultAccountResponse, + GetBlockInfoRequest, + GetBlockInfoResponse, +} from '../routes' +import { RemoveAccountRequest, RemoveAccountResponse } from '../routes/accounts/removeAccount' +import { ExportAccountRequest, ExportAccountResponse } from '../routes/accounts/exportAccount' +import { ImportAccountRequest, ImportAccountResponse } from '../routes/accounts/importAccount' +import { RescanAccountRequest, RescanAccountResponse } from '../routes/accounts/rescanAccount' + +export abstract class IronfishRpcClient { + readonly logger: Logger + + constructor(logger: Logger) { + this.logger = logger + } + + abstract request( + route: string, + data?: unknown, + options?: { timeoutMs?: number | null }, + ): Response + + async status( + params: GetStatusRequest = undefined, + ): Promise> { + return this.request('node/getStatus', params).waitForEnd() + } + + statusStream(): Response { + return this.request('node/getStatus', { stream: true }) + } + + async stopNode(): Promise> { + return this.request('node/stopNode').waitForEnd() + } + + 
getLogStream(): Response { + return this.request('node/getLogStream') + } + + async getAccounts( + params: GetAccountsRequest = undefined, + ): Promise> { + return await this.request('account/getAccounts', params).waitForEnd() + } + + async getDefaultAccount( + params: GetDefaultAccountRequest = undefined, + ): Promise> { + return await this.request( + 'account/getDefaultAccount', + params, + ).waitForEnd() + } + + async createAccount( + params: CreateAccountRequest, + ): Promise> { + return await this.request('account/create', params).waitForEnd() + } + + async useAccount(params: UseAccountRequest): Promise> { + return await this.request('account/use', params).waitForEnd() + } + + async removeAccount( + params: RemoveAccountRequest, + ): Promise> { + return await this.request('account/remove', params).waitForEnd() + } + + async getAccountBalance( + params: GetBalanceRequest = {}, + ): Promise> { + return this.request('account/getBalance', params).waitForEnd() + } + + rescanAccountStream( + params: RescanAccountRequest = {}, + ): Response { + return this.request('account/rescanAccount', params) + } + + async exportAccount( + params: ExportAccountRequest = {}, + ): Promise> { + return this.request('account/exportAccount', params).waitForEnd() + } + + async importAccount( + params: ImportAccountRequest, + ): Promise> { + return this.request('account/importAccount', params).waitForEnd() + } + + async getAccountPublicKey( + params: GetPublicKeyRequest, + ): Promise> { + return this.request('account/getPublicKey', params).waitForEnd() + } + + async getPeers( + params: GetPeersRequest = undefined, + ): Promise> { + return this.request('peer/getPeers', params).waitForEnd() + } + + getPeersStream(params: GetPeersRequest = undefined): Response { + return this.request('peer/getPeers', { ...params, stream: true }) + } + + async sendTransaction( + params: SendTransactionRequest, + ): Promise> { + return this.request( + 'transaction/sendTransaction', + params, + ).waitForEnd() 
+ } + + newBlocksStream( + params: NewBlocksStreamRequest = undefined, + ): Response { + return this.request('miner/newBlocksStream', params) + } + + successfullyMined(params: SuccessfullyMinedRequest): Response { + return this.request('miner/successfullyMined', params) + } + + async giveMeFaucet(params: GiveMeRequest): Promise> { + return this.request('faucet/giveMe', params).waitForEnd() + } + + async getBlock(params: GetBlockRequest): Promise> { + return this.request('chain/getBlock', params).waitForEnd() + } + + async getChainInfo( + params: GetChainInfoRequest, + ): Promise> { + return this.request('chain/getChainInfo', params).waitForEnd() + } + + async getBlockInfo( + params: GetBlockInfoRequest, + ): Promise> { + return this.request('chain/getBlockInfo', params).waitForEnd() + } + + async getChain( + params: GetChainRequest = undefined, + ): Promise> { + return this.request('chain/getChain', params).waitForEnd() + } + + async getConfig( + params: GetConfigRequest = undefined, + ): Promise> { + return this.request('config/getConfig', params).waitForEnd() + } + + async setConfig(params: SetConfigRequest): Promise> { + return this.request('config/setConfig', params).waitForEnd() + } + + async uploadConfig( + params: UploadConfigRequest, + ): Promise> { + return this.request('config/uploadConfig', params).waitForEnd() + } +} diff --git a/ironfish/src/rpc/index.ts b/ironfish/src/rpc/index.ts new file mode 100644 index 0000000000..52d906eb7f --- /dev/null +++ b/ironfish/src/rpc/index.ts @@ -0,0 +1,9 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/
+export * from './adapters'
+export * from './clients'
+export * from './response'
+export * from './routes'
+export * from './server'
+export * from './stream'
diff --git a/ironfish/src/rpc/request.ts b/ironfish/src/rpc/request.ts
new file mode 100644
index 0000000000..088c4f8b58
--- /dev/null
+++ b/ironfish/src/rpc/request.ts
@@ -0,0 +1,53 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+import { IronfishNode } from '../node'
+import { Event } from '../event'
+
+/**
+ * Server-side representation of a single in-flight RPC request. Holds the
+ * request payload and node reference, and forwards results to the adapter
+ * via the onEnd/onStream callbacks supplied by its constructor.
+ *
+ * NOTE(review): the class's generic parameter list (e.g. <TRequest, TResponse>)
+ * appears to have been lost in extraction — TRequest/TResponse below are
+ * otherwise undeclared; confirm against the original file.
+ */
+export class Request {
+  data: TRequest
+  node: IronfishNode
+  // ended: end() has been called; further end()/stream() calls throw.
+  ended = false
+  // closed: the transport went away; end()/stream() become silent no-ops.
+  closed = false
+  // Optional status override set via status(); used by end() when no explicit
+  // status argument is given.
+  code: number | null = null
+  onEnd: (status: number, data?: TResponse) => void
+  onStream: (data?: TResponse) => void
+  onClose = new Event<[]>()
+
+  constructor(
+    data: TRequest,
+    node: IronfishNode,
+    onEnd: (status: number, data?: unknown) => void,
+    onStream: (data?: unknown) => void,
+  ) {
+    this.data = data
+    this.node = node
+    this.onEnd = onEnd
+    this.onStream = onStream
+  }
+
+  /** Sets the default status used by a later end() call; chainable. */
+  status(code: number): Request {
+    this.code = code
+    return this
+  }
+
+  /**
+   * Finishes the request exactly once. Status precedence: explicit argument,
+   * then the status() override, then 200. If the transport already closed,
+   * the request is marked ended but no response is sent.
+   */
+  end(data?: TResponse, status?: number): void {
+    if (this.ended) throw new Error(`Request has already ended`)
+    this.ended = true
+    if (this.closed) return
+    // Clear close listeners first so onEnd cannot race a close notification.
+    this.onClose.clear()
+    this.onEnd(status || this.code || 200, data)
+  }
+
+  /** Emits one streaming payload; silently dropped after close(), throws after end(). */
+  stream(data: TResponse): void {
+    if (this.closed) return
+    if (this.ended) throw new Error(`Request has already ended`)
+    this.onStream(data)
+  }
+
+  /** Marks the transport gone and notifies onClose subscribers. */
+  close(): void {
+    this.closed = true
+    this.onClose.emit()
+  }
+}
diff --git a/ironfish/src/rpc/response.ts b/ironfish/src/rpc/response.ts
new file mode 100644
index 0000000000..13100d443f
--- /dev/null
+++ b/ironfish/src/rpc/response.ts
@@ -0,0 +1,60 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0.
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { SetTimeoutToken } from '../utils' +import { ConnectionLostError } from './clients' +import { Stream } from './stream' + +export function isResponseError(response: Response): boolean { + return isResponseUserError(response) || isResponseServerError(response) +} + +export function isResponseServerError(response: Response): boolean { + return response.status >= 500 && response.status <= 599 +} + +export function isResponseUserError(response: Response): boolean { + return response.status >= 400 && response.status <= 499 +} + +export type ResponseEnded = Exclude, 'content'> & { content: TEnd } + +export class Response { + private promise: Promise + private stream: Stream + private timeout: SetTimeoutToken | null + + status = 0 + content: TEnd | null = null + + constructor( + promise: Promise, + stream: Stream, + timeout: SetTimeoutToken | null, + ) { + this.promise = promise + this.stream = stream + this.timeout = timeout + } + + async waitForEnd(): Promise> { + this.content = await this.promise + return this as ResponseEnded + } + + async *contentStream(ignoreClose = true): AsyncGenerator { + if (this.timeout) { + clearTimeout(this.timeout) + } + + for await (const value of this.stream) { + yield value + } + + await this.promise.catch((e) => { + if (e instanceof ConnectionLostError && ignoreClose) return + throw e + }) + } +} diff --git a/ironfish/src/rpc/routes/accounts/create.test.slow.ts b/ironfish/src/rpc/routes/accounts/create.test.slow.ts new file mode 100644 index 0000000000..75f4d6dc44 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/create.test.slow.ts @@ -0,0 +1,75 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +/* eslint-disable @typescript-eslint/no-unsafe-member-access */ +/* eslint-disable @typescript-eslint/no-explicit-any */ + +import { ERROR_CODES, ResponseError } from '../../adapters' +import { createRouteTest } from '../test' +import { v4 as uuid } from 'uuid' + +describe('Route account/create', () => { + jest.setTimeout(15000) + const routeTest = createRouteTest() + it('should create an account', async () => { + await routeTest.node.accounts.createAccount('existingAccount', true) + + const name = uuid() + + const response = await routeTest.adapter.request('account/create', { name }) + expect(response.status).toBe(200) + expect(response.content).toMatchObject({ + name: name, + publicAddress: expect.any(String), + isDefaultAccount: false, + }) + + const account = routeTest.node.accounts.getAccountByName(name) + expect(account).toMatchObject({ + name: name, + publicAddress: response.content.publicAddress, + }) + }) + + it('should set the account as default', async () => { + await routeTest.node.accounts.setDefaultAccount(null) + + const name = uuid() + + const response = await routeTest.adapter.request('account/create', { name }) + expect(response.content).toMatchObject({ + name: name, + publicAddress: expect.any(String), + isDefaultAccount: true, + }) + expect(routeTest.node.accounts.getDefaultAccount()?.name).toBe(name) + }) + + it('should validate request', async () => { + try { + expect.assertions(3) + await routeTest.adapter.request('account/create') + } catch (e: unknown) { + if (!(e instanceof ResponseError)) throw e + expect(e.status).toBe(400) + expect(e.code).toBe(ERROR_CODES.VALIDATION) + expect(e.message).toContain('name') + } + }) + + it('should fail if name already exists', async () => { + const name = uuid() + + await routeTest.node.accounts.createAccount(name) + + try { + expect.assertions(2) + await routeTest.adapter.request('account/create', { name: name }) + } catch (e: unknown) { + if (!(e instanceof ResponseError)) throw e + 
expect(e.status).toBe(400) + expect(e.code).toBe(ERROR_CODES.ACCOUNT_EXISTS) + } + }) +}) diff --git a/ironfish/src/rpc/routes/accounts/create.ts b/ironfish/src/rpc/routes/accounts/create.ts new file mode 100644 index 0000000000..c4f3bf97b6 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/create.ts @@ -0,0 +1,58 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { ApiNamespace, router } from '../router' +import { ERROR_CODES, ValidationError } from '../../adapters' +import * as yup from 'yup' + +export type CreateAccountRequest = { name: string; default?: boolean } +export type CreateAccountResponse = { + name: string + publicAddress: string + isDefaultAccount: boolean +} + +export const CreateAccountRequestSchema: yup.ObjectSchema = yup + .object({ + name: yup.string().defined(), + default: yup.boolean().optional(), + }) + .defined() + +export const CreateAccountResponseSchema: yup.ObjectSchema = yup + .object({ + name: yup.string().defined(), + publicAddress: yup.string().defined(), + isDefaultAccount: yup.boolean().defined(), + }) + .defined() + +router.register( + `${ApiNamespace.account}/create`, + CreateAccountRequestSchema, + async (request, node): Promise => { + const name = request.data.name + + if (node.accounts.accountExists(name)) { + throw new ValidationError( + `There is already an account with the name ${name}`, + 400, + ERROR_CODES.ACCOUNT_EXISTS, + ) + } + + const account = await node.accounts.createAccount(name) + + let isDefaultAccount = false + if (!node.accounts.hasDefaultAccount || request.data.default) { + await node.accounts.setDefaultAccount(name) + isDefaultAccount = true + } + + request.end({ + name: account.name, + publicAddress: account.publicAddress, + isDefaultAccount, + }) + }, +) diff --git a/ironfish/src/rpc/routes/accounts/exportAccount.ts 
b/ironfish/src/rpc/routes/accounts/exportAccount.ts new file mode 100644 index 0000000000..d45b600953 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/exportAccount.ts @@ -0,0 +1,46 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { ApiNamespace, router } from '../router' +import * as yup from 'yup' +import { getAccount } from './utils' + +export type ExportAccountRequest = { account?: string } +export type ExportAccountResponse = { + account: { + name: string + spendingKey: string + incomingViewKey: string + outgoingViewKey: string + publicAddress: string + } +} + +export const ExportAccountRequestSchema: yup.ObjectSchema = yup + .object({ + account: yup.string().strip(true), + }) + .defined() + +export const ExportAccountResponseSchema: yup.ObjectSchema = yup + .object({ + account: yup + .object({ + name: yup.string().defined(), + spendingKey: yup.string().defined(), + incomingViewKey: yup.string().defined(), + outgoingViewKey: yup.string().defined(), + publicAddress: yup.string().defined(), + }) + .defined(), + }) + .defined() + +router.register( + `${ApiNamespace.account}/exportAccount`, + ExportAccountRequestSchema, + (request, node): void => { + const account = getAccount(node, request.data.account) + request.end({ account: account }) + }, +) diff --git a/ironfish/src/rpc/routes/accounts/getAccounts.ts b/ironfish/src/rpc/routes/accounts/getAccounts.ts new file mode 100644 index 0000000000..c1d925e0bd --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/getAccounts.ts @@ -0,0 +1,41 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ApiNamespace, router } from '../router' +import * as yup from 'yup' +import { Account } from '../../../account' + +// eslint-disable-next-line @typescript-eslint/ban-types +export type GetAccountsRequest = { default?: boolean } | undefined +export type GetAccountsResponse = { accounts: string[] } + +export const GetAccountsRequestSchema: yup.ObjectSchema = yup + .object({ + default: yup.boolean().optional(), + }) + .notRequired() + .default({}) + +export const GetAccountsResponseSchema: yup.ObjectSchema = yup + .object({ + accounts: yup.array(yup.string().defined()).defined(), + }) + .defined() + +router.register( + `${ApiNamespace.account}/getAccounts`, + GetAccountsRequestSchema, + (request, node): void => { + let accounts: Account[] = [] + + if (request.data?.default) { + const defaultAccount = node.accounts.getDefaultAccount() + if (defaultAccount) accounts = [defaultAccount] + } else { + accounts = node.accounts.listAccounts() + } + + const names = accounts.map((a) => a.name) + request.end({ accounts: names }) + }, +) diff --git a/ironfish/src/rpc/routes/accounts/getBalance.ts b/ironfish/src/rpc/routes/accounts/getBalance.ts new file mode 100644 index 0000000000..282cfcfc31 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/getBalance.ts @@ -0,0 +1,35 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ApiNamespace, router } from '../router' +import * as yup from 'yup' +import { getAccount } from './utils' + +export type GetBalanceRequest = { account?: string } +export type GetBalanceResponse = { confirmedBalance: string; unconfirmedBalance: string } + +export const GetBalanceRequestSchema: yup.ObjectSchema = yup + .object({ + account: yup.string().strip(true), + }) + .defined() + +export const GetBalanceResponseSchema: yup.ObjectSchema = yup + .object({ + unconfirmedBalance: yup.string().defined(), + confirmedBalance: yup.string().defined(), + }) + .defined() + +router.register( + `${ApiNamespace.account}/getBalance`, + GetBalanceRequestSchema, + (request, node): void => { + const account = getAccount(node, request.data.account) + const { confirmedBalance, unconfirmedBalance } = node.accounts.getBalance(account) + request.end({ + confirmedBalance: confirmedBalance.toString(), + unconfirmedBalance: unconfirmedBalance.toString(), + }) + }, +) diff --git a/ironfish/src/rpc/routes/accounts/getDefaultAccount.ts b/ironfish/src/rpc/routes/accounts/getDefaultAccount.ts new file mode 100644 index 0000000000..7e5aebab47 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/getDefaultAccount.ts @@ -0,0 +1,34 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ApiNamespace, router } from '../router' +import * as yup from 'yup' + +// eslint-disable-next-line @typescript-eslint/ban-types +export type GetDefaultAccountRequest = {} | undefined +export type GetDefaultAccountResponse = { account: { name: string } | null } + +export const GetDefaultAccountRequestSchema: yup.ObjectSchema = yup + .object({}) + .notRequired() + .default({}) + +export const GetDefaultAccountResponseSchema: yup.ObjectSchema = yup + .object({ + account: yup + .object({ + name: yup.string().defined(), + }) + .nullable() + .defined(), + }) + .defined() + +router.register( + `${ApiNamespace.account}/getDefaultAccount`, + GetDefaultAccountRequestSchema, + (request, node): void => { + const account = node.accounts.getDefaultAccount() + request.end({ account: account ? { name: account.name } : null }) + }, +) diff --git a/ironfish/src/rpc/routes/accounts/getPublicKey.test.ts b/ironfish/src/rpc/routes/accounts/getPublicKey.test.ts new file mode 100644 index 0000000000..da7ab73be3 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/getPublicKey.test.ts @@ -0,0 +1,55 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +/* eslint-disable @typescript-eslint/no-unsafe-member-access */ +/* eslint-disable @typescript-eslint/no-explicit-any */ + +import { createRouteTest } from '../test' +import { v4 as uuid } from 'uuid' +import { Account } from '../../../account' +import * as wasm from 'ironfish-wasm-nodejs' + +jest.mock('ironfish-wasm-nodejs', () => { + const moduleMock = jest.requireActual('ironfish-wasm-nodejs') + return { + ...moduleMock, + generateNewPublicAddress: jest.fn().mockReturnValue({ publicAddress: 'newkey' }), + } +}) + +describe('Route account/getPublicKey', () => { + const routeTest = createRouteTest() + let account = {} as Account + let publicAddress = '' + + beforeAll(async () => { + account = await routeTest.node.accounts.createAccount(uuid()) + await routeTest.node.accounts.setDefaultAccount(account.name) + publicAddress = account.publicAddress + }) + + it('should return the account data', async () => { + const response = await routeTest.adapter.request('account/getPublicKey', { + account: account.name, + generate: false, + }) + + expect(response.status).toBe(200) + expect(response.content).toMatchObject({ + account: account.name, + publicKey: publicAddress, + }) + }) + + it('should regenerate the account key', async () => { + const response = await routeTest.adapter.request('account/getPublicKey', { + account: account.name, + generate: true, + }) + + expect(response.status).toBe(200) + expect(response.content.account).toEqual(account.name) + expect(response.content.publicAddress).not.toEqual(publicAddress) + }) +}) diff --git a/ironfish/src/rpc/routes/accounts/getPublicKey.ts b/ironfish/src/rpc/routes/accounts/getPublicKey.ts new file mode 100644 index 0000000000..6f1f4dd4d7 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/getPublicKey.ts @@ -0,0 +1,40 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { ApiNamespace, router } from '../router' +import * as yup from 'yup' +import { getAccount } from './utils' + +export type GetPublicKeyRequest = { account?: string; generate: boolean } +export type GetPublicKeyResponse = { account: string; publicKey: string } + +export const GetPublicKeyRequestSchema: yup.ObjectSchema = yup + .object({ + account: yup.string().strip(true), + generate: yup.boolean().defined(), + }) + .defined() + +export const GetPublicKeyResponseSchema: yup.ObjectSchema = yup + .object({ + account: yup.string().defined(), + publicKey: yup.string().defined(), + }) + .defined() + +router.register( + `${ApiNamespace.account}/getPublicKey`, + GetPublicKeyRequestSchema, + async (request, node): Promise => { + const account = getAccount(node, request.data.account) + + if (request.data.generate) { + await node.accounts.generateNewPublicAddress(account) + } + + request.end({ + account: account.name, + publicKey: account.publicAddress, + }) + }, +) diff --git a/ironfish/src/rpc/routes/accounts/importAccount.ts b/ironfish/src/rpc/routes/accounts/importAccount.ts new file mode 100644 index 0000000000..49aa3b31f7 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/importAccount.ts @@ -0,0 +1,63 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ApiNamespace, router } from '../router' +import * as yup from 'yup' + +export type ImportAccountRequest = { + account: { + name: string + spendingKey: string + incomingViewKey: string + outgoingViewKey: string + publicAddress: string + } + rescan?: boolean +} + +export type ImportAccountResponse = { + name: string + isDefaultAccount: boolean +} + +export const ImportAccountRequestSchema: yup.ObjectSchema = yup + .object({ + rescan: yup.boolean().optional().default(true), + account: yup + .object({ + name: yup.string().defined(), + spendingKey: yup.string().defined(), + incomingViewKey: yup.string().defined(), + outgoingViewKey: yup.string().defined(), + publicAddress: yup.string().defined(), + }) + .defined(), + }) + .defined() + +export const ImportAccountResponseSchema: yup.ObjectSchema = yup + .object({ + name: yup.string().defined(), + isDefaultAccount: yup.boolean().defined(), + }) + .defined() + +router.register( + `${ApiNamespace.account}/importAccount`, + ImportAccountRequestSchema, + async (request, node): Promise => { + const account = await node.accounts.importAccount(request.data.account) + void node.accounts.startScanTransactionsFor(node.captain.chain, account) + + let isDefaultAccount = false + if (!node.accounts.hasDefaultAccount) { + await node.accounts.setDefaultAccount(account.name) + isDefaultAccount = true + } + + request.end({ + name: account.name, + isDefaultAccount, + }) + }, +) diff --git a/ironfish/src/rpc/routes/accounts/index.ts b/ironfish/src/rpc/routes/accounts/index.ts new file mode 100644 index 0000000000..62d4ac1738 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/index.ts @@ -0,0 +1,14 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export * from './create' +export * from './exportAccount' +export * from './getAccounts' +export * from './getDefaultAccount' +export * from './getBalance' +export * from './getPublicKey' +export * from './importAccount' +export * from './removeAccount' +export * from './rescanAccount' +export * from './useAccount' diff --git a/ironfish/src/rpc/routes/accounts/removeAccount.ts b/ironfish/src/rpc/routes/accounts/removeAccount.ts new file mode 100644 index 0000000000..93756d7b2c --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/removeAccount.ts @@ -0,0 +1,53 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { ApiNamespace, router } from '../router' +import { ValidationError } from '../../adapters' +import * as yup from 'yup' + +export type RemoveAccountRequest = { name: string; confirm?: boolean } +export type RemoveAccountResponse = { needsConfirm?: boolean } + +export const RemoveAccountRequestSchema: yup.ObjectSchema = yup + .object({ + name: yup.string().defined(), + confirm: yup.boolean().optional(), + }) + .defined() + +export const RemoveAccountResponseSchema: yup.ObjectSchema = yup + .object({ + needsConfirm: yup.boolean().optional(), + }) + .defined() + +router.register( + `${ApiNamespace.account}/remove`, + RemoveAccountRequestSchema, + async (request, node): Promise => { + const name = request.data.name + const account = node.accounts.getAccountByName(name) + + if (!account) { + throw new ValidationError( + `There is no account with the name ${name}. 
Options are:\n` + + node.accounts + .listAccounts() + .map((a) => a.name) + .join('\n'), + ) + } + + if (!request.data.confirm) { + const balance = node.accounts.getBalance(account) + + if (balance.unconfirmedBalance !== BigInt(0)) { + request.end({ needsConfirm: true }) + return + } + } + + await node.accounts.removeAccount(account.name) + request.end({}) + }, +) diff --git a/ironfish/src/rpc/routes/accounts/rescanAccount.ts b/ironfish/src/rpc/routes/accounts/rescanAccount.ts new file mode 100644 index 0000000000..c477d3d722 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/rescanAccount.ts @@ -0,0 +1,57 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { ApiNamespace, router } from '../router' +import * as yup from 'yup' +import { ValidationError } from '../../adapters/errors' + +export type RescanAccountRequest = { follow?: boolean; reset?: boolean } +export type RescanAccountResponse = { sequence: number } + +export const RescanAccountRequestSchema: yup.ObjectSchema = yup + .object({ + follow: yup.boolean().optional(), + reset: yup.boolean().optional(), + }) + .defined() + +export const RescanAccountResponseSchema: yup.ObjectSchema = yup + .object({ + sequence: yup.number().defined(), + }) + .defined() + +router.register( + `${ApiNamespace.account}/rescanAccount`, + RescanAccountRequestSchema, + async (request, node): Promise => { + let scan = node.accounts.scan + + if (scan && !request.data.follow) { + throw new ValidationError(`A transaction rescan is already running`) + } + + if (!scan) { + if (request.data.reset) { + await node.accounts.reset() + } + void node.accounts.scanTransactions(node.captain.chain) + scan = node.accounts.scan + } + + if (scan && request.data.follow) { + const onTransaction = (sequence: BigInt) => { + request.stream({ sequence: Number(sequence) }) + } + + 
scan.onTransaction.on(onTransaction) + request.onClose.on(() => { + scan?.onTransaction.off(onTransaction) + }) + + await scan.wait() + } + + request.end() + }, +) diff --git a/ironfish/src/rpc/routes/accounts/useAccount.ts b/ironfish/src/rpc/routes/accounts/useAccount.ts new file mode 100644 index 0000000000..67331a4a61 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/useAccount.ts @@ -0,0 +1,41 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { ApiNamespace, router } from '../router' +import { ValidationError } from '../../adapters' +import * as yup from 'yup' + +export type UseAccountRequest = { name: string } +export type UseAccountResponse = undefined + +export const UseAccountRequestSchema: yup.ObjectSchema = yup + .object({ + name: yup.string().defined(), + }) + .defined() + +export const UseAccountResponseSchema: yup.MixedSchema = yup + .mixed() + .oneOf([undefined] as const) + +router.register( + `${ApiNamespace.account}/use`, + UseAccountRequestSchema, + async (request, node): Promise => { + const name = request.data.name + const account = node.accounts.getAccountByName(name) + + if (!account) { + throw new ValidationError( + `There is no account with the name ${name}. Options are:\n` + + node.accounts + .listAccounts() + .map((a) => a.name) + .join('\n'), + ) + } + + await node.accounts.setDefaultAccount(account.name) + request.end() + }, +) diff --git a/ironfish/src/rpc/routes/accounts/utils.test.ts b/ironfish/src/rpc/routes/accounts/utils.test.ts new file mode 100644 index 0000000000..dd1454a816 --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/utils.test.ts @@ -0,0 +1,46 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { createRouteTest } from '../test' +import { getAccount } from './utils' + +describe('Accounts utils', () => { + describe('getAccount', () => { + const routeTest = createRouteTest() + const name = 'testAccount' + let publicAddress = '' + + beforeAll(async () => { + const account = await routeTest.node.accounts.createAccount(name) + publicAddress = account.publicAddress + }) + + it('should fail if account is not found with name', () => { + expect(() => { + getAccount(routeTest.node, 'badAccount') + }).toThrow('No account with name') + }) + + it('should pass if account is found with name', () => { + const result = getAccount(routeTest.node, name) + expect(result.name).toEqual(name) + expect(result.publicAddress).toEqual(publicAddress) + }) + + it('should fail if no default account account is set', async () => { + await routeTest.node.accounts.setDefaultAccount(null) + + expect(() => { + getAccount(routeTest.node) + }).toThrow('No account is currently active') + }) + + it('should pass if default account is found', async () => { + await routeTest.node.accounts.setDefaultAccount(name) + const result = getAccount(routeTest.node) + expect(result.name).toEqual(name) + expect(result.publicAddress).toEqual(publicAddress) + }) + }) +}) diff --git a/ironfish/src/rpc/routes/accounts/utils.ts b/ironfish/src/rpc/routes/accounts/utils.ts new file mode 100644 index 0000000000..4073fd560e --- /dev/null +++ b/ironfish/src/rpc/routes/accounts/utils.ts @@ -0,0 +1,22 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ValidationError } from '../../adapters' +import { IronfishNode } from '../../../node' +import { Account } from '../../../account' + +export function getAccount(node: IronfishNode, name?: string): Account { + if (name) { + const account = node.accounts.getAccountByName(name) + if (account) return account + throw new ValidationError(`No account with name ${name}`) + } + + const defaultAccount = node.accounts.getDefaultAccount() + if (defaultAccount) return defaultAccount + + throw new ValidationError( + `No account is currently active.\n\n` + + `Use ironfish accounts:create to first create an account`, + ) +} diff --git a/ironfish/src/rpc/routes/chain/__snapshots__/getBlock.test.ts.snap b/ironfish/src/rpc/routes/chain/__snapshots__/getBlock.test.ts.snap new file mode 100644 index 0000000000..bcfac63362 Binary files /dev/null and b/ironfish/src/rpc/routes/chain/__snapshots__/getBlock.test.ts.snap differ diff --git a/ironfish/src/rpc/routes/chain/getBlock.test.ts b/ironfish/src/rpc/routes/chain/getBlock.test.ts new file mode 100644 index 0000000000..3cc1d72a96 --- /dev/null +++ b/ironfish/src/rpc/routes/chain/getBlock.test.ts @@ -0,0 +1,85 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { createRouteTest } from '../test' +import { RangeHasher } from '../../../captain' +import { makeFakeBlock, TestStrategy, TestTransaction } from '../../../captain/testUtilities' + +describe('Route chain.getBlock', () => { + const genesisHashBuffer = Buffer.alloc(32, 'genesis') + const parentHashBuffer = Buffer.alloc(32, 'parent') + const currentHashBuffer = Buffer.alloc(32, 'current') + const routeTest = createRouteTest() + const mock = jest.fn() + const strategy = new TestStrategy(new RangeHasher()) + const blockParent = makeFakeBlock(strategy, genesisHashBuffer, parentHashBuffer, 1, 1, 2) + const block = makeFakeBlock(strategy, parentHashBuffer, currentHashBuffer, 2, 3, 5) + + block.transactions = [ + new TestTransaction(true, [], 5, [ + { nullifier: Buffer.alloc(32), commitment: 'One', size: 1 }, + ]), + ] + + beforeAll(() => { + mock.mockImplementation((hash: Buffer) => { + if (hash.equals(currentHashBuffer)) { + return block + } + if (hash.equals(parentHashBuffer)) { + return blockParent + } + }) + + routeTest.node.captain.strategy.transactionSerde = jest.fn().mockReturnValue({ + serialize: jest.fn(() => 'transactionSerialized'), + }) + routeTest.node.captain.chain.getBlock = mock + routeTest.node.captain.chain.getAtSequence = jest + .fn() + .mockImplementation((sequence: BigInt) => + sequence === BigInt(2) ? 
[currentHashBuffer] : [], + ) + routeTest.node.captain.blockSerde.serialize = jest.fn().mockReturnValue('block') + routeTest.node.captain.chain.blockHashSerde.serialize = jest.fn((value) => value.toString()) + routeTest.node.captain.chain.blockHashSerde.deserialize = jest.fn((value) => + Buffer.from(value), + ) + }) + + it('should fail if no sequence or hash provided', async () => { + await expect(routeTest.adapter.request('chain/getBlock', {})).rejects.toThrow( + 'Missing hash or sequence', + ) + }) + + it(`should fail if block can't be found with hash`, async () => { + await expect( + routeTest.adapter.request('chain/getBlock', { hash: 'blockHashNotFound' }), + ).rejects.toThrow('No block found') + }) + + it(`should fail if block can't be found with sequence`, async () => { + await expect(routeTest.adapter.request('chain/getBlock', { index: 5 })).rejects.toThrow( + 'No block found', + ) + }) + + it('returns the right object with hash', async () => { + const response = await routeTest.adapter.request('chain/getBlock', { + hash: currentHashBuffer.toString(), + }) + // called the node for the current block + expect(mock).toHaveBeenCalledWith(currentHashBuffer) + // called the node for the parent block + expect(mock).toHaveBeenCalledWith(parentHashBuffer) + + expect(response.content).toMatchSnapshot() + }) + + it('returns the right object with sequence', async () => { + const response = await routeTest.adapter.request('chain/getBlock', { index: 2 }) + expect(response.content).toMatchSnapshot() + }) +}) diff --git a/ironfish/src/rpc/routes/chain/getBlock.ts b/ironfish/src/rpc/routes/chain/getBlock.ts new file mode 100644 index 0000000000..d4f9b54ef4 --- /dev/null +++ b/ironfish/src/rpc/routes/chain/getBlock.ts @@ -0,0 +1,203 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import * as yup from 'yup' + +import { ApiNamespace, router } from '../router' +import { ValidationError } from '../../adapters' +import { GENESIS_BLOCK_SEQUENCE } from '../../../captain' + +export type GetBlockRequest = { index?: number; hash?: string } + +interface Operation { + operation_identifier: { index: number; network_index: number } + type: string +} +interface Note { + commitment: string +} +interface Spend { + nullifier: string +} +interface Transaction { + transaction_identifier: { hash: string } + operations: Array + metadata: { + size: number + notes: Array + spends: Array + } +} +interface Block { + blockIdentifier: { index: string; hash: string } + parentBlockIdentifier: { index: string; hash: string } + timestamp: number + transactions: Array + metadata: { + size: number + difficulty: number + } +} +export type GetBlockResponse = Block + +export const GetBlockRequestSchema: yup.ObjectSchema = yup + .object({ + index: yup.number().strip(true), + hash: yup.string().strip(true), + }) + .defined() + +const NoteSchema = yup + .object() + .shape({ + commitment: yup.string().defined(), + }) + .defined() + +const SpendSchema = yup + .object() + .shape({ + nullifier: yup.string().defined(), + }) + .defined() + +const OperationSchema = yup + .object() + .shape({ + type: yup.string().defined(), + operation_identifier: yup + .object() + .shape({ + index: yup.number().defined(), + network_index: yup.number().defined(), + }) + .defined(), + }) + .defined() + +const TransactionSchema = yup + .object() + .shape({ + transaction_identifier: yup.object({ hash: yup.string().defined() }).defined(), + operations: yup.array().of(OperationSchema).defined(), + metadata: yup + .object({ + notes: yup.array().of(NoteSchema).defined(), + spends: yup.array().of(SpendSchema).defined(), + size: yup.number().defined(), + }) + .defined(), + }) + .defined() + +export const GetBlockResponseSchema: yup.ObjectSchema = yup + .object({ + blockIdentifier: yup + .object({ index: 
yup.string().defined(), hash: yup.string().defined() }) + .defined(), + parentBlockIdentifier: yup + .object({ index: yup.string().defined(), hash: yup.string().defined() }) + .defined(), + timestamp: yup.number().defined(), + transactions: yup.array().of(TransactionSchema).defined(), + metadata: yup + .object({ + size: yup.number().defined(), + difficulty: yup.number().defined(), + }) + .defined(), + }) + .defined() + +router.register( + `${ApiNamespace.chain}/getBlock`, + GetBlockRequestSchema, + async (request, node): Promise => { + let hashBuffer = null, + sequence = null + if (request.data.hash) + hashBuffer = node.captain.chain.blockHashSerde.deserialize(request.data.hash) + if (request.data.index) sequence = BigInt(request.data.index) + + if (!hashBuffer && !sequence) throw new ValidationError(`Missing hash or sequence`) + + // Get a block hash for the specific sequence + // You must assume that the block returned will not be idempotent + // Given that a chain reorg event might cause the specific block + // at that sequence can be set to a different one + if (!hashBuffer && sequence) { + const hashBuffers = await node.captain.chain.getAtSequence(sequence) + if (Array.isArray(hashBuffers) && hashBuffers.length > 0) { + hashBuffer = hashBuffers[0] + } + } + + if (!hashBuffer) { + throw new ValidationError(`No block found at provided sequence`) + } + + const block = await node.captain.chain.getBlock(hashBuffer) + if (!block) { + throw new ValidationError(`No block found`) + } + + let parentBlock + if (block.header.sequence === GENESIS_BLOCK_SEQUENCE) { + parentBlock = block + } else { + parentBlock = await node.captain.chain.getBlock(block.header.previousBlockHash) + } + + if (!parentBlock) { + throw new ValidationError(`No parent block found`) + } + + const transactions = block.transactions.map((transaction) => { + const notes = [...transaction.notes()].map((note) => ({ + commitment: Buffer.from(note.merkleHash()).toString('hex'), + })) + + const spends = 
[...transaction.spends()].map((spend) => ({ + nullifier: node.captain.chain.blockHashSerde.serialize(spend.nullifier), + })) + + // TODO(IRO-289) We need a better way to either serialize directly to buffer or use CBOR + const transactionBuffer = Buffer.from( + JSON.stringify(node.captain.strategy.transactionSerde().serialize(transaction)), + ) + + return { + transaction_identifier: { + hash: node.captain.chain.blockHashSerde.serialize(transaction.transactionHash()), + }, + operations: [], + metadata: { + notes, + spends, + size: transactionBuffer.byteLength, + fee: Number(transaction.transactionFee()), + }, + } + }) + + // TODO(IRO-289) We need a better way to either serialize directly to buffer or use CBOR + const blockBuffer = Buffer.from(JSON.stringify(node.captain.blockSerde.serialize(block))) + + request.end({ + blockIdentifier: { + index: block.header.sequence.toString(), + hash: node.captain.chain.blockHashSerde.serialize(block.header.hash), + }, + parentBlockIdentifier: { + index: parentBlock.header.sequence.toString(), + hash: node.captain.chain.blockHashSerde.serialize(parentBlock.header.hash), + }, + timestamp: block.header.timestamp.getTime(), + transactions, + metadata: { + size: blockBuffer.byteLength, + difficulty: Number(block.header.target.toDifficulty()), + }, + }) + }, +) diff --git a/ironfish/src/rpc/routes/chain/getBlockInfo.ts b/ironfish/src/rpc/routes/chain/getBlockInfo.ts new file mode 100644 index 0000000000..85d3c14d76 --- /dev/null +++ b/ironfish/src/rpc/routes/chain/getBlockInfo.ts @@ -0,0 +1,61 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import * as yup from 'yup' +import { ApiNamespace, router } from '../router' +import { ValidationError } from '../../adapters' + +export type GetBlockInfoRequest = { hash: string } + +export type GetBlockInfoResponse = { + block: { + graffiti: string + hash: string + previousBlockHash: string + sequence: number + timestamp: number + } +} + +export const GetBlockInfoRequestSchema: yup.ObjectSchema = yup + .object({ + hash: yup.string().defined(), + }) + .defined() + +export const GetBlockInfoResponseSchema: yup.ObjectSchema = yup + .object({ + block: yup + .object({ + graffiti: yup.string().defined(), + hash: yup.string().defined(), + previousBlockHash: yup.string().defined(), + sequence: yup.number().defined(), + timestamp: yup.number().defined(), + }) + .defined(), + }) + .defined() + +router.register( + `${ApiNamespace.chain}/getBlockInfo`, + GetBlockInfoRequestSchema, + async (request, node): Promise => { + const hash = Buffer.from(request.data.hash, 'hex') + const header = await node.captain.chain.getBlockHeader(hash) + + if (!header) { + throw new ValidationError(`No block with hash ${request.data.hash}`) + } + + request.status(200).end({ + block: { + graffiti: header.graffiti.toString('hex'), + hash: request.data.hash, + previousBlockHash: header.previousBlockHash.toString('hex'), + sequence: Number(header.sequence), + timestamp: header.timestamp.valueOf(), + }, + }) + }, +) diff --git a/ironfish/src/rpc/routes/chain/getChain.ts b/ironfish/src/rpc/routes/chain/getChain.ts new file mode 100644 index 0000000000..681da65ab6 --- /dev/null +++ b/ironfish/src/rpc/routes/chain/getChain.ts @@ -0,0 +1,34 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import * as yup from 'yup' +import { ApiNamespace, router } from '../router' +import { printChain } from './utils' + +export type GetChainRequest = Record | undefined + +export type GetChainResponse = { + content: string[] +} + +export const GetChainRequestSchema: yup.MixedSchema = yup + .mixed() + .oneOf([undefined] as const) + +export const GetChainResponseSchema: yup.ObjectSchema = yup + .object({ + content: yup.array(yup.string().defined()).defined(), + }) + .defined() + +/** + * Get current, heaviest and genesis block identifiers + */ +router.register( + `${ApiNamespace.chain}/getChain`, + GetChainRequestSchema, + async (request, node): Promise => { + const content = await printChain(node.captain.chain) + request.end({ content }) + }, +) diff --git a/ironfish/src/rpc/routes/chain/getChainInfo.test.ts b/ironfish/src/rpc/routes/chain/getChainInfo.test.ts new file mode 100644 index 0000000000..d4ea034dad --- /dev/null +++ b/ironfish/src/rpc/routes/chain/getChainInfo.test.ts @@ -0,0 +1,52 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { createRouteTest } from '../test' +import { RangeHasher } from '../../../captain/anchorChain/merkleTree' + +import { blockHash, makeFakeBlock, TestStrategy } from '../../../captain/testUtilities' +import { GetChainInfoResponse } from './getChainInfo' + +describe('Route chain.getChainInfo', () => { + const routeTest = createRouteTest() + const date = new Date() + const strategy = new TestStrategy(new RangeHasher()) + const genesis = Buffer.from('genesis1234') + const latestHeader = makeFakeBlock(strategy, blockHash(1), blockHash(2), 1, 1, 1).header + const heaviestHeader = makeFakeBlock(strategy, blockHash(2), blockHash(3), 1, 1, 1).header + + beforeAll(() => { + routeTest.node.captain.chain.getLatestHead = jest.fn().mockReturnValue(latestHeader) + routeTest.node.captain.chain.getHeaviestHead = jest.fn().mockReturnValue(heaviestHeader) + routeTest.node.captain.chain.getAtSequence = jest.fn().mockReturnValue([genesis]) + routeTest.node.captain.chain.blockHashSerde.serialize = jest.fn((value) => value.toString()) + + routeTest.node.captain.chain.headers.get = jest.fn().mockImplementation((hash: Buffer) => { + if (hash.equals(latestHeader.hash)) { + return { + sequence: latestHeader.sequence, + hash: latestHeader.hash, + timestamp: date.getTime(), + } + } + if (hash.equals(heaviestHeader.hash)) { + return { + sequence: heaviestHeader.sequence, + hash: heaviestHeader.hash, + } + } + }) + }) + + it('returns the right object with hash', async () => { + const response = await routeTest.adapter.request('chain/getChainInfo', {}) + + const content = response.content as GetChainInfoResponse + + expect(content.currentBlockIdentifier.index).toEqual(latestHeader.sequence.toString()) + expect(content.genesisBlockIdentifier.index).toEqual('1') + expect(content.oldestBlockIdentifier.index).toEqual(heaviestHeader.sequence.toString()) + expect(content.currentBlockTimestamp).toEqual(Number(latestHeader.timestamp)) + }) +}) diff --git 
a/ironfish/src/rpc/routes/chain/getChainInfo.ts b/ironfish/src/rpc/routes/chain/getChainInfo.ts new file mode 100644 index 0000000000..4ff853456c --- /dev/null +++ b/ironfish/src/rpc/routes/chain/getChainInfo.ts @@ -0,0 +1,81 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Assert, GENESIS_BLOCK_SEQUENCE } from '../../../captain' +import * as yup from 'yup' + +import { ApiNamespace, router } from '../router' + +export type GetChainInfoRequest = Record +export type BlockIdentifier = { index: string; hash: string } + +export interface ChainInfo { + currentBlockIdentifier: BlockIdentifier + genesisBlockIdentifier: BlockIdentifier + oldestBlockIdentifier: BlockIdentifier + currentBlockTimestamp: number +} +export type GetChainInfoResponse = ChainInfo + +export const GetChainInfoRequestSchema: yup.ObjectSchema = yup + .object>() + .noUnknown() + .defined() + +export const GetChainInfoResponseSchema: yup.ObjectSchema = yup + .object({ + currentBlockIdentifier: yup + .object({ index: yup.string().defined(), hash: yup.string().defined() }) + .defined(), + genesisBlockIdentifier: yup + .object({ index: yup.string().defined(), hash: yup.string().defined() }) + .defined(), + oldestBlockIdentifier: yup + .object({ index: yup.string().defined(), hash: yup.string().defined() }) + .defined(), + currentBlockTimestamp: yup.number().defined(), + }) + .defined() + +/** + * Get current, heaviest and genesis block identifiers + */ +router.register( + `${ApiNamespace.chain}/getChainInfo`, + GetChainInfoRequestSchema, + async (request, node): Promise => { + const latestHeader = await node.captain.chain.getLatestHead() + const heaviestHeader = await node.captain.chain.getHeaviestHead() + const oldestBlockIdentifier = {} as BlockIdentifier + if (heaviestHeader) { + oldestBlockIdentifier.index = 
heaviestHeader.sequence.toString() + oldestBlockIdentifier.hash = node.captain.chain.blockHashSerde.serialize( + heaviestHeader.hash, + ) + } + + let currentBlockTimestamp = Number() + const currentBlockIdentifier = {} as BlockIdentifier + if (latestHeader) { + currentBlockTimestamp = Number(latestHeader.timestamp) + currentBlockIdentifier.index = latestHeader.sequence.toString() + currentBlockIdentifier.hash = node.captain.chain.blockHashSerde.serialize( + latestHeader.hash, + ) + } + + const genesisBlockHash = await node.captain.chain.getGenesisHash() + Assert.isNotNull(genesisBlockHash) + + const genesisBlockIdentifier = {} as BlockIdentifier + genesisBlockIdentifier.index = GENESIS_BLOCK_SEQUENCE.toString() + genesisBlockIdentifier.hash = node.captain.chain.blockHashSerde.serialize(genesisBlockHash) + + request.end({ + currentBlockIdentifier, + oldestBlockIdentifier, + genesisBlockIdentifier, + currentBlockTimestamp, + }) + }, +) diff --git a/ironfish/src/rpc/routes/chain/index.ts b/ironfish/src/rpc/routes/chain/index.ts new file mode 100644 index 0000000000..0d57ea9373 --- /dev/null +++ b/ironfish/src/rpc/routes/chain/index.ts @@ -0,0 +1,8 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './getBlock' +export * from './getChain' +export * from './getChainInfo' +export * from './getBlockInfo' diff --git a/ironfish/src/rpc/routes/chain/utils.ts b/ironfish/src/rpc/routes/chain/utils.ts new file mode 100644 index 0000000000..8be6235683 --- /dev/null +++ b/ironfish/src/rpc/routes/chain/utils.ts @@ -0,0 +1,225 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { Assert } from '../../../assert' +import Blockchain, { Block, GRAPH_ID_NULL } from '../../../captain/anchorChain/blockchain' +import { Graph } from '../../../captain/anchorChain/blockchain/Graph' +import { Transaction } from '../../../captain/anchorChain/strategies' +import { JsonSerializable } from '../../../serde' +import { HashUtils } from '../../../utils' + +/** + * When shown, graph ids are simplified ids for debugging and not their real ids + */ +export async function printGraph< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +>( + chain: Blockchain, + genesisGraph: Graph, + graph: Graph, + block: Block, + indent: string, + last: boolean, + content: string[], + seen = new Set(), + simpleGraphIds: { id: number; ids: Map } = { + id: 0, + ids: new Map(), + }, + show: { + gph?: boolean + gphSimple?: boolean + prev?: boolean + merge?: boolean + seq?: boolean + work?: boolean + indent?: boolean + } = { + seq: true, + work: true, + }, +): Promise { + const blockHash = block.header.hash.toString('hex') + seen.add(blockHash) + + const isLatest = chain.blockHashSerde.equals(block.header.hash, genesisGraph.latestHash) + ? ' LATEST' + : '' + + const isHeaviest = + genesisGraph.heaviestHash && + chain.blockHashSerde.equals(block.header.hash, genesisGraph.heaviestHash) + ? ' HEAVY' + : '' + + const isTail = chain.blockHashSerde.equals(block.header.hash, genesisGraph.tailHash) + ? ' TAIL' + : '' + + const isGenesis = chain.blockHashSerde.equals( + block.header.hash, + (await chain.getGenesisHash()) || Buffer.from(''), + ) + ? 
' GENESIS' + : '' + + const blockString = HashUtils.renderHashHex(blockHash) + + function resolveGraphId(graphId: number): number { + if (!show.gphSimple) return graphId + + if (!simpleGraphIds.ids.has(graphId)) { + simpleGraphIds.ids.set(graphId, ++simpleGraphIds.id) + } + + const simpleId = simpleGraphIds.ids.get(graphId) + Assert.isNotUndefined(simpleId) + return simpleId + } + + // Reduce graphs down to simple integer + const graphId = resolveGraphId(block.header.graphId) + + const suffixParts = [] + if (show.seq) { + suffixParts.push(`${block.header.sequence} seq`) + } + if (show.prev) { + suffixParts.push(`prev ${HashUtils.renderHash(block.header.previousBlockHash)}`) + } + if (show.gph) { + suffixParts.push(`gph ${graphId}`) + } + if (show.work) { + suffixParts.push(`work: ${block.header.work.toString()}`) + } + if (show.merge) { + const graph = await chain.getGraph(block.header.graphId) + + if (graph && graph.mergeId !== null) { + const mergeId = resolveGraphId(graph.mergeId) + suffixParts.push(`mrg ${mergeId}`) + } + } + const suffix = suffixParts.length ? ` (${suffixParts.join(', ')})` : '' + const indentation = show.indent ? ' ' : '' + + content.push( + indent + `+- Block ${blockString}${suffix}${isLatest}${isHeaviest}${isTail}${isGenesis}`, + ) + + indent += last ? `${indentation}` : `| ${indentation}` + + let children = await Promise.all( + (await chain.getBlockToNext(block.header.hash)).map(async (h) => { + const block = await chain.getBlock(h) + if (!block) throw new Error('block was totally not there') + + const graph = + block.header.graphId === GRAPH_ID_NULL + ? 
null + : await chain.getGraph(block.header.graphId) + + return [block, graph] as [Block, Graph] + }), + ) + + children = children.filter(([b, g]) => { + return b.header.graphId === graph.id || g.mergeId === graph.id + }) + + for (let i = 0; i < children.length; i++) { + const [child, childGraph] = children[i] + const childHash = child.header.hash.toString('hex') + + if (seen.has(childHash)) { + // eslint-disable-next-line no-console + console.error(`ERROR FOUND LOOPING CHAIN ${blockHash} -> ${childHash}`) + return + } + + await printGraph( + chain, + genesisGraph, + childGraph, + child, + indent, + i == children.length - 1, + content, + seen, + simpleGraphIds, + show, + ) + } +} + +export async function printChain< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +>( + chain: Blockchain, + show: { + gph?: boolean + gphSimple?: boolean + prev?: boolean + merge?: boolean + seq?: boolean + work?: boolean + indent?: boolean + } = { + seq: true, + work: true, + }, +): Promise { + const content: string[] = [] + const graphs = await chain.graphs.getAllValues() + const treeStatus = await chain.checkTreeMatchesHeaviest() + + const simpleGraphIds = { + id: 0, + ids: new Map(), + } + + for (const graph of graphs) { + if (graph.mergeId !== null) continue + + content.push( + '\n======', + 'TAIL', + graph.tailHash.toString('hex'), + 'HEAVIEST', + graph.heaviestHash ? graph.heaviestHash.toString('hex') : '---NULL---', + 'LATEST', + graph.latestHash.toString('hex'), + 'TREES OKAY?', + treeStatus ? 
'TRUE' : 'FALSE', + ) + + const tail = await chain.getBlock(graph.tailHash) + if (!tail) throw new Error('no tail is bad') + + await printGraph( + chain, + graph, + graph, + tail, + '', + true, + content, + undefined, + simpleGraphIds, + show, + ) + } + + return content +} diff --git a/ironfish/src/rpc/routes/config/getConfig.ts b/ironfish/src/rpc/routes/config/getConfig.ts new file mode 100644 index 0000000000..839b954a6b --- /dev/null +++ b/ironfish/src/rpc/routes/config/getConfig.ts @@ -0,0 +1,39 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { ApiNamespace, router } from '../router' +import { ConfigOptions, ConfigOptionsSchema } from '../../../fileStores/config' +import * as yup from 'yup' +import { ValidationError } from '../../adapters/errors' + +export type GetConfigRequest = { user?: boolean; name?: string } | undefined +export type GetConfigResponse = Partial + +export const GetConfigRequestSchema: yup.ObjectSchema = yup + .object({ + user: yup.boolean().optional(), + name: yup.string().optional(), + }) + .optional() + +export const GetConfigResponseSchema: yup.ObjectSchema = ConfigOptionsSchema + +router.register( + `${ApiNamespace.config}/getConfig`, + GetConfigRequestSchema, + (request, node): void => { + if (request.data?.name && !(request.data.name in node.config.defaults)) { + throw new ValidationError(`No config option ${String(request.data.name)}`) + } + + let pickKeys: string[] | undefined = undefined + if (!request.data?.user) pickKeys = Object.keys(node.config.defaults) + if (request.data?.name) pickKeys = [request.data.name] + + const data = (request.data?.user + ? 
JSON.parse(JSON.stringify(node.config.loaded)) + : JSON.parse(JSON.stringify(node.config.config, pickKeys))) as GetConfigResponse + + request.end(data) + }, +) diff --git a/ironfish/src/rpc/routes/config/index.ts b/ironfish/src/rpc/routes/config/index.ts new file mode 100644 index 0000000000..2855b5c591 --- /dev/null +++ b/ironfish/src/rpc/routes/config/index.ts @@ -0,0 +1,7 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './getConfig' +export * from './setConfig' +export * from './uploadConfig' diff --git a/ironfish/src/rpc/routes/config/setConfig.ts b/ironfish/src/rpc/routes/config/setConfig.ts new file mode 100644 index 0000000000..df288e8d0c --- /dev/null +++ b/ironfish/src/rpc/routes/config/setConfig.ts @@ -0,0 +1,29 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ApiNamespace, router } from '../router' +import { ConfigOptions, ConfigOptionsSchema } from '../../../fileStores/config' +import * as yup from 'yup' +import { setUnknownConfigValue } from './uploadConfig' + +export type SetConfigRequest = { name: string; value: unknown } +export type SetConfigResponse = Partial + +export const SetConfigRequestSchema: yup.ObjectSchema = yup + .object({ + name: yup.string().defined(), + value: yup.mixed().defined(), + }) + .defined() + +export const SetConfigResponseSchema: yup.ObjectSchema = ConfigOptionsSchema + +router.register( + `${ApiNamespace.config}/setConfig`, + SetConfigRequestSchema, + async (request, node): Promise => { + setUnknownConfigValue(node.config, request.data.name, request.data.value) + await node.config.save() + request.end() + }, +) diff --git a/ironfish/src/rpc/routes/config/uploadConfig.ts b/ironfish/src/rpc/routes/config/uploadConfig.ts new file mode 100644 index 0000000000..52e6538f0d --- /dev/null +++ b/ironfish/src/rpc/routes/config/uploadConfig.ts @@ -0,0 +1,105 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ApiNamespace, router } from '../router' +import { Config, ConfigOptions, ConfigOptionsSchema } from '../../../fileStores/config' +import * as yup from 'yup' +import { ValidationError } from '../../adapters/errors' + +export type UploadConfigRequest = { config: Record } +export type UploadConfigResponse = Partial + +export const UploadConfigRequestSchema: yup.ObjectSchema = yup + .object({ config: yup.mixed().required() }) + .defined() + +export const UploadConfigResponseSchema: yup.ObjectSchema = ConfigOptionsSchema + +router.register( + `${ApiNamespace.config}/uploadConfig`, + UploadConfigRequestSchema, + async (request, node): Promise => { + clearConfig(node.config) + + for (const key of Object.keys(request.data.config)) { + if (Object.prototype.hasOwnProperty.call(request.data.config, key)) { + setUnknownConfigValue(node.config, key, request.data.config[key], true) + } + } + + await node.config.save() + request.end() + }, +) + +function clearConfig(config: Config): void { + for (const key of Object.keys(config.loaded)) { + const configKey = key as keyof ConfigOptions + delete config.loaded[configKey] + } +} + +export function setUnknownConfigValue( + config: Config, + unknownKey: string, + unknownValue: unknown, + ignoreUnknownKey = false, +): void { + if (unknownKey && !(unknownKey in config.defaults)) { + if (!ignoreUnknownKey) { + throw new ValidationError(`No config option ${String(unknownKey)}`) + } + } + + const sourceKey = unknownKey as keyof ConfigOptions + let sourceValue = unknownValue + + let targetValue: unknown = config.defaults[sourceKey] + // Support keys that are undefined inside of the config from old config values or third parties adding config values + if (targetValue === undefined) targetValue = sourceValue + + let value = sourceValue + + // Trim string values + if (typeof sourceValue === 'string') { + sourceValue = sourceValue.trim() + } + + if (typeof sourceValue !== typeof targetValue) { + value = convertValue(sourceValue, 
targetValue) + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + config.set(sourceKey, value as any) +} + +function convertValue(sourceValue: unknown, targetValue: unknown): unknown { + if (typeof sourceValue !== 'string') { + throw new ValidationError( + `Could not convert ${JSON.stringify(sourceValue)} to ${String(typeof targetValue)}`, + ) + } + + let targetType: 'number' | 'boolean' | 'array' | null = null + + if (typeof targetValue === 'number') { + const converted = Number(sourceValue) + if (!Number.isNaN(converted)) return converted + targetType = 'number' + } else if (typeof targetValue === 'boolean') { + const value = sourceValue.toLowerCase().trim() + if (value === '1') return true + if (value === '0') return false + if (value === 'true') return true + if (value === 'false') return false + targetType = 'boolean' + } else if (typeof targetValue === 'string') { + return sourceValue + } else if (Array.isArray(targetValue)) { + targetType = 'array' + } + + throw new ValidationError( + `Could not convert ${JSON.stringify(sourceValue)} to ${String(targetType)}`, + ) +} diff --git a/ironfish/src/rpc/routes/faucet/giveMe.test.ts b/ironfish/src/rpc/routes/faucet/giveMe.test.ts new file mode 100644 index 0000000000..401f2bef57 --- /dev/null +++ b/ironfish/src/rpc/routes/faucet/giveMe.test.ts @@ -0,0 +1,51 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { createRouteTest } from '../test' +import axios, { AxiosError } from 'axios' + +jest.mock('axios') + +describe('Route faucet.giveMe', () => { + const routeTest = createRouteTest() + + it('should fail if the account does not exist in DB', async () => { + await expect( + routeTest.adapter.request('faucet/giveMe', { accountName: 'test-notfound' }), + ).rejects.toThrow('Account test-notfound could not be found') + }) + + describe('With a default account and the db', () => { + let accountName = 'test' + Math.random().toString() + const email = 'test@test.com' + let publicAddress = '' + + beforeEach(async () => { + accountName = 'test' + Math.random().toString() + const account = await routeTest.node.accounts.createAccount(accountName, true) + publicAddress = account.publicAddress + }) + + it('calls the API and returns the right response', async () => { + const apiResponse = { message: 'success' } + axios.post = jest + .fn() + .mockImplementationOnce(() => Promise.resolve({ data: apiResponse })) + const response = await routeTest.adapter.request('faucet/giveMe', { accountName, email }) + expect(response.status).toBe(200) + + expect(axios.post).toHaveBeenCalledWith(routeTest.node.config.get('getFundsApi'), null, { + params: { email, publicKey: publicAddress }, + }) + expect(response.content).toMatchObject(apiResponse) + }) + + it('throws an error if the API request fails', async () => { + const apiResponse = new Error('API failure') as AxiosError + axios.post = jest.fn().mockRejectedValueOnce(apiResponse) + await expect( + routeTest.adapter.request('faucet/giveMe', { accountName, email }), + ).rejects.toThrow('API failure') + }) + }) +}) diff --git a/ironfish/src/rpc/routes/faucet/giveMe.ts b/ironfish/src/rpc/routes/faucet/giveMe.ts new file mode 100644 index 0000000000..1f934da7a1 --- /dev/null +++ b/ironfish/src/rpc/routes/faucet/giveMe.ts @@ -0,0 +1,53 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import * as yup from 'yup' +import axios, { AxiosResponse, AxiosError } from 'axios' + +import { ApiNamespace, router } from '../router' +import { ValidationError, ResponseError, ERROR_CODES } from '../../adapters' + +export type GiveMeRequest = { accountName: string; email?: string } +export type GiveMeResponse = { message: string } + +export const GiveMeRequestSchema: yup.ObjectSchema = yup + .object({ + accountName: yup.string().required(), + email: yup.string().strip(true), + }) + .defined() + +export const GiveMeResponseSchema: yup.ObjectSchema = yup + .object({ + message: yup.string().defined(), + }) + .defined() + +router.register( + `${ApiNamespace.faucet}/giveMe`, + GiveMeRequestSchema, + async (request, node): Promise => { + const account = node.accounts.getAccountByName(request.data.accountName) + if (!account) + throw new ValidationError(`Account ${request.data.accountName} could not be found`) + + const getFundsApi = node.config.get('getFundsApi') + if (!getFundsApi) { + throw new ValidationError(`GiveMe requires config.getFundsApi to be set`) + } + + await axios + .post(getFundsApi, null, { + params: { + email: request.data.email, + publicKey: account.publicAddress, + }, + }) + .then(({ data }: AxiosResponse) => { + request.end(data) + }) + .catch((error: AxiosError) => { + throw new ResponseError(error.message, ERROR_CODES.ERROR, Number(error.code)) + }) + }, +) diff --git a/ironfish/src/rpc/routes/faucet/index.ts b/ironfish/src/rpc/routes/faucet/index.ts new file mode 100644 index 0000000000..805060846a --- /dev/null +++ b/ironfish/src/rpc/routes/faucet/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export * from './giveMe' diff --git a/ironfish/src/rpc/routes/index.ts b/ironfish/src/rpc/routes/index.ts new file mode 100644 index 0000000000..91be6e6b7f --- /dev/null +++ b/ironfish/src/rpc/routes/index.ts @@ -0,0 +1,12 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './accounts' +export * from './config' +export * from './chain' +export * from './node' +export * from './peers' +export * from './router' +export * from './mining' +export * from './transactions' +export * from './faucet' diff --git a/ironfish/src/rpc/routes/mining/index.ts b/ironfish/src/rpc/routes/mining/index.ts new file mode 100644 index 0000000000..bd3a73c2a9 --- /dev/null +++ b/ironfish/src/rpc/routes/mining/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './newBlocksStream' +export * from './successfullyMined' diff --git a/ironfish/src/rpc/routes/mining/newBlocksStream.ts b/ironfish/src/rpc/routes/mining/newBlocksStream.ts new file mode 100644 index 0000000000..0fcd61ff77 --- /dev/null +++ b/ironfish/src/rpc/routes/mining/newBlocksStream.ts @@ -0,0 +1,69 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import * as yup from 'yup' +import { ValidationError } from '../../adapters' +import { router, ApiNamespace } from '../router' + +export type NewBlocksStreamRequest = Record | undefined +export type NewBlocksStreamResponse = { + bytes: { type: 'Buffer'; data: number[] } + target: string + miningRequestId: number +} + +export const NewBlocksStreamRequestSchema: yup.MixedSchema = yup + .mixed() + .oneOf([undefined] as const) + +export const NewBlocksStreamResponseSchema: yup.ObjectSchema = yup + .object({ + bytes: yup + .object({ + type: yup + .mixed() + .oneOf(['Buffer'] as const) + .required(), + data: yup.array().of(yup.number().integer().required()).required(), + }) + .required(), + target: yup.string().required(), + miningRequestId: yup.number().required(), + }) + .required() + .defined() + +router.register( + `${ApiNamespace.miner}/newBlocksStream`, + NewBlocksStreamRequestSchema, + async (request, node): Promise => { + if (!node.config.get('enableMiningDirector')) { + node.config.setOverride('enableMiningDirector', true) + } + + if (!node.miningDirector.minerAccount) { + throw new ValidationError( + `The node you are connecting to doesn't have a default account. + Create and set a default account using "ironfish accounts" first. 
+ `, + ) + } + + node.miningDirector.onBlockToMine.on((event) => { + request.stream({ + bytes: event.bytes.toJSON(), + target: event.target.asBigInt().toString(), + miningRequestId: event.miningRequestId, + }) + }) + + // 'prime' the stream with the current block + const currentHead = await node.captain.chain.getHeaviestHead() + if (currentHead) { + await node.miningDirector.onChainHeadChange(currentHead.hash) + } + + await Promise.resolve() + }, +) diff --git a/ironfish/src/rpc/routes/mining/successfullyMined.ts b/ironfish/src/rpc/routes/mining/successfullyMined.ts new file mode 100644 index 0000000000..6d898111f4 --- /dev/null +++ b/ironfish/src/rpc/routes/mining/successfullyMined.ts @@ -0,0 +1,33 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { router, ApiNamespace } from '../router' +import * as yup from 'yup' + +export type SuccessfullyMinedRequest = { randomness: number; miningRequestId: number } +export type SuccessfullyMinedResponse = Record | undefined + +export const SuccessfullyMinedRequestSchema: yup.ObjectSchema = yup + .object({ + randomness: yup.number().defined(), + miningRequestId: yup.number().defined(), + }) + .defined() +export const SuccessfullyMinedResponseSchema: yup.MixedSchema = yup + .mixed() + .oneOf([undefined] as const) + +router.register( + `${ApiNamespace.miner}/successfullyMined`, + SuccessfullyMinedRequestSchema, + async (request, node): Promise => { + if (node.miningDirector) { + node.miningDirector.successfullyMined( + request.data.randomness, + request.data.miningRequestId, + ) + } + request.end() + return Promise.resolve() + }, +) diff --git a/ironfish/src/rpc/routes/node/getLogStream.test.ts b/ironfish/src/rpc/routes/node/getLogStream.test.ts new file mode 100644 index 0000000000..34ae959266 --- /dev/null +++ 
b/ironfish/src/rpc/routes/node/getLogStream.test.ts @@ -0,0 +1,32 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { createRouteTest } from '../test' +import { LogLevel } from 'consola' + +describe('Route node/getLogStream', () => { + const routeTest = createRouteTest() + + it('should get stream log', async () => { + // Clear out the console reporter + routeTest.node.logger.setReporters([]) + // Start accepting logs again + routeTest.node.logger.resume() + + const response = await routeTest.adapter.requestStream('node/getLogStream').waitForRoute() + + routeTest.node.logger.info('Hello', { foo: 2 }) + const { value } = await response.contentStream().next() + + response.end() + expect(response.status).toBe(200) + + expect(value).toMatchObject({ + level: LogLevel.Info.toString(), + tag: expect.stringContaining('ironfishnode'), + type: 'info', + args: ['Hello', { foo: 2 }], + date: expect.anything(), + }) + }) +}) diff --git a/ironfish/src/rpc/routes/node/getLogStream.ts b/ironfish/src/rpc/routes/node/getLogStream.ts new file mode 100644 index 0000000000..33bb389c82 --- /dev/null +++ b/ironfish/src/rpc/routes/node/getLogStream.ts @@ -0,0 +1,55 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { router, ApiNamespace } from '../router' +import * as yup from 'yup' +import { InterceptReporter } from '../../../logger' +import { ConsolaReporterLogObject } from 'consola' + +// eslint-disable-next-line @typescript-eslint/ban-types +export type GetLogStreamRequest = {} | undefined + +export type GetLogStreamResponse = { + level: string + type: string + tag: string + args: unknown[] + date: string +} + +export const GetLogStreamRequestSchema: yup.ObjectSchema = yup + .object({}) + .notRequired() + .default({}) + +export const GetLogStreamResponseSchema: yup.ObjectSchema = yup + .object({ + level: yup.string().defined(), + type: yup.string().defined(), + tag: yup.string().defined(), + args: yup.array(yup.mixed()).defined(), + date: yup.string().defined(), + }) + .defined() + +router.register( + `${ApiNamespace.node}/getLogStream`, + GetLogStreamRequestSchema, + (request, node): void => { + const reporter = new InterceptReporter((logObj: ConsolaReporterLogObject): void => { + request.stream({ + level: String(logObj.level), + type: logObj.type, + tag: logObj.tag, + args: logObj.args, + date: logObj.date.toISOString(), + }) + }) + + node.logger.addReporter(reporter) + + request.onClose.on(() => { + node.logger.removeReporter(reporter) + }) + }, +) diff --git a/ironfish/src/rpc/routes/node/getStatus.test.ts b/ironfish/src/rpc/routes/node/getStatus.test.ts new file mode 100644 index 0000000000..9e691b9328 --- /dev/null +++ b/ironfish/src/rpc/routes/node/getStatus.test.ts @@ -0,0 +1,23 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { createRouteTest } from '../test' + +describe('Route node/getStatus', () => { + const routeTest = createRouteTest() + + it('should get status', async () => { + const response = await routeTest.adapter.request('node/getStatus') + + expect(response.status).toBe(200) + + expect(response.content).toMatchObject({ + node: { + status: 'started', + }, + blockSyncer: { + status: 'STOPPED', + }, + }) + }) +}) diff --git a/ironfish/src/rpc/routes/node/getStatus.ts b/ironfish/src/rpc/routes/node/getStatus.ts new file mode 100644 index 0000000000..0305569c1c --- /dev/null +++ b/ironfish/src/rpc/routes/node/getStatus.ts @@ -0,0 +1,130 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { router, ApiNamespace } from '../router' +import * as yup from 'yup' +import { IronfishNode } from '../../../node' +import { MathUtils, PromiseUtils } from '../../../utils' + +export type GetStatusRequest = + | undefined + | { + stream?: boolean + } + +export type GetStatusResponse = { + node: { + status: 'started' | 'stopped' | 'error' + heaviestHead: string + } + blockSyncer: { + status: string + error?: string + syncing?: { + blockSpeed: number + speed: number + } + } + peerNetwork: { + isReady: boolean + inboundTraffic: number + outboundTraffic: number + } +} + +export const GetStatusRequestSchema: yup.ObjectSchema = yup + .object({ + stream: yup.boolean().optional(), + }) + .optional() + .default({}) + +export const GetStatusResponseSchema: yup.ObjectSchema = yup + .object({ + node: yup + .object({ + status: yup.string().oneOf(['started', 'stopped', 'error']).defined(), + heaviestHead: yup.string().defined(), + }) + .defined(), + peerNetwork: yup + .object({ + isReady: yup.boolean().defined(), + inboundTraffic: yup.number().defined(), + outboundTraffic: yup.number().defined(), + }) + .defined(), + blockSyncer: yup 
+ .object({ + status: yup.string().oneOf(['started', 'stopped', 'error']).defined(), + error: yup.string().optional(), + syncing: yup + .object({ + blockSpeed: yup.number().defined(), + speed: yup.number().defined(), + }) + .optional(), + }) + .defined(), + }) + .defined() + +router.register( + `${ApiNamespace.node}/getStatus`, + GetStatusRequestSchema, + async (request, node): Promise => { + const status = await getStatus(node) + + if (!request.data?.stream) { + request.end(status) + return + } + + request.stream(status) + + let stream = true + while (stream) { + const status = await getStatus(node) + request.stream(status) + await PromiseUtils.sleep(500) + } + + request.onClose.on(() => { + stream = false + }) + }, +) + +async function getStatus(node: IronfishNode): Promise { + const heaviestHead = await node.captain.chain.getHeaviestHead() + const status: GetStatusResponse = { + peerNetwork: { + isReady: false, + inboundTraffic: 0, + outboundTraffic: 0, + }, + node: { + status: 'started', + heaviestHead: `${heaviestHead?.hash.toString('hex') || ''} (${ + heaviestHead?.sequence.toString() || '' + })`, + }, + blockSyncer: { + status: node.captain.blockSyncer.state.type, + error: undefined, + }, + } + + if (node.networkBridge.peerNetwork) { + status.peerNetwork.isReady = node.networkBridge.peerNetwork.isReady + status.peerNetwork.inboundTraffic = node.metrics.p2p_InboundTraffic.rate5s + status.peerNetwork.outboundTraffic = node.metrics.p2p_OutboundTraffic.rate5s + } + + status.blockSyncer.syncing = { + blockSpeed: MathUtils.round(node.captain.blockSyncer.status.blockAddingSpeed.avg, 2), + speed: MathUtils.round(node.captain.blockSyncer.status.speed.rate1m, 2), + } + + return status +} diff --git a/ironfish/src/rpc/routes/node/index.ts b/ironfish/src/rpc/routes/node/index.ts new file mode 100644 index 0000000000..b8b1790125 --- /dev/null +++ b/ironfish/src/rpc/routes/node/index.ts @@ -0,0 +1,7 @@ +/* This Source Code Form is subject to the terms of the Mozilla 
Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './getLogStream' +export * from './getStatus' +export * from './stopNode' diff --git a/ironfish/src/rpc/routes/node/stopNode.test.ts b/ironfish/src/rpc/routes/node/stopNode.test.ts new file mode 100644 index 0000000000..d95ac61396 --- /dev/null +++ b/ironfish/src/rpc/routes/node/stopNode.test.ts @@ -0,0 +1,17 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { createRouteTest } from '../test' + +describe('Route node.getStatus', () => { + const routeTest = createRouteTest() + + it('should get status', async () => { + routeTest.node.shutdown = jest.fn() + + const response = await routeTest.adapter.request('node/stopNode') + expect(response.status).toBe(200) + expect(response.content).toBe(undefined) + expect(routeTest.node.shutdown).toHaveBeenCalled() + }) +}) diff --git a/ironfish/src/rpc/routes/node/stopNode.ts b/ironfish/src/rpc/routes/node/stopNode.ts new file mode 100644 index 0000000000..f547e04fcf --- /dev/null +++ b/ironfish/src/rpc/routes/node/stopNode.ts @@ -0,0 +1,27 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { router, ApiNamespace } from '../router' +import * as yup from 'yup' + +// eslint-disable-next-line @typescript-eslint/ban-types +export type StopNodeRequest = undefined +export type StopNodeResponse = undefined + +export const StopNodeRequestSchema: yup.MixedSchema = yup + .mixed() + .oneOf([undefined] as const) + +export const StopNodeResponseSchema: yup.MixedSchema = yup + .mixed() + .oneOf([undefined] as const) + +router.register( + `${ApiNamespace.node}/stopNode`, + StopNodeRequestSchema, + async (request, node): Promise => { + node.logger.withTag('stopnode').info('Shutting down') + await node.shutdown() + request.end() + }, +) diff --git a/ironfish/src/rpc/routes/peers/getPeers.ts b/ironfish/src/rpc/routes/peers/getPeers.ts new file mode 100644 index 0000000000..a6b6f94d66 --- /dev/null +++ b/ironfish/src/rpc/routes/peers/getPeers.ts @@ -0,0 +1,142 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { router, ApiNamespace } from '../router' +import * as yup from 'yup' +import { Connection, PeerNetwork } from '../../../network' +import { renderVersion } from '../../../network/version' + +type ConnectionState = Connection['state']['type'] | '' + +type PeerResponse = { + state: string + identity: string | null + version: string | null + name: string | null + address: string | null + port: number | null + error: string | null + connections: number + connectionWebSocket: ConnectionState + connectionWebSocketError: string + connectionWebRTC: ConnectionState + connectionWebRTCError: string +} + +// eslint-disable-next-line @typescript-eslint/ban-types +export type GetPeersRequest = + | undefined + | { + stream?: boolean + } + +export type GetPeersResponse = { + peers: Array +} + +export const GetPeersRequestSchema: yup.ObjectSchema = yup + .object({ + stream: yup.boolean().optional(), + }) + .optional() + .default({}) + +export const GetPeersResponseSchema: yup.ObjectSchema = yup + .object({ + peers: yup + .array( + yup + .object({ + state: yup.string().defined(), + address: yup.string().nullable().defined(), + port: yup.number().nullable().defined(), + identity: yup.string().nullable().defined(), + name: yup.string().nullable().defined(), + version: yup.string().nullable().defined(), + error: yup.string().nullable().defined(), + connections: yup.number().defined(), + connectionWebSocket: yup.string().defined(), + connectionWebSocketError: yup.string().defined(), + connectionWebRTC: yup.string().defined(), + connectionWebRTCError: yup.string().defined(), + }) + .defined(), + ) + .defined(), + }) + .defined() + +router.register( + `${ApiNamespace.peer}/getPeers`, + GetPeersRequestSchema, + (request, node): void => { + const peerNetwork = node.networkBridge.peerNetwork + + if (!peerNetwork) { + request.end({ peers: [] }) + return + } + + const peers = getPeers(peerNetwork) + + if (!request.data?.stream) { + request.end({ peers }) + return + } + + 
request.stream({ peers }) + + const interval = setInterval(() => { + const peers = getPeers(peerNetwork) + request.stream({ peers }) + }, 1000) + + request.onClose.on(() => { + clearInterval(interval) + }) + }, +) + +function getPeers(network: PeerNetwork): PeerResponse[] { + const result: PeerResponse[] = [] + + for (const peer of network.peerManager.peers) { + let connections = 0 + let connectionWebRTC: ConnectionState = '' + let connectionWebSocket: ConnectionState = '' + let connectionWebRTCError = '' + let connectionWebSocketError = '' + + if (peer.state.type !== 'DISCONNECTED') { + if (peer.state.connections.webSocket) { + connectionWebSocket = peer.state.connections.webSocket.state.type + connectionWebSocketError = String(peer.state.connections.webSocket.error || '') + } + + if (peer.state.connections.webRtc) { + connectionWebRTC = peer.state.connections.webRtc.state.type + connectionWebRTCError = String(peer.state.connections.webRtc.error || '') + } + } + + if (connectionWebSocket !== '') connections++ + if (connectionWebRTC !== '') connections++ + + result.push({ + state: peer.state.type, + address: peer.address, + port: peer.port, + identity: peer.state.identity, + name: peer.name, + version: peer.version ? renderVersion(peer.version) : null, + connections: connections, + error: peer.error != null ? String(peer.error) : null, + connectionWebSocket: connectionWebSocket, + connectionWebSocketError: connectionWebSocketError, + connectionWebRTC: connectionWebRTC, + connectionWebRTCError: connectionWebRTCError, + }) + } + + return result +} diff --git a/ironfish/src/rpc/routes/peers/index.ts b/ironfish/src/rpc/routes/peers/index.ts new file mode 100644 index 0000000000..21fede9ea1 --- /dev/null +++ b/ironfish/src/rpc/routes/peers/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export * from './getPeers' diff --git a/ironfish/src/rpc/routes/router.ts b/ironfish/src/rpc/routes/router.ts new file mode 100644 index 0000000000..86c1f8e019 --- /dev/null +++ b/ironfish/src/rpc/routes/router.ts @@ -0,0 +1,120 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Assert } from '../../assert' +import { IronfishNode } from '../../node' +import { Request } from '../request' +import { ResponseError, ValidationError } from '../adapters/errors' +import { RpcServer } from '../server' +import { StrEnumUtils } from '../../utils/enums' +import { YupSchema, YupSchemaResult, YupUtils } from '../../utils' +import { ERROR_CODES } from '../adapters' + +export enum ApiNamespace { + account = 'account', + chain = 'chain', + config = 'config', + faucet = 'faucet', + miner = 'miner', + node = 'node', + peer = 'peer', + transaction = 'transaction', + telemetry = 'telemetry', +} + +export const ALL_API_NAMESPACES = StrEnumUtils.getValues(ApiNamespace) + +export type RouteHandler = ( + request: Request, + node: IronfishNode, +) => Promise | void + +export class RouteNotFoundError extends ResponseError { + constructor(route: string, namespace: string, method: string) { + super( + `No route found ${route} in namespace ${namespace} for method ${method}`, + ERROR_CODES.ROUTE_NOT_FOUND, + 404, + ) + } +} + +export function parseRoute( + route: string, +): [namespace: string | undefined, method: string | undefined] { + const [n, m] = route.split('/') + return [n, m] +} + +export class Router { + routes = new Map>() + server: RpcServer | null = null + + register( + route: string, + requestSchema: TRequestSchema, + handler: RouteHandler, TResponse>, + ): void { + const [namespace, method] = parseRoute(route) + + Assert.isNotUndefined(namespace, `Invalid namespace: ${String(namespace)}: ${route}`) + 
Assert.isNotUndefined(method, `Invalid method: ${String(namespace)}: ${route}`) + + let namespaceRoutes = this.routes.get(namespace) + + if (!namespaceRoutes) { + namespaceRoutes = new Map() + this.routes.set(namespace, namespaceRoutes) + } + + namespaceRoutes.set(method, { + handler: handler as RouteHandler, + schema: requestSchema, + }) + } + + async route(route: string, request: Request): Promise { + const [namespace, method] = route.split('/') + + const namespaceRoutes = this.routes.get(namespace) + if (!namespaceRoutes) { + throw new RouteNotFoundError(route, namespace, method) + } + + const methodRoute = namespaceRoutes.get(method) + if (!methodRoute) { + throw new RouteNotFoundError(route, namespace, method) + } + + const { handler, schema } = methodRoute + + const { error } = await YupUtils.tryValidate(schema, request.data) + if (error) throw new ValidationError(error.message, 400) + + Assert.isNotNull(this.server) + + try { + await handler(request, this.server.node) + } catch (e: unknown) { + if (e instanceof ResponseError) throw e + if (e instanceof Error) throw new ResponseError(e) + throw e + } + } + + filter(namespaces: string[]): Router { + const set = new Set(namespaces) + const copy = new Router() + copy.server = this.server + + for (const [key, value] of this.routes) { + if (set.has(key)) { + copy.routes.set(key, value) + } + } + + return copy + } +} + +export const router = new Router() diff --git a/ironfish/src/rpc/routes/test.ts b/ironfish/src/rpc/routes/test.ts new file mode 100644 index 0000000000..5716b2f519 --- /dev/null +++ b/ironfish/src/rpc/routes/test.ts @@ -0,0 +1,55 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { IronfishNode } from '../../node' +import { IronfishSdk } from '../../sdk' +import { MemoryAdapter } from '../adapters' +import os from 'os' +import { v4 as uuid } from 'uuid' +import path from 'path' +import { IronfishMemoryClient } from '../clients' +/** + * Used as an easy wrapper for an RPC route test. Use {@link createRouteTest} + * to create one to make sure you call the proper test lifecycle methods on + * the RouteTest + */ +export class RouteTest { + adapter!: MemoryAdapter + node!: IronfishNode + sdk!: IronfishSdk + client!: IronfishMemoryClient + + async beforeAll(): Promise { + const dataDir = path.join(os.tmpdir(), uuid()) + const sdk = await IronfishSdk.init({ dataDir }) + const node = await sdk.node() + const adapter = new MemoryAdapter() + await node.rpc.mount(adapter) + + sdk.config.setOverride('bootstrapNodes', ['']) + await node.openDB() + + this.adapter = adapter + this.node = node + this.sdk = sdk + } + + async afterEach(): Promise { + await this.node.shutdown() + } + + async afterAll(): Promise { + await this.node.closeDB() + } +} + +/** Call this to create a {@link RouteTest} and ensure its test lifecycle + * methods are called properly like beforeEach, beforeAll, etc + */ +export function createRouteTest(): RouteTest { + const routeTest = new RouteTest() + beforeAll(() => routeTest.beforeAll()) + afterEach(() => routeTest.afterEach()) + afterAll(() => routeTest.afterAll()) + return routeTest +} diff --git a/ironfish/src/rpc/routes/transactions/index.ts b/ironfish/src/rpc/routes/transactions/index.ts new file mode 100644 index 0000000000..989a6fac4c --- /dev/null +++ b/ironfish/src/rpc/routes/transactions/index.ts @@ -0,0 +1,5 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export * from './sendTransaction' diff --git a/ironfish/src/rpc/routes/transactions/sendTransaction.test.ts b/ironfish/src/rpc/routes/transactions/sendTransaction.test.ts new file mode 100644 index 0000000000..d3725837b0 --- /dev/null +++ b/ironfish/src/rpc/routes/transactions/sendTransaction.test.ts @@ -0,0 +1,123 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +jest.mock('ws') + +import ws from 'ws' +import { createRouteTest } from '../test' +import { RangeHasher } from '../../../captain/anchorChain/merkleTree' + +import { blockHash, makeFakeBlock, TestStrategy } from '../../../captain/testUtilities' +import { ResponseError } from '../../adapters' +import { PeerNetwork } from '../../../network' + +import { mockPrivateIdentity } from '../../../network/testUtilities' + +describe('Transactions sendTransaction', () => { + const routeTest = createRouteTest() + const strategy = new TestStrategy(new RangeHasher()) + const heaviestHeader = makeFakeBlock(strategy, blockHash(2), blockHash(3), 1, 1, 1).header + + const paymentsParams = { + amount: 10, + fromAccountName: 'existingAccount', + memo: '', + toPublicKey: 'test2', + transactionFee: BigInt(1).toString(), + } + + beforeAll(async () => { + await routeTest.node.accounts.createAccount('existingAccount', true) + + routeTest.node.captain.chain.getHeaviestHead = jest.fn().mockReturnValue(heaviestHeader) + }) + + it('throws if account does not exist', async () => { + try { + await routeTest.adapter.request('transaction/sendTransaction', { + ...paymentsParams, + fromAccountName: 'AccountDoesNotExist', + }) + } catch (e: unknown) { + if (!(e instanceof ResponseError)) throw e + expect(e.message).toContain('No account found with name AccountDoesNotExist') + } + }) + + it('throws if not connected to network', async () => { + try { + await 
routeTest.adapter.request('transaction/sendTransaction', paymentsParams) + } catch (e: unknown) { + if (!(e instanceof ResponseError)) throw e + expect(e.message).toContain( + 'Your node must be connected to the Iron Fish network to send a transaction', + ) + } + }) + + describe('Connected to the network', () => { + beforeAll(() => { + const peerNetwork = new PeerNetwork(mockPrivateIdentity('local'), 'sdk/1/cli', ws) + routeTest.node.networkBridge.attachPeerNetwork(peerNetwork) + peerNetwork['_isReady'] = true + }) + + it('throws if the chain is outdated', async () => { + try { + await routeTest.adapter.request('transaction/sendTransaction', paymentsParams) + } catch (e: unknown) { + if (!(e instanceof ResponseError)) throw e + expect(e.message).toContain( + 'Your node must be synced with the Iron Fish network to send a transaction. Please try again later', + ) + } + }) + + it('throws if not enough funds', async () => { + heaviestHeader.timestamp = new Date() + + try { + await routeTest.adapter.request('transaction/sendTransaction', paymentsParams) + } catch (e: unknown) { + if (!(e instanceof ResponseError)) throw e + expect(e.message).toContain('Your balance is too low. 
Add funds to your account first') + } + }) + + it('throws if the confirmed balance is too low', async () => { + heaviestHeader.timestamp = new Date() + jest.spyOn(routeTest.node.accounts, 'getBalance').mockReturnValueOnce({ + unconfirmedBalance: BigInt(11), + confirmedBalance: BigInt(0), + }) + + try { + await routeTest.adapter.request('transaction/sendTransaction', paymentsParams) + } catch (e: unknown) { + if (!(e instanceof ResponseError)) throw e + expect(e.message).toContain( + 'Please wait a few seconds for your balance to update and try again', + ) + } + }) + + it('calls the pay method on the node', async () => { + heaviestHeader.timestamp = new Date() + routeTest.node.accounts.pay = jest.fn() + const paySpy = jest.spyOn(routeTest.node.accounts, 'pay') + + jest.spyOn(routeTest.node.accounts, 'getBalance').mockReturnValueOnce({ + unconfirmedBalance: BigInt(11), + confirmedBalance: BigInt(11), + }) + + try { + await routeTest.adapter.request('transaction/sendTransaction', paymentsParams) + } catch { + // payment is mocked + } + + expect(paySpy).toHaveBeenCalled() + }) + }) +}) diff --git a/ironfish/src/rpc/routes/transactions/sendTransaction.ts b/ironfish/src/rpc/routes/transactions/sendTransaction.ts new file mode 100644 index 0000000000..78ad50edcd --- /dev/null +++ b/ironfish/src/rpc/routes/transactions/sendTransaction.ts @@ -0,0 +1,104 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { ApiNamespace, router } from '../router' +import * as yup from 'yup' +import { ValidationError } from '../../adapters/errors' + +const LATEST_HEAVIEST_TIMESTAMP_AGO = 1000 * 60 * 20 + +export type SendTransactionRequest = { + fromAccountName: string + toPublicKey: string + amount: string + transactionFee: string + memo: string +} +export type SendTransactionResponse = { + fromAccountName: string + toPublicKey: string + amount: string + transactionHash: string +} + +export const SendTransactionRequestSchema: yup.ObjectSchema = yup + .object({ + fromAccountName: yup.string().defined(), + toPublicKey: yup.string().defined(), + amount: yup.string().defined(), + transactionFee: yup.string().defined(), + memo: yup.string().defined(), + }) + .defined() + +export const SendTransactionResponseSchema: yup.ObjectSchema = yup + .object({ + fromAccountName: yup.string().defined(), + toPublicKey: yup.string().defined(), + amount: yup.string().defined(), + transactionHash: yup.string().defined(), + }) + .defined() + +router.register( + `${ApiNamespace.transaction}/sendTransaction`, + SendTransactionRequestSchema, + async (request, node): Promise => { + const transaction = request.data + + const account = node.accounts.getAccountByName(transaction.fromAccountName) + + if (!account) { + throw new ValidationError(`No account found with name ${transaction.fromAccountName}`) + } + + // The node must be connected to the network first + if (!node.networkBridge.peerNetwork?.isReady) { + throw new ValidationError( + `Your node must be connected to the Iron Fish network to send a transaction`, + ) + } + + const heaviestHead = await node.captain.chain.getHeaviestHead() + // latest heaviest head must be a block mined in the past minute + if ( + !heaviestHead || + heaviestHead.timestamp < new Date(Date.now() - LATEST_HEAVIEST_TIMESTAMP_AGO) + ) { + throw new ValidationError( + `Your node must be synced with the Iron Fish network to send a transaction. 
Please try again later`, + ) + } + + // Check that the node account is updated + const balance = node.accounts.getBalance(account) + const sum = BigInt(transaction.amount) + BigInt(transaction.transactionFee) + + if (balance.confirmedBalance < sum && balance.unconfirmedBalance < sum) { + throw new ValidationError(`Your balance is too low. Add funds to your account first`) + } + + if (balance.confirmedBalance < sum) { + throw new ValidationError( + `Please wait a few seconds for your balance to update and try again`, + ) + } + + const transactionPosted = await node.accounts.pay( + node.captain, + node.memPool, + account, + BigInt(transaction.amount), + BigInt(transaction.transactionFee), + transaction.memo, + transaction.toPublicKey, + ) + + request.end({ + amount: transaction.amount, + toPublicKey: transaction.toPublicKey, + fromAccountName: account.name, + transactionHash: transactionPosted.transactionHash().toString('hex'), + }) + }, +) diff --git a/ironfish/src/rpc/server.ts b/ironfish/src/rpc/server.ts new file mode 100644 index 0000000000..321fb560d9 --- /dev/null +++ b/ironfish/src/rpc/server.ts @@ -0,0 +1,84 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { IAdapter } from './adapters' +import { IronfishNode } from '../node' +import { Router, router, ApiNamespace } from './routes' +import { ArrayUtils } from '../utils' + +export class RpcServer { + readonly node: IronfishNode + + private readonly adapters: IAdapter[] = [] + private readonly router: Router + private _isRunning = false + private _startPromise: Promise | null = null + + constructor(node: IronfishNode) { + this.node = node + this.router = router + this.router.server = this + } + + get isRunning(): boolean { + return this._isRunning + } + + /** Creates a new router from this RpcServer with the attached routes filtered by namespaces */ + getRouter(namespaces: ApiNamespace[]): Router { + return this.router.filter(namespaces) + } + + /** Starts the RPC server and tells any attached adapters to starts serving requests to the routing layer */ + async start(): Promise { + if (this._isRunning) return + + const promises = this.adapters.map>((a) => a.start()) + this._startPromise = Promise.all(promises) + this._isRunning = true + await this._startPromise + } + + /** Stops the RPC server and tells any attached adapters to stop serving requests to the routing layer */ + async stop(): Promise { + if (!this._isRunning) return + + if (this._startPromise) { + await this._startPromise + } + + const promises = this.adapters.map>((a) => a.stop()) + await Promise.all(promises) + this._isRunning = false + } + + /** Adds an adapter to the RPC server and starts it if the server has already been started */ + async mount(adapter: IAdapter): Promise { + this.adapters.push(adapter) + await adapter.attach(this) + + if (this._isRunning) { + let promise: Promise = adapter.start() + + if (this._startPromise) { + // Attach this promise to the start promise chain + // in case we call stop while were still starting up + promise = Promise.all([this._startPromise, promise]) + } + + this._startPromise = promise + } + } + + async unmount(adapter: IAdapter): Promise { + const 
removed = ArrayUtils.remove(this.adapters, adapter) + + if (removed) { + await adapter.stop() + await adapter.unattach() + } + + return removed + } +} diff --git a/ironfish/src/rpc/stream.ts b/ironfish/src/rpc/stream.ts new file mode 100644 index 0000000000..1594d4b254 --- /dev/null +++ b/ironfish/src/rpc/stream.ts @@ -0,0 +1,49 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { PromiseResolve } from '../utils' + +export class Stream implements AsyncIterable { + buffer: T[] = [] + waiting: PromiseResolve>[] = [] + closed = false + + write(value: T): void { + if (this.closed) return + + if (this.waiting.length) { + const waiting = this.waiting.shift() as PromiseResolve> + waiting({ done: false, value: value }) + return + } + + this.buffer.push(value) + } + + close(): void { + this.closed = true + + for (const resolve of this.waiting) { + resolve({ value: null, done: true }) + } + } + + next(): Promise> { + if (this.buffer.length > 0) { + const value = this.buffer.shift() + return Promise.resolve({ done: false, value: value as T }) + } + + if (this.closed) { + return Promise.resolve({ value: null, done: true }) + } + + return new Promise>((resolve) => { + this.waiting.push(resolve) + }) + } + + [Symbol.asyncIterator](): AsyncIterator { + return { next: () => this.next() } + } +} diff --git a/ironfish/src/sdk.test.ts b/ironfish/src/sdk.test.ts new file mode 100644 index 0000000000..e63fb828b6 --- /dev/null +++ b/ironfish/src/sdk.test.ts @@ -0,0 +1,61 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { makeLevelupDatabaseNode } from './storage' +import { NodeFileProvider } from './fileSystems' +import { IronfishSdk } from './sdk' +import os from 'os' +import { Config } from './fileStores' +import { Accounts } from './account' +import { IronfishIpcClient } from './rpc' +import { IronfishNode } from './node' + +describe('IronfishSdk', () => { + it('should initialize an SDK', async () => { + const dataDir = os.tmpdir() + + const fileSystem = new NodeFileProvider() + await fileSystem.init() + + const sdk = await IronfishSdk.init({ + configName: 'foo.config.json', + dataDir: dataDir, + fileSystem: fileSystem, + makeDatabase: makeLevelupDatabaseNode, + }) + + expect(sdk.config).toBeInstanceOf(Config) + expect(sdk.client).toBeInstanceOf(IronfishIpcClient) + expect(sdk.fileSystem).toBe(fileSystem) + + expect(sdk.config.storage.dataDir).toBe(dataDir) + expect(sdk.config.storage.configPath).toContain('foo.config.json') + }) + + it('should detect platform defaults', async () => { + const sdk = await IronfishSdk.init({ dataDir: os.tmpdir() }) + + expect(sdk.makeDatabase).toBe(makeLevelupDatabaseNode) + expect(sdk.fileSystem).toBeInstanceOf(NodeFileProvider) + }) + + it('should create a node', async () => { + const fileSystem = new NodeFileProvider() + await fileSystem.init() + + const sdk = await IronfishSdk.init({ + configName: 'foo.config.json', + dataDir: os.tmpdir(), + fileSystem: fileSystem, + makeDatabase: makeLevelupDatabaseNode, + }) + + const node = await sdk.node({ databaseName: 'foo' }) + + expect(node).toBeInstanceOf(IronfishNode) + expect(node.files).toBe(fileSystem) + expect(node.config).toBe(sdk.config) + expect(node.accounts).toBeInstanceOf(Accounts) + expect(node.config.get('databaseName')).toBe('foo') + }) +}) diff --git a/ironfish/src/sdk.ts b/ironfish/src/sdk.ts new file mode 100644 index 0000000000..5414ade8ef --- /dev/null +++ b/ironfish/src/sdk.ts @@ -0,0 +1,242 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * 
License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { + Logger, + createRootLogger, + setLogLevelFromConfig, + setLogPrefixFromConfig, + setLogColorEnabledFromConfig, +} from './logger' +import { MetricsMonitor } from './metrics' +import { IDatabase, makeLevelupDatabaseNode } from './storage' +import { Config, ConfigOptions } from './fileStores' +import { FileSystem, NodeFileProvider } from './fileSystems' +import { IronfishNode } from './node' +import { ApiNamespace, IpcAdapter, IronfishIpcClient, IronfishMemoryClient } from './rpc' +import { InternalStore } from './fileStores' +import GIT_VERSION from './gitHash' +import { renderVersion } from './network/version' +import { IronfishStrategy, IronfishVerifier } from './strategy' + +type MakeDatabase = (path: string) => Promise + +const VERSION = '14' +const VERSION_PRODUCT = 'ironfish-sdk' +const VERSION_CODE = GIT_VERSION + +export class IronfishSdk { + client: IronfishIpcClient + clientMemory: IronfishMemoryClient + config: Config + fileSystem: FileSystem + makeDatabase: MakeDatabase + logger: Logger + metrics: MetricsMonitor + internal: InternalStore + verifierClass: typeof IronfishVerifier | null + strategyClass: typeof IronfishStrategy | null + + private constructor( + client: IronfishIpcClient, + clientMemory: IronfishMemoryClient, + config: Config, + internal: InternalStore, + fileSystem: FileSystem, + makeDatabase: MakeDatabase, + logger: Logger, + metrics: MetricsMonitor, + verifierClass: typeof IronfishVerifier | null = null, + strategyClass: typeof IronfishStrategy | null = null, + ) { + this.client = client + this.clientMemory = clientMemory + this.config = config + this.internal = internal + this.fileSystem = fileSystem + this.makeDatabase = makeDatabase + this.logger = logger + this.metrics = metrics + this.verifierClass = verifierClass + this.strategyClass = strategyClass + } + + static async init({ + configName, 
+ configOverrides, + fileSystem, + makeDatabase, + dataDir, + logger = createRootLogger(), + metrics, + verifierClass, + strategyClass, + }: { + configName?: string + configOverrides?: Partial + fileSystem?: FileSystem + makeDatabase?: MakeDatabase + dataDir?: string + logger?: Logger + metrics?: MetricsMonitor + verifierClass?: typeof IronfishVerifier + strategyClass?: typeof IronfishStrategy + } = {}): Promise { + const platform = getPlatform() + + if (!fileSystem) { + if (platform === 'node') { + fileSystem = new NodeFileProvider() + await fileSystem.init() + } else throw new Error(`No default fileSystem for ${String(platform)}`) + } + + if (!makeDatabase) { + if (platform === 'node') { + makeDatabase = makeLevelupDatabaseNode + } else throw new Error(`No default makeDatabase for ${String(platform)}`) + } + + logger = logger.withTag('ironfishsdk') + + const config = new Config(fileSystem, dataDir, configName) + await config.load() + + const internal = new InternalStore(fileSystem, dataDir) + await internal.load() + + if (configOverrides) { + Object.assign(config.overrides, configOverrides) + } + + // Update the logger settings + const logLevel = config.get('logLevel') + if (logLevel) { + setLogLevelFromConfig(logLevel) + } + const logPrefix = config.get('logPrefix') + if (logPrefix) { + setLogPrefixFromConfig(logPrefix) + } + + setLogColorEnabledFromConfig(true) + + if (!metrics) { + metrics = metrics || new MetricsMonitor(logger) + } + + const client = new IronfishIpcClient( + config.get('enableRpcTcp') + ? 
{ + mode: 'tcp', + host: config.get('rpcTcpHost'), + port: config.get('rpcTcpPort'), + } + : { + mode: 'ipc', + socketPath: config.get('ipcPath'), + }, + logger, + config.get('rpcRetryConnect'), + ) + + const clientMemory = new IronfishMemoryClient(logger) + + return new IronfishSdk( + client, + clientMemory, + config, + internal, + fileSystem, + makeDatabase, + logger, + metrics, + verifierClass, + strategyClass, + ) + } + + async node({ databaseName }: { databaseName?: string } = {}): Promise { + const node = await IronfishNode.init({ + config: this.config, + internal: this.internal, + files: this.fileSystem, + databaseName: databaseName, + makeDatabase: this.makeDatabase, + logger: this.logger, + metrics: this.metrics, + verifierClass: this.verifierClass, + strategyClass: this.strategyClass, + }) + + const namespaces = [ + ApiNamespace.account, + ApiNamespace.chain, + ApiNamespace.config, + ApiNamespace.faucet, + ApiNamespace.miner, + ApiNamespace.node, + ApiNamespace.peer, + ApiNamespace.transaction, + ApiNamespace.telemetry, + ] + + if (this.config.get('enableRpcIpc')) { + await node.rpc.mount( + new IpcAdapter( + namespaces, + { + mode: 'ipc', + socketPath: this.config.get('ipcPath'), + }, + this.logger, + ), + ) + } + + if (this.config.get('enableRpcTcp')) { + await node.rpc.mount( + new IpcAdapter( + namespaces, + { + mode: 'tcp', + host: this.config.get('rpcTcpHost'), + port: this.config.get('rpcTcpPort'), + }, + this.logger, + ), + ) + } + + return node + } + + /** + * Combines the SDK's version with the name of the client using the SDK + * to produce a version string usable by the peer network code. + * @param agentName The name of the agent using the SDK. e.g. 
cli, browser + */ + getVersion(agentName: string): string { + return renderVersion({ + version: VERSION, + product: VERSION_PRODUCT, + code: VERSION_CODE, + agent: agentName, + }) + } +} + +/** Get the current platform or null if it cannot detect the platform */ +function getPlatform(): 'node' | 'browser' | null { + if ( + typeof process === 'object' && + process && + process.release && + process.versions && + typeof process.versions.node === 'string' + ) { + return 'node' + } + + return null +} diff --git a/ironfish/src/serde/BufferSerde.ts b/ironfish/src/serde/BufferSerde.ts new file mode 100644 index 0000000000..a0ed7c7406 --- /dev/null +++ b/ironfish/src/serde/BufferSerde.ts @@ -0,0 +1,29 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import Serde from '.' +import Uint8ArraySerde from './Uint8ArraySerde' + +/** + * A buffer serializer and equality checker + */ +export class BufferSerde implements Serde { + serde: Uint8ArraySerde + + constructor(readonly size: number) { + this.serde = new Uint8ArraySerde(size) + } + + equals(element1: Buffer, element2: Buffer): boolean { + return this.serde.equals(element1, element2) + } + + serialize(element: Buffer): string { + return this.serde.serialize(element) + } + + deserialize(data: string): Buffer { + return Buffer.from(this.serde.deserialize(data)) + } +} diff --git a/ironfish/src/serde/README.md b/ironfish/src/serde/README.md new file mode 100644 index 0000000000..d11fda34f6 --- /dev/null +++ b/ironfish/src/serde/README.md @@ -0,0 +1,10 @@ +[![codecov](https://codecov.io/gh/iron-fish/ironfish/branch/master/graph/badge.svg?token=PCSVEVEW5V&flag=serde)](https://codecov.io/gh/iron-fish/ironfish) + +Typescript interface for objects that can be serialized and deserialized to json +as well as compared for equality. 
+ +Also includes implementations of that interface for some standard structures: + +- string +- Uint8Array +- Buffer diff --git a/ironfish/src/serde/StringSerde.ts b/ironfish/src/serde/StringSerde.ts new file mode 100644 index 0000000000..b3a3cb8cad --- /dev/null +++ b/ironfish/src/serde/StringSerde.ts @@ -0,0 +1,26 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import Serde from '.' +/** + * Very simple serializer and equality checker for strings. Used for the test + * hasher, which uses strings for both elements and hashes. + */ + +export default class StringSerde implements Serde { + equals(string1: string, string2: string): boolean { + return string1 === string2 + } + + serialize(element: string): string { + return element + } + + deserialize(data: string): string { + if (typeof data === 'string') { + return data + } + throw new Error(`cannot deserialize '${typeof data}' to string`) + } +} diff --git a/ironfish/src/serde/Uint8ArraySerde.test.ts b/ironfish/src/serde/Uint8ArraySerde.test.ts new file mode 100644 index 0000000000..ab7286ae8a --- /dev/null +++ b/ironfish/src/serde/Uint8ArraySerde.test.ts @@ -0,0 +1,71 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import Uint8ArraySerde from './Uint8ArraySerde' + +describe('Uint8ArraySerde', () => { + it('constructs a Uint8ArraySerde', () => { + expect(new Uint8ArraySerde(32)).toMatchSnapshot() + }) + it('compares two arrays as equal', () => { + const nullifier1 = new Uint8Array(32) + const nullifier2 = new Uint8Array(32) + nullifier1[0] = 1 + nullifier2[0] = 1 + expect(new Uint8ArraySerde(32).equals(nullifier1, nullifier2)).toBe(true) + }) + + it('compares two different arrays as not equal', () => { + const nullifier1 = new Uint8Array(32) + const nullifier2 = new Uint8Array(32) + nullifier2[0] = 1 + expect(new Uint8ArraySerde(32).equals(nullifier1, nullifier2)).toBe(false) + }) + it('throws error when passed incorrectly sized array', () => { + const nullifier1 = new Uint8Array(32) + const nullifier2 = new Uint8Array(32) + expect(() => + new Uint8ArraySerde(64).equals(nullifier1, nullifier2), + ).toThrowErrorMatchingInlineSnapshot(`"Attempting to compare inappropriately sized array"`) + }) + + it('serializes and deserializes an equal array', () => { + const serde = new Uint8ArraySerde(32) + const nullifier = new Uint8Array(32) + nullifier.set([8, 18, 24, 199, 255, 1, 0, 127]) + const serialized = serde.serialize(nullifier) + expect(serialized).toMatchInlineSnapshot( + `"081218C7FF01007F000000000000000000000000000000000000000000000000"`, + ) + const deserialized = serde.deserialize(serialized) + expect(deserialized).toMatchSnapshot() + expect(serde.equals(nullifier, deserialized)).toBe(true) + expect(serde.serialize(deserialized)).toEqual(serialized) + }) + + it('throws an error when trying to serialize an inappropriate array', () => { + expect(() => + new Uint8ArraySerde(32).serialize(new Uint8Array(10)), + ).toThrowErrorMatchingInlineSnapshot( + `"Attempting to serialize array with 10 bytes, expected 32"`, + ) + }) + + it('throws an error when trying to deserialize an inappropriate value', () => { + expect(() => new 
Uint8ArraySerde(32).deserialize('ABC')).toThrowErrorMatchingInlineSnapshot( + `"\\"ABC\\" is not a 64-character hex string"`, + ) + expect(() => + // @ts-expect-error Argument of type '{ bad: string; }' is not assignable to parameter of type 'string'. + new Uint8ArraySerde(32).deserialize({ bad: 'object' }), + ).toThrowErrorMatchingInlineSnapshot( + `"{\\"bad\\":\\"object\\"} is not a 64-character hex string"`, + ) + expect(() => + new Uint8ArraySerde(32).deserialize( + 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaag', + ), + ).toThrowErrorMatchingInlineSnapshot(`"unexpected character"`) + }) +}) diff --git a/ironfish/src/serde/Uint8ArraySerde.ts b/ironfish/src/serde/Uint8ArraySerde.ts new file mode 100644 index 0000000000..743b1676c8 --- /dev/null +++ b/ironfish/src/serde/Uint8ArraySerde.ts @@ -0,0 +1,44 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import hexarray from 'hex-array' +import { zip } from 'lodash' +import Serde from '.' 
+ +/** + * General-purpose uint8array serializer and equality checker + */ +export default class Uint8ArraySerde implements Serde { + constructor(readonly size: number) {} + equals(element1: Uint8Array, element2: Uint8Array): boolean { + if (element1.length !== this.size) { + throw new Error('Attempting to compare inappropriately sized array') + } + if (element1.length !== element2.length) { + return false + } + for (const [first, second] of zip(element1, element2)) { + if (first !== second) { + return false + } + } + return true + } + + serialize(element: Uint8Array): string { + if (element.length !== this.size) { + throw new Error( + `Attempting to serialize array with ${element.length} bytes, expected ${this.size}`, + ) + } + return hexarray.toString(element) + } + + deserialize(data: string): Uint8Array { + if (typeof data != 'string' || data.length != this.size * 2) { + throw new Error(`${JSON.stringify(data)} is not a ${this.size * 2}-character hex string`) + } + return hexarray.fromString(data) + } +} diff --git a/ironfish/src/serde/__snapshots__/Uint8ArraySerde.test.ts.snap b/ironfish/src/serde/__snapshots__/Uint8ArraySerde.test.ts.snap new file mode 100644 index 0000000000..e182645342 --- /dev/null +++ b/ironfish/src/serde/__snapshots__/Uint8ArraySerde.test.ts.snap @@ -0,0 +1,44 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Uint8ArraySerde constructs a Uint8ArraySerde 1`] = ` +Uint8ArraySerde { + "size": 32, +} +`; + +exports[`Uint8ArraySerde serializes and deserializes an equal array 2`] = ` +Uint8Array [ + 8, + 18, + 24, + 199, + 255, + 1, + 0, + 127, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, +] +`; diff --git a/ironfish/src/serde/hex-array.d.ts b/ironfish/src/serde/hex-array.d.ts new file mode 100644 index 0000000000..9a214b6e58 --- /dev/null +++ b/ironfish/src/serde/hex-array.d.ts @@ -0,0 +1,11 @@ +/* This Source Code Form is subject to the terms of the Mozilla 
Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +declare module 'hex-array' { + export function toString( + array: Uint8Array, + options?: { grouping: number; rowLength: number; uppercase: boolean }, + ): string + export function fromString(value: string): Uint8Array +} diff --git a/ironfish/src/serde/iJson.test.ts b/ironfish/src/serde/iJson.test.ts new file mode 100644 index 0000000000..164eda3371 --- /dev/null +++ b/ironfish/src/serde/iJson.test.ts @@ -0,0 +1,61 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { IJSON } from './iJson' + +describe('IJSON', () => { + describe('stringify', () => { + it('should stringify bigints', () => { + expect(IJSON.stringify({ num: BigInt(100) })).toBe('{"num":"100n"}') + }) + + it('should stringify negative bigints', () => { + expect(IJSON.stringify({ num: BigInt(-100) })).toBe('{"num":"-100n"}') + }) + + it('should stringify 0 bigints', () => { + expect(IJSON.stringify({ num: BigInt(0) })).toBe('{"num":"0n"}') + }) + + it('should stringify Buffers', () => { + expect(IJSON.stringify({ buf: Buffer.from('a') })).toBe( + '{"buf":{"type":"Buffer","data":"base64:YQ=="}}', + ) + }) + }) + + describe('parse', () => { + it('should parse positive bigints', () => { + const result = IJSON.parse('{"num":"100n"}') as { num: bigint } + expect(result.num).toEqual(BigInt(100)) + }) + + it('should parse negative bigints', () => { + const result = IJSON.parse('{"num":"-100n"}') as { num: bigint } + expect(result.num).toEqual(BigInt(-100)) + }) + + it('should parse 0 bigints', () => { + const result = IJSON.parse('{"num":"0n"}') as { num: bigint } + expect(result.num).toEqual(BigInt(0)) + }) + + it('should not parse n as a bigint', () => { + const result = 
IJSON.parse('{"num":"n"}') as { num: string } + expect(result.num).toEqual('n') + }) + + it('should not parse regular numbers as bigints', () => { + const result = IJSON.parse('{"num":100}') as { num: number } + expect(result.num).toEqual(100) + }) + + it('should parse Buffers', () => { + const result = IJSON.parse('{"buf":{"type":"Buffer","data":"base64:YQ=="}}') as { + buf: Buffer + } + expect(result.buf).toEqual(Buffer.from('a')) + }) + }) +}) diff --git a/ironfish/src/serde/iJson.ts b/ironfish/src/serde/iJson.ts new file mode 100644 index 0000000000..23b2cfc177 --- /dev/null +++ b/ironfish/src/serde/iJson.ts @@ -0,0 +1,35 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import BJSON from 'buffer-json' + +/** + * IJSON, for Iron Fish JSON. Supports parsing/stringifying Buffers and BigInts. + */ +export const IJSON = { + stringify(value: unknown, space?: string | number): string { + return JSON.stringify( + value, + (key, value) => + typeof value === 'bigint' + ? `${value.toString()}n` + : (BJSON.replacer(key, value) as unknown), + space, + ) + }, + + parse(text: string): unknown { + return JSON.parse(text, (key, value) => { + if (typeof value === 'string' && value.endsWith('n') && value.length > 1) { + const slice = value.slice(0, value.length - 1) + const sliceWithoutMinus = slice.startsWith('-') ? 
slice.slice(1) : slice + // If every character except the last is a number, parse as a bigint + if (sliceWithoutMinus.split('').every((char) => !isNaN(Number(char)))) { + return BigInt(slice) + } + } + return BJSON.reviver(key, value) as unknown + }) + }, +} diff --git a/ironfish/src/serde/index.ts b/ironfish/src/serde/index.ts new file mode 100644 index 0000000000..b9489a4f7a --- /dev/null +++ b/ironfish/src/serde/index.ts @@ -0,0 +1,50 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export { default as StringSerde } from './StringSerde' +export { default as Uint8ArraySerde } from './Uint8ArraySerde' +export * from './BufferSerde' +export { IJSON } from './iJson' + +export type JsonSerializable = + | string + | number + | boolean + | null + | Buffer + | JsonSerializable[] + | { [key: string]: JsonSerializable } + +export type IJsonSerializable = + | string + | number + | boolean + | bigint + | null + | Buffer + | IJsonSerializable[] + | { [key: string]: IJsonSerializable } + | unknown + +/** + * Interface for objects that can be serialized, deserialized, and compared for equality. + * + * It surprises me that Javascript doesn't have some sort of native or standard + * support for this. + */ +export default interface Serde { + /** Determine whether two elements should be considered equal */ + equals(element1: E, element2: E): boolean + /** + * Convert an element to a serialized form suitable for storage or + * to be sent over the network. + */ + serialize(element: E): SE + /** + * Convert serialized data from the database or network to an element. + * + * May throw an error if the data cannot be deserialized. 
+ */ + deserialize(data: SE): E +} diff --git a/ironfish/src/storage/database.test.ts b/ironfish/src/storage/database.test.ts new file mode 100644 index 0000000000..79176699eb --- /dev/null +++ b/ironfish/src/storage/database.test.ts @@ -0,0 +1,687 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { LevelupDatabase, LevelupStore } from './levelup' +import leveldown from 'leveldown' +import { + DatabaseSchema, + IDatabase, + BufferEncoding, + JsonEncoding, + StringEncoding, + IDatabaseTransaction, + ArrayEncoding, + SchemaKey, + DuplicateKeyError, +} from './database' +import { IJsonSerializable } from '../serde' + +type FooValue = { + hash: string + name: string +} + +interface FooSchema extends DatabaseSchema { + key: string + value: FooValue +} + +interface BarSchema extends DatabaseSchema { + key: string + value: Buffer +} + +interface BazSchema extends DatabaseSchema { + key: Buffer + value: string +} + +interface TestSchema extends DatabaseSchema { + key: string + value: IJsonSerializable +} + +interface ArrayKeySchema extends DatabaseSchema { + key: [string, number, boolean] + value: boolean +} + +interface KeypathSchema extends DatabaseSchema { + key: [string, number, boolean] + value: { a: string; b: number; c: boolean } +} + +describe('Database', () => { + const id = `./testdbs/${Math.round(Math.random() * Number.MAX_SAFE_INTEGER)}` + const db = new LevelupDatabase(leveldown(id)) + + const fooStore = db.addStore({ + version: 1, + name: 'Foo', + keyEncoding: new StringEncoding(), + valueEncoding: new JsonEncoding(), + keyPath: 'hash', + }) + + const barStore = db.addStore({ + version: 1, + name: 'Bar', + keyEncoding: new StringEncoding(), + valueEncoding: new BufferEncoding(), + }) + + const bazStore = db.addStore({ + version: 1, + name: 'Baz', + keyEncoding: new BufferEncoding(), + 
valueEncoding: new StringEncoding(), + }) + + // Prefix key is modified during tests, don't use in other tests + const testPrefixKeyStore = db.addStore({ + version: 1, + name: 'PrefixKey', + keyEncoding: new BufferEncoding(), + valueEncoding: new StringEncoding(), + }) + + const testStore = db.addStore({ + version: 1, + name: 'Test', + keyEncoding: new StringEncoding(), + valueEncoding: new JsonEncoding(), + }) + + const arrayKeyStore = db.addStore({ + version: 1, + name: 'ArrayKey', + keyEncoding: new ArrayEncoding<[string, number, boolean]>(), + valueEncoding: new JsonEncoding(), + }) + + const keypathStore = db.addStore({ + version: 1, + name: 'Keypath', + keyEncoding: new ArrayEncoding<[string, number, boolean]>(), + valueEncoding: new JsonEncoding<{ a: string; b: number; c: boolean }>(), + keyPath: ['a', 'b', 'c'], + }) + + afterEach(async () => { + await db.close() + }) + + it('should run database migrations', async () => { + async function runMigrations( + db: IDatabase, + oldVersion: number, + newVersion: number, + t: IDatabaseTransaction, + ): Promise { + await testStore.put('oldVersion', oldVersion, t) + await testStore.put('newVersion', newVersion, t) + } + + await db.open({ upgrade: runMigrations }) + expect(await testStore.get('oldVersion')).toBe(0) + expect(await testStore.get('newVersion')).toBe(8) + }) + + it('should run store migrations', async () => { + interface EmptySchema extends DatabaseSchema { + key: string + value: number + } + + const store = db.addStore({ + version: 8, + name: 'Nested', + keyEncoding: new StringEncoding(), + valueEncoding: new JsonEncoding(), + upgrade: async ( + db: IDatabase, + oldVersion: number, + newVersion: number, + t: IDatabaseTransaction, + ): Promise => { + await store.put('oldVersion', oldVersion, t) + await store.put('newVersion', newVersion, t) + }, + }) + + await db.open() + expect(await store.get('oldVersion')).toBe(0) + expect(await store.get('newVersion')).toBe(8) + }) + + it('should store and get 
values', async () => { + await db.open() + const foo = { hash: 'hello', name: 'ironfish' } + const fooHash = Buffer.from(JSON.stringify(foo)) + + await fooStore.put('hello', foo) + await barStore.put('hello', fooHash) + await bazStore.put(fooHash, 'hello') + + expect(await fooStore.get('hello')).toMatchObject(foo) + expect(await barStore.get('hello')).toEqual(fooHash) + expect(await bazStore.get(fooHash)).toEqual('hello') + + await fooStore.del('hello') + await barStore.del('hello') + await bazStore.del(fooHash) + + expect(await fooStore.get('hello')).not.toBeDefined() + expect(await barStore.get('hello')).not.toBeDefined() + expect(await bazStore.get(fooHash)).not.toBeDefined() + }) + + it('should clear store', async () => { + await db.open() + const foo = { hash: 'hello', name: 'ironfish' } + const fooHash = Buffer.from(JSON.stringify(foo)) + + await fooStore.put('hello', foo) + await barStore.put('hello', fooHash) + + expect(await fooStore.get('hello')).toMatchObject(foo) + expect(await barStore.get('hello')).toEqual(fooHash) + + await fooStore.clear() + + expect(await fooStore.get('hello')).not.toBeDefined() + expect(await barStore.get('hello')).toEqual(fooHash) + }) + + it('should add values', async () => { + await db.open() + await db.metaStore.clear() + + await db.metaStore.add('a', 1) + await expect(db.metaStore.get('a')).resolves.toBe(1) + + await expect(db.metaStore.add('a', 2)).rejects.toThrow(DuplicateKeyError) + await expect(db.metaStore.get('a')).resolves.toBe(1) + }) + + it('should add values in transactions', async () => { + await db.open() + await db.metaStore.clear() + + await db.transaction([db.metaStore], 'readwrite', async (tx) => { + // db=undefined, tx=1 + await db.metaStore.add('a', 1, tx) + await expect(db.metaStore.get('a', tx)).resolves.toBe(1) + + // db=undefined, tx=2 + await expect(db.metaStore.add('a', 2, tx)).rejects.toThrow(DuplicateKeyError) + await expect(db.metaStore.get('a', tx)).resolves.toBe(1) + await 
expect(db.metaStore.get('a')).resolves.toBeUndefined() + }) + + await expect(db.metaStore.get('a')).resolves.toBe(1) + }) + + it('should store array based keys', async () => { + await db.open() + await arrayKeyStore.put(['jason', 5, false], true) + expect(await arrayKeyStore.get(['jason', 5, false])).toBe(true) + expect(await arrayKeyStore.get(['jason', 5, true])).toBe(undefined) + + await arrayKeyStore.del(['jason', 5, false]) + expect(await arrayKeyStore.get(['jason', 5, false])).toBe(undefined) + }) + + it('should store nested buffers', async () => { + interface NestedSchema extends DatabaseSchema { + key: string + value: { + buffer: Buffer + } + } + + const store = db.addStore({ + version: 1, + name: 'Nested', + keyEncoding: new StringEncoding(), + valueEncoding: new JsonEncoding<{ buffer: Buffer }>(), + }) + + await db.open() + + const buffer = Buffer.alloc(2, 10) + + await store.put('a', { buffer: buffer }) + const stored = await store.get('a') + + expect(stored).toBeTruthy() + expect(stored?.buffer).toBeInstanceOf(Buffer) + expect(stored?.buffer.byteLength).toBe(2) + expect(stored?.buffer[0]).toBe(10) + expect(stored?.buffer[1]).toBe(10) + }) + + describe('DatabaseBatch', () => { + it('should batch array of writes', async () => { + await db.open() + + const foo = { hash: 'hello', name: 'ironfish' } + const fooHash = Buffer.from(JSON.stringify(foo)) + + await db.batch([ + [fooStore, 'hello', foo], + [barStore, 'hello', fooHash], + [bazStore, fooHash, 'hello'], + ]) + + expect(await fooStore.get('hello')).toMatchObject(foo) + expect(await barStore.get('hello')).toEqual(fooHash) + expect(await bazStore.get(fooHash)).toEqual('hello') + + await db.batch([ + [fooStore, 'hello'], + [barStore, 'hello'], + [bazStore, fooHash], + ]) + + expect(await fooStore.get('hello')).not.toBeDefined() + expect(await barStore.get('hello')).not.toBeDefined() + expect(await bazStore.get(fooHash)).not.toBeDefined() + }) + + it('should batch chained of writes', async () => { + await 
db.open() + + const foo = { hash: 'hello', name: 'ironfish' } + const fooHash = Buffer.from(JSON.stringify(foo)) + + await db + .batch() + .put(fooStore, 'hello', foo) + .put(barStore, 'hello', fooHash) + .put(bazStore, fooHash, 'hello') + .commit() + + expect(await fooStore.get('hello')).toMatchObject(foo) + expect(await barStore.get('hello')).toEqual(fooHash) + expect(await bazStore.get(fooHash)).toEqual('hello') + + await db + .batch() + .del(fooStore, 'hello') + .del(barStore, 'hello') + .del(bazStore, fooHash) + .commit() + + expect(await fooStore.get('hello')).not.toBeDefined() + expect(await barStore.get('hello')).not.toBeDefined() + expect(await bazStore.get(fooHash)).not.toBeDefined() + }) + }) + + describe('DatabaseTransaction', () => { + it('should write in transaction manually', async () => { + await db.open() + + const foo = { hash: 'hello', name: 'ironfish' } + const fooHash = Buffer.from(JSON.stringify(foo)) + + let transaction = db.transaction([fooStore, barStore, bazStore], 'readwrite') + await fooStore.put('hello', foo, transaction) + await barStore.put('hello', fooHash, transaction) + await bazStore.put(fooHash, 'hello', transaction) + await transaction.commit() + + expect(await fooStore.get('hello')).toMatchObject(foo) + expect(await barStore.get('hello')).toEqual(fooHash) + expect(await bazStore.get(fooHash)).toEqual('hello') + + transaction = db.transaction([fooStore, barStore, bazStore], 'readwrite') + await fooStore.del('hello', transaction) + await barStore.del('hello', transaction) + await bazStore.del(fooHash, transaction) + + expect(await fooStore.get('hello')).toMatchObject(foo) + expect(await barStore.get('hello')).toEqual(fooHash) + expect(await bazStore.get(fooHash)).toEqual('hello') + + // Now commit transaction + await transaction.commit() + + expect(await fooStore.get('hello')).not.toBeDefined() + expect(await barStore.get('hello')).not.toBeDefined() + expect(await bazStore.get(fooHash)).not.toBeDefined() + }) + + it('should write 
in transaction automatically', async () => { + await db.open() + + const foo = { hash: 'hello', name: 'ironfish' } + const fooHash = Buffer.from(JSON.stringify(foo)) + + await expect(() => + db.transaction( + [fooStore, barStore, bazStore], + 'readwrite', + async (transaction) => { + await fooStore.put('hello', foo, transaction) + await barStore.put('hello', fooHash, transaction) + await bazStore.put(fooHash, 'hello', transaction) + throw new Error('Aborted Transaction!') + }, + ), + ).rejects.toThrowError('Aborted') + + expect(await fooStore.get('hello')).not.toBeDefined() + expect(await barStore.get('hello')).not.toBeDefined() + expect(await bazStore.get(fooHash)).not.toBeDefined() + + await db.transaction( + [fooStore, barStore, bazStore], + 'readwrite', + async (transaction) => { + await fooStore.put('hello', foo, transaction) + await barStore.put('hello', fooHash, transaction) + await bazStore.put(fooHash, 'hello', transaction) + }, + ) + + await db.transaction( + [fooStore, barStore, bazStore], + 'readwrite', + async (transaction) => { + await fooStore.del('hello', transaction) + await barStore.del('hello', transaction) + await bazStore.del(fooHash, transaction) + + // Should not be commited until this function returns + expect(await fooStore.get('hello')).toMatchObject(foo) + expect(await barStore.get('hello')).toEqual(fooHash) + expect(await bazStore.get(fooHash)).toEqual('hello') + }, + ) + + expect(await fooStore.get('hello')).not.toBeDefined() + expect(await barStore.get('hello')).not.toBeDefined() + expect(await bazStore.get(fooHash)).not.toBeDefined() + }) + + it('should cache transaction operations', async () => { + await db.open() + + const foo = { hash: 'hello', name: 'ironfish' } + const bar = { hash: 'hello', name: 'world' } + + // With an automatic transaction + await db.transaction([fooStore], 'readwrite', async (transaction) => { + await fooStore.put('cache', bar) + await fooStore.del('cache', transaction) + + expect(await fooStore.get('cache', 
transaction)).toBeUndefined() + expect(await fooStore.get('cache')).toMatchObject(bar) + + await fooStore.put('cache', foo, transaction) + + expect(await fooStore.get('cache', transaction)).toMatchObject(foo) + expect(await fooStore.get('cache')).toMatchObject(bar) + + expect(await fooStore.has('cache', transaction)).toBe(true) + expect(await fooStore.has('cache')).toBe(true) + }) + + expect(await fooStore.get('cache')).toMatchObject(foo) + expect(await fooStore.has('cache')).toBe(true) + }) + + it('should cache has and del missing values', async () => { + await db.open() + await db.metaStore.clear() + + await db.transaction([db.metaStore], 'readwrite', async (tx) => { + // db=undefined, tx=undefined + expect(await db.metaStore.get('a', tx)).toBeUndefined() + + // db=1, tx=undefined + await db.metaStore.put('a', 1) + expect(await db.metaStore.get('a', tx)).toBeUndefined() + + // db=1, tx=1 + await db.metaStore.put('a', 1, tx) + expect(await db.metaStore.get('a', tx)).toBe(1) + + // db=1, tx=undefined + await db.metaStore.del('a', tx) + expect(await db.metaStore.get('a', tx)).toBe(undefined) + }) + }) + }) + + describe('DatabaseTransaction: withTransaction', () => { + it('should commit transaction', async () => { + await db.open() + await db.metaStore.put('test', 0) + + await db.withTransaction(null, [db.metaStore], 'readwrite', async (transaction) => { + await db.metaStore.put('test', 1, transaction) + expect(await db.metaStore.get('test')).toBe(0) + }) + + expect(await db.metaStore.get('test')).toBe(1) + }) + + it('should abort transaction if error thrown', async () => { + await db.open() + await db.metaStore.put('test', 0) + + await expect( + db.withTransaction(null, [db.metaStore], 'readwrite', async (transaction) => { + await db.metaStore.put('test', 1, transaction) + throw new Error('test') + }), + ).rejects.toThrowError('test') + + expect(await db.metaStore.get('test')).toBe(0) + }) + + it('should abort transaction if calls abort', async () => { + await 
db.open() + await db.metaStore.put('test', 0) + + await db.withTransaction(null, [db.metaStore], 'readwrite', async (transaction) => { + await db.metaStore.put('test', 1, transaction) + await transaction.abort() + }) + + expect(await db.metaStore.get('test')).toBe(0) + }) + + it('should properly nest transactions', async () => { + await db.open() + await db.metaStore.put('test', 0) + const transaction = db.transaction([db.metaStore], 'readwrite') + + await db.withTransaction( + transaction, + [db.metaStore], + 'readwrite', + async (transaction) => { + await db.metaStore.put('test', 1, transaction) + + await db.withTransaction( + transaction, + [db.metaStore], + 'readwrite', + async (transaction) => { + await db.metaStore.put('test', 2, transaction) + }, + ) + + // Should not commit after inner withTransaction + expect(await db.metaStore.get('test')).toBe(0) + }, + ) + + // Should not commit after outer withTransaction + expect(await db.metaStore.get('test')).toBe(0) + + await transaction.commit() + expect(await db.metaStore.get('test')).toBe(2) + }) + }) + + describe('DatabaseStore: keyPath', () => { + it('should create key from value', async () => { + await db.open() + + const foo = { hash: 'keypath', name: '' } + await fooStore.put(foo) + expect(await fooStore.get('keypath')).toMatchObject(foo) + }) + + it('should create array key from value', async () => { + await db.open() + + const value = { a: '', b: 3, c: true } + await keypathStore.put(value) + expect(await keypathStore.get(['', 3, true])).toMatchObject(value) + }) + + it('should create array key with transaction', async () => { + await db.open() + + const key = ['key', 3, true] as SchemaKey + const value = { a: 'key', b: 3, c: true } + + await db.transaction([keypathStore], 'readwrite', async (t) => { + await keypathStore.put(value, t) + expect(await keypathStore.get(key)).toBeUndefined() + }) + + expect(await keypathStore.get(key)).toMatchObject(value) + }) + }) + + describe('DatabaseStore: key and value 
streams', () => { + it('should get all keys', async () => { + await db.open() + await db.metaStore.clear() + await db.metaStore.put('a', 1000) + await db.metaStore.put('b', 1001) + await db.metaStore.put('c', 1002) + await db.metaStore.put('d', 1003) + + const values = await db.metaStore.getAllValues() + + expect(values).toHaveLength(4) + expect(values).toContain(1000) + expect(values).toContain(1001) + expect(values).toContain(1002) + expect(values).toContain(1003) + + const keys = await db.metaStore.getAllKeys() + + expect(keys).toHaveLength(4) + expect(keys).toContain('a') + expect(keys).toContain('b') + expect(keys).toContain('c') + expect(keys).toContain('d') + }) + + it('should encode and decode keys', async () => { + await db.open() + await bazStore.clear() + + const hash = Buffer.from([0x54, 0x57, 0xf6, 0x2c]) + + // in a transaction + await db.transaction([bazStore], 'readwrite', async (tx) => { + await bazStore.add(hash, 'VALUE', tx) + const keys = await bazStore.getAllKeys(tx) + expect(keys.length).toBe(1) + expect(keys[0]?.equals(hash)).toBe(true) + }) + + // and out of a transaction + const keys = await bazStore.getAllKeys() + expect(keys.length).toBe(1) + expect(keys[0]?.equals(hash)).toBe(true) + }) + + it('should get transactional values', async () => { + await db.open() + await db.metaStore.clear() + + await db.transaction([db.metaStore], 'readwrite', async (tx) => { + // a, db=1000, tx=undefined + await db.metaStore.put('a', 1000) + let values = await db.metaStore.getAllValues(tx) + expect(values).toHaveLength(1) + expect(values).toContain(1000) + + // a, db=1000, tx=1001 + await db.metaStore.put('a', 1001, tx) + values = await db.metaStore.getAllValues(tx) + expect(values).toHaveLength(1) + expect(values).toContain(1001) + + // b, db=undefined, tx=1002 + await db.metaStore.put('b', 1002, tx) + values = await db.metaStore.getAllValues(tx) + expect(values).toHaveLength(2) + expect(values).toContain(1001) + expect(values).toContain(1002) + }) + }) + 
+ it('should not yield undefined', async () => { + await db.open() + await db.metaStore.clear() + await db.metaStore.put('a', 1) + + await db.transaction([db.metaStore], 'readwrite', async (tx) => { + expect(await db.metaStore.get('a', tx)).toBe(1) + + let values = await db.metaStore.getAllValues(tx) + expect(values).toHaveLength(1) + expect(values).toContain(1) + + // cache has undefined and should not yield + await db.metaStore.del('a', tx) + + values = await db.metaStore.getAllValues(tx) + expect(values).toHaveLength(0) + }) + }) + + it('should find entries that have 0xff keys', async () => { + await db.open() + await bazStore.clear() + + await bazStore.put(Buffer.alloc(100, 0xff), '1') + expect((await bazStore.getAllKeys()).length).toBe(1) + }) + + it('should not find entries with an off-by-one prefix and empty key', async () => { + await db.open() + const keyStore = testPrefixKeyStore as LevelupStore + await keyStore.clear() + + // Increment the prefix buffer by one + expect(keyStore.prefixBuffer).toEqual(Buffer.from([92, 188, 18, 188])) + keyStore.prefixBuffer[keyStore.prefixBuffer.length - 1]++ + + // Add an entry with an empty buffer for the key + expect(keyStore.prefixBuffer).toEqual(Buffer.from([92, 188, 18, 189])) + await keyStore.put(Buffer.alloc(0), '1') + expect(await keyStore.get(Buffer.alloc(0))).toEqual('1') + + // Decrement the prefix buffer + keyStore.prefixBuffer[keyStore.prefixBuffer.length - 1]-- + expect(keyStore.prefixBuffer).toEqual(Buffer.from([92, 188, 18, 188])) + + // No keys should exist + expect(await keyStore.getAllKeys()).toHaveLength(0) + }) + }) +}) diff --git a/ironfish/src/storage/database/database.ts b/ironfish/src/storage/database/database.ts new file mode 100644 index 0000000000..30fc1239aa --- /dev/null +++ b/ironfish/src/storage/database/database.ts @@ -0,0 +1,99 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { + BatchOperation, + DatabaseOptions, + DatabaseSchema, + IDatabase, + IDatabaseBatch, + IDatabaseStore, + IDatabaseStoreOptions, + IDatabaseTransaction, + SchemaKey, + SchemaValue, +} from './types' +import { DatabaseIsOpenError } from './errors' + +export abstract class Database implements IDatabase { + stores = new Map>() + + abstract get isOpen(): boolean + + abstract open(options?: DatabaseOptions): Promise + abstract close(): Promise + + abstract transaction( + scopes: IDatabaseStore[], + type: 'readwrite' | 'read', + ): IDatabaseTransaction + + abstract transaction( + scopes: IDatabaseStore[], + type: 'readwrite' | 'read', + handler: (transaction: IDatabaseTransaction) => Promise, + ): Promise + + abstract batch(): IDatabaseBatch + + abstract batch( + writes: BatchOperation< + DatabaseSchema, + SchemaKey, + SchemaValue + >[], + ): Promise + + protected abstract _createStore( + options: IDatabaseStoreOptions, + ): IDatabaseStore + + getStores(): Array> { + return Array.from(this.stores.values()) + } + + addStore( + options: IDatabaseStoreOptions, + ): IDatabaseStore { + if (this.isOpen) { + throw new DatabaseIsOpenError( + `Cannot add store ${options.name} while the database is open`, + ) + } + const existing = this.stores.get(options.name) + if (existing) return existing as IDatabaseStore + + const store = this._createStore(options) + this.stores.set(options.name, store) + return store + } + + /* + Safety wrapper in case you don't know if you've been given a transaction or not + This will create and commit it at the end if it if it hasn't been passed in. + + Usually this is solved by a context that's threaded through the application + and keeps track of this, but we don't have a context. 
+ */ + async withTransaction( + transaction: IDatabaseTransaction | undefined | null, + scopes: IDatabaseStore[], + type: 'readwrite' | 'read', + handler: (transaction: IDatabaseTransaction) => Promise, + ): Promise { + const created = !transaction + transaction = transaction || this.transaction(scopes, type) + + // TODO should we combine scopes if tx is not null but more scopes are given? + + try { + const result = await handler(transaction) + if (created) await transaction.commit() + return result + } catch (error: unknown) { + if (created) await transaction.abort() + throw error + } + } +} diff --git a/ironfish/src/storage/database/encoding.ts b/ironfish/src/storage/database/encoding.ts new file mode 100644 index 0000000000..bd4fcd3c81 --- /dev/null +++ b/ironfish/src/storage/database/encoding.ts @@ -0,0 +1,67 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { IDatabaseEncoding } from './types' +import Serde, { IJSON, IJsonSerializable } from '../../serde' +import hexArray from 'hex-array' + +export class JsonEncoding implements IDatabaseEncoding { + serialize = (value: T): Buffer => Buffer.from(IJSON.stringify(value), 'utf8') + deserialize = (buffer: Buffer): T => IJSON.parse(buffer.toString('utf8')) as T + + equals(): boolean { + throw new Error('You should never use this') + } +} + +export class StringEncoding + implements IDatabaseEncoding { + serialize = (value: TValues): Buffer => Buffer.from(value, 'utf8') + deserialize = (buffer: Buffer): TValues => buffer.toString('utf8') as TValues + + equals(): boolean { + throw new Error('You should never use this') + } +} + +export class BufferEncoding implements IDatabaseEncoding { + serialize = (value: Buffer): Buffer => value + deserialize = (buffer: Buffer): Buffer => buffer + + equals(): boolean { + throw new Error('You should never use this') + } +} + +export class ArrayEncoding extends JsonEncoding {} + +export class BufferArrayEncoding { + serialize = (value: Buffer[]): Buffer => { + const values = value.map((b) => new BufferToStringEncoding().serialize(b)) + return Buffer.from(JSON.stringify(values), 'utf8') + } + + deserialize = (buffer: Buffer): Buffer[] => { + const parsed = JSON.parse(buffer.toString('utf8')) as string[] + return parsed.map((s) => new BufferToStringEncoding().deserialize(s)) + } + + equals(): boolean { + throw new Error('You should never use this') + } +} + +export default class BufferToStringEncoding implements Serde { + serialize(element: Buffer): string { + return hexArray.toString(element) + } + + deserialize(data: string): Buffer { + return Buffer.from(hexArray.fromString(data)) + } + + equals(): boolean { + throw new Error('You should never use this') + } +} diff --git a/ironfish/src/storage/database/errors.ts b/ironfish/src/storage/database/errors.ts new file mode 100644 index 0000000000..bfcafbc858 --- /dev/null +++ 
b/ironfish/src/storage/database/errors.ts @@ -0,0 +1,8 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export class DuplicateKeyError extends Error {} +export class DatabaseOpenError extends Error {} +export class DatabaseIsOpenError extends DatabaseOpenError {} +export class DatabaseIsLockedError extends DatabaseOpenError {} diff --git a/ironfish/src/storage/database/index.ts b/ironfish/src/storage/database/index.ts new file mode 100644 index 0000000000..a4fc676f16 --- /dev/null +++ b/ironfish/src/storage/database/index.ts @@ -0,0 +1,9 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './types' +export * from './database' +export * from './store' +export * from './encoding' +export * from './errors' diff --git a/ironfish/src/storage/database/store.ts b/ironfish/src/storage/database/store.ts new file mode 100644 index 0000000000..9af79c681f --- /dev/null +++ b/ironfish/src/storage/database/store.ts @@ -0,0 +1,88 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { + DatabaseSchema, + IDatabaseStore, + IDatabaseEncoding, + IDatabaseStoreOptions, + IDatabaseTransaction, + SchemaKey, + SchemaValue, + KnownKeys, + UpgradeFunction, +} from './types' + +export abstract class DatabaseStore + implements IDatabaseStore { + version: number + name: string + upgrade: UpgradeFunction | null + keyEncoding: IDatabaseEncoding> + valueEncoding: IDatabaseEncoding> + keyPath: KnownKeys> | KnownKeys>[] | null + + constructor(options: IDatabaseStoreOptions) { + this.version = options.version + this.name = options.name + this.upgrade = options.upgrade || null + this.keyEncoding = options.keyEncoding + this.valueEncoding = options.valueEncoding + this.keyPath = options.keyPath || null + } + + abstract encode(key: SchemaKey): [Buffer] + abstract encode(key: SchemaKey, value: SchemaValue): [Buffer, Buffer] + + abstract get( + key: SchemaKey, + transaction?: IDatabaseTransaction, + ): Promise | undefined> + + abstract getAllIter( + transaction?: IDatabaseTransaction, + ): AsyncGenerator<[SchemaKey, SchemaValue]> + + abstract getAllValuesIter( + transaction?: IDatabaseTransaction, + ): AsyncGenerator> + abstract getAllValues(transaction?: IDatabaseTransaction): Promise>> + + abstract getAllKeysIter(transaction?: IDatabaseTransaction): AsyncGenerator> + abstract getAllKeys(transaction?: IDatabaseTransaction): Promise>> + + abstract clear(): Promise + + abstract has(key: SchemaKey, transaction?: IDatabaseTransaction): Promise + + abstract put( + key: SchemaKey, + value: SchemaValue, + transaction?: IDatabaseTransaction, + ): Promise + + abstract put(value: SchemaValue, transaction?: IDatabaseTransaction): Promise + + abstract add( + key: SchemaKey, + value: SchemaValue, + transaction?: IDatabaseTransaction, + ): Promise + + abstract add(value: SchemaValue, transaction?: IDatabaseTransaction): Promise + + abstract del(key: SchemaKey, transaction?: IDatabaseTransaction): Promise + + protected makeKey(value: SchemaValue): SchemaKey { + if 
(this.keyPath === null) { + throw new Error(`No keypath defined`) + } + + if (Array.isArray(this.keyPath)) { + return this.keyPath.map((path) => value[path]) + } + + return value[this.keyPath] + } +} diff --git a/ironfish/src/storage/database/types.ts b/ironfish/src/storage/database/types.ts new file mode 100644 index 0000000000..999166e3c3 --- /dev/null +++ b/ironfish/src/storage/database/types.ts @@ -0,0 +1,381 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import Serde, { IJsonSerializable } from '../../serde' + +export type DatabaseKey = number | string | Date | Buffer | Array + +export type DatabaseSchema = { + key: DatabaseKey + value: unknown +} + +export type SchemaKey = Schema['key'] +export type SchemaValue = Schema['value'] + +export type UpgradeFunction = ( + db: IDatabase, + oldVersion: number, + newVersion: number, + transaction: IDatabaseTransaction, +) => Promise + +export type DatabaseOptions = { + upgrade?: UpgradeFunction +} & { [key: string]: unknown } + +/** + * A database interface to represent a wrapper for a key value store database. The database is the entry point for creating stores, batches, transactions. + * + * The general idea is that you should create a database and add [[`IDatabaseStore`]]s to it. The stores are where all the operations occur, and accept transactions. + +* Three important functions on this interface are +* * [[`IDatabase.addStore`]] +* * [[`IDatabase.transaction`]] +* * [[`IDatabase.batch`]] +*/ +export interface IDatabase { + /** + * If the datbase is open and available for operations + */ + readonly isOpen: boolean + + /** + * Opens a connection to the database with the given options + * + * Your provided upgrade function in [[`DatabaseOptions.upgrade`]] will be called if + * the version you provide is larger than the stored version. 
+ */ + open(options?: DatabaseOptions): Promise + + /** Closes the database and does not handle any open transactions */ + close(): Promise + + /** + * Add an {@link IDatabaseStore} to the database + * + * You can only add a store to the database if the database is not open. This is because some databases only + * allow initializing new stores when the database is being opened. + * @param options The options for the new store + */ + addStore( + options: IDatabaseStoreOptions, + ): IDatabaseStore + + /** Get all the stores added with [[`IDatabase.addStore`]] */ + getStores(): Array> + + /** + * Starts a {@link IDatabaseTransaction} and returns it. + * + * @warning If you use this then it's up to you to manage the transactions life cycle. + * You should not forget to call [[`IDatabaseTransaction.commit`]] or [[`IDatabaseTransaction.abort`]]. + * If you don't you will deadlock the database. This is why it's better and safer to use [[`IDatabase.transaction::OVERLOAD_2`]] + * + * @param scopes The stores you intend to access. Your operation will fail if it's not a store that is not specified here. + * @param type Indicates which type of access you are going to perform. You can only do writes in readwrite. + * + * @returns A new transaction + */ + transaction( + scopes: IDatabaseStore[], + type: 'readwrite' | 'read', + ): IDatabaseTransaction + + /** + * Starts a {@link IDatabaseTransaction} and executes your handler with it + * + * This is the safest transactional function because it guarantees when your + * code finishes, the transaction will be either committed or aborted if an + * exception has been thrown. + * + * @param scopes The stores you intend to access Your operation will fail if + * it's not a store that is not specified here. + * @param type Indicates which type of access you are going to perform. You + * can only do writes in readwrite. + * @param handler You should pass in a function with your code that you want + * to run in the transaction. 
The handler accepts a transaction and any returns + * are forwarded out. + * + * @returns Forwards the result of your handler to it's return value + */ + transaction( + scopes: IDatabaseStore[], + type: 'readwrite' | 'read', + handler: (transaction: IDatabaseTransaction) => Promise, + ): Promise + + /** + * Uses an existing transaction or starts a transaction and executes your + * handler with it. It commits or aborts the transaction only if a call to + * this function has created one. + * + * Use this when you are given an optional transaction, where you may want + * to create one if one has not been created. + * + * @param scopes The stores you intend to access Your operation will fail if + * it's not a store that is not specified here. + * @param type Indicates which type of access you are going to perform. You + * can only do writes in readwrite. + * @param handler You should pass in a function with your code that you want + * to run in the transaction. The handler accepts a transaction and any returns + * are forwarded out. + * + * @returns Forwards the result of your handler to it's return value + */ + withTransaction( + transaction: IDatabaseTransaction | undefined | null, + scopes: IDatabaseStore[], + type: 'readwrite' | 'read', + handler: (transaction: IDatabaseTransaction) => Promise, + ): Promise + + /** Creates a batch of commands that are executed atomically + * once it's commited using {@link IDatabaseBatch.commit} + * + * @see [[`IDatabaseBatch`]] for what operations are supported + */ + batch(): IDatabaseBatch + + /** + * Executes a batch of database operations atomically + * + * @returns A promise that resolves when the operations are commited to the database + */ + batch( + writes: BatchOperation< + DatabaseSchema, + SchemaKey, + SchemaValue + >[], + ): Promise +} + +export type IDatabaseEncoding = Serde + +export interface IDatabaseBatch { + /** + * Put a value into the database with the given key. 
+ + * @param store - The [[`IDatabaseStore`]] to put the value into + * @param key - The key to insert + * @param value - The value to insert + * + * @returns The batch for chaining operations onto + */ + put( + store: IDatabaseStore, + key: SchemaKey, + value: SchemaValue, + ): IDatabaseBatch + + /** + * Delete a value in the database with the given key. + + * @param store - The [[`IDatabaseStore`]] to delete the key from + * @param key - The key to delete + * + * @returns The batch for chaining operations onto + */ + del( + store: IDatabaseStore, + key: SchemaKey, + ): IDatabaseBatch + + /** Commit the batch atomically to the database */ + commit(): Promise +} + +export type BatchOperation< + Schema extends DatabaseSchema, + Key extends SchemaKey, + Value extends SchemaValue +> = [IDatabaseStore, Key, Value] | [IDatabaseStore, Key] + +/** + * A collection of keys with associated values that exist within a {@link IDatabase} + * you can have many of these inside of a database. All the values inside the store + * have a consistent type specified by the generic parameter. + * + * Use [[`IDatabase.addStore`]] before you open the database to create an [[`IDatabaseStore`]] + * + * You can operate on one or more stores atomically using {@link IDatabase.transaction} + */ +export interface IDatabaseStore { + /** The schema version of the store. 
@see {@link IDatabase.addStore} for more information*/ + version: number + /** The name of the store inside of the {@link IDatabase} */ + name: string + /** Run when when {@link IDatabaseStore.version} changes */ + upgrade: UpgradeFunction | null + /** The [[`IDatabaseEncoding`]] used to serialize keys to store in the database */ + keyEncoding: IDatabaseEncoding> + /** The [[`IDatabaseEncoding`]] used to serialize values to store in the database */ + valueEncoding: IDatabaseEncoding> + + encode(key: SchemaKey): [Buffer] + + /** + * Used to serialize the key and value for the database + * + * @returns An array with the serialized key and value as Buffers + */ + encode(key: SchemaKey, value: SchemaValue): [Buffer, Buffer] + + /* Get an [[`AsyncGenerator`]] that yields all of the key/value pairs in the IDatastore */ + getAllIter( + transaction?: IDatabaseTransaction, + ): AsyncGenerator<[SchemaKey, SchemaValue]> + + /* Get an [[`AsyncGenerator`]] that yields all of the values in the IDatastore */ + getAllValuesIter(transaction?: IDatabaseTransaction): AsyncGenerator> + /* Get all of the values in the IDatastore */ + getAllValues(transaction?: IDatabaseTransaction): Promise>> + + /* Get an [[`AsyncGenerator`]] that yields all of the keys in the IDatastore */ + getAllKeysIter(transaction?: IDatabaseTransaction): AsyncGenerator> + /* Get all of the keys in the IDatastore */ + getAllKeys(transaction?: IDatabaseTransaction): Promise>> + + /** + * Delete every key in the {@link IDatastore} + * + * @returns resolves when all keys have been deleted + */ + clear(): Promise + + /** + * Used to get a value from the store at a given key + + * @param key - The key to fetch + * @param transaction - If provided, the operation will use the transaction. + * + * @returns resolves with the value if found, or undefined if not found. 
+ */ + get( + key: SchemaKey, + transaction?: IDatabaseTransaction, + ): Promise | undefined> + + /** + * Used to check if the the database has a given key + + * @param key - The key to check + * @param transaction - If provided, the operation will use the transaction. + * + * @returns resolves with true if the key is in the database, or false if it is missing. + */ + has(key: SchemaKey, transaction?: IDatabaseTransaction): Promise + + /** + * Put a value into the store with the given key. + + * @param key - The key to insert + * @param value - The value to insert + * @param transaction - If provided, the operation will be executed atomically when the transaction is {@link IDatabaseTransaction.commit | committed}. + * + * @returns A promise that resolves when the operation has been either executed, or added to the transaction. + */ + put( + key: SchemaKey, + value: SchemaValue, + transaction?: IDatabaseTransaction, + ): Promise + + /** + * Add a value to the database and calculate it's key using the `keyPath` specified for the IDataStore. See the documentation on specifying keyPaths in {@link IDatabase.addStore} for more info. + * + * @param value - The value to insert + * @param transaction - If provided, the operation will be executed atomically when the transaction is {@link IDatabaseTransaction.commit | committed}. + * + * @returns A promise that resolves when the operation has been either executed, or added to the transaction. + * @throws {@link DuplicateKeyError} if the key already exists in the transaction or database + */ + put(value: SchemaValue, transaction?: IDatabaseTransaction): Promise + + /** + * Add a value to the database with the given key. + * + * If the key already exists, an {@link DuplicateKeyError} will be thrown. 
If you do not want to throw an error on insert, use {@link IDatabaseStore.put} + + * @param key - The key to insert + * @param value - The value to insert + * @param transaction - If provided, the operation will be executed atomically when the transaction is {@link IDatabaseTransaction.commit | committed}. + * + * @returns A promise that resolves when the operation has been either executed, or added to the transaction. + * @throws {@link DuplicateKeyError} if the key already exists in the transaction or database + */ + add( + key: SchemaKey, + value: SchemaValue, + transaction?: IDatabaseTransaction, + ): Promise + + /** + * Add a value to the database and calculate it's key using the `keyPath` specified for the IDataStore. See the documentation on specifying keypaths in {@link IDatabase.addStore} for more info. + * + * If the key already exists, an {@link DuplicateKeyError} will be thrown. If you do not want to throw an error on insert, use {@link IDatabaseStore.put} + * + * @param value - The value to insert + * @param transaction - If provided, the operation will be executed atomically when the transaction is {@link IDatabaseTransaction.commit | committed}. + * + * @returns A promise that resolves when the operation has been either executed, or added to the transaction. + * @throws {@link DuplicateKeyError} if the key already exists in the transaction or database + */ + add(value: SchemaValue, transaction?: IDatabaseTransaction): Promise + + /** + * Delete a value with the given key. + * + * @param key - The key stored in the database to delete + * @param transaction - If provided, the operation will be executed atomically when the transaction is {@link IDatabaseTransaction.commit | committed}. + * + * @returns A promise that resolves when the operation has been either executed, or added to the transaction. + */ + del(key: SchemaKey, transaction?: IDatabaseTransaction): Promise +} + +export type IDatabaseStoreOptions = { + /** The schema version of the store. 
@see {@link IDatabase.addStore} for more information*/ + version: number + /** The unique name of the store inside of the database */ + name: string + /** The encoding used to encode and decode keys in the database */ + keyEncoding: IDatabaseEncoding> + /** The encoding used to encode and decode values in the database */ + valueEncoding: IDatabaseEncoding> + /** Used to auto construct a key from a value inside the store if specified. It can either be a field from the value, or an array of fields from the value */ + keyPath?: KnownKeys> | KnownKeys>[] + upgrade?: UpgradeFunction +} + +/** + * Stores all operations applied to the transaction and then applies + * them atomically to the database once it's committed. Locks the + * database when the first operation in the transaction is started. + * + * You must release the lock by calling [[`IDatabaseTransaction.commit`]] + * or [[`IDatabaseTransaction.abort`]] + * + * Start a transaction by using {@link IDatabase.transaction} or the less used {@link IDatabase.withTransaction} + * + * @note Unlike most relational database transactions, the state is + * not guaranteed to be consistent at time the transaction was + * started. A row is frozen into the transaction when the first read + * or write is performed on it. + */ +export interface IDatabaseTransaction { + /** Commit the transaction atomically to the database and release the database lock */ + commit(): Promise + /** Abort the transaction and release the database lock */ + abort(): Promise +} + +export type KnownKeys = { + [K in keyof T]: string extends K ? never : number extends K ? never : K +} extends { + [_ in keyof T]: infer U +} + ? U + : never diff --git a/ironfish/src/storage/hex-array.d.ts b/ironfish/src/storage/hex-array.d.ts new file mode 100644 index 0000000000..9a214b6e58 --- /dev/null +++ b/ironfish/src/storage/hex-array.d.ts @@ -0,0 +1,11 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +declare module 'hex-array' { + export function toString( + array: Uint8Array, + options?: { grouping: number; rowLength: number; uppercase: boolean }, + ): string + export function fromString(value: string): Uint8Array +} diff --git a/ironfish/src/storage/index.ts b/ironfish/src/storage/index.ts new file mode 100644 index 0000000000..b017f3a5ca --- /dev/null +++ b/ironfish/src/storage/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export * from './database' +export * from './levelup' diff --git a/ironfish/src/storage/levelup.ts b/ironfish/src/storage/levelup.ts new file mode 100644 index 0000000000..9243d2bcdd --- /dev/null +++ b/ironfish/src/storage/levelup.ts @@ -0,0 +1,654 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { AbstractLevelDOWN, AbstractBatch, PutBatch, DelBatch } from 'abstract-leveldown' +import type LevelDOWN from 'leveldown' +import { + Database, + BatchOperation, + DatabaseSchema, + DatabaseStore, + IDatabaseBatch, + IDatabaseStore, + IDatabaseStoreOptions, + IDatabaseTransaction, + SchemaKey, + SchemaValue, + DatabaseOptions, + StringEncoding, + JsonEncoding, + DuplicateKeyError, + UpgradeFunction, +} from './database' +import { IJsonSerializable } from '../serde' +import levelup, { LevelUp } from 'levelup' +import { AsyncUtils } from '../utils/async' +import { Mutex, MutexUnlockFunction } from './mutex' +import BufferToStringEncoding from './database/encoding' +import MurmurHash3 from 'imurmurhash' +import levelErrors from 'level-errors' +import { DatabaseIsLockedError } from './database/errors' + +const ENABLE_TRANSACTIONS = true +const BUFFER_TO_STRING_ENCODING = new BufferToStringEncoding() + +interface INotFoundError { + type: 'NotFoundError' +} + +function isNotFoundError(error: unknown): error is INotFoundError { + return (error as INotFoundError)?.type === 'NotFoundError' +} + +class LevelupBatch implements IDatabaseBatch { + db: LevelupDatabase + queue: AbstractBatch[] = [] + + constructor(db: LevelupDatabase) { + this.db = db + } + + putEncoded(key: Buffer, value: Buffer): LevelupBatch { + this.queue.push({ type: 'put', key: key, value: value } as PutBatch) + return this + } + + delEncoded(key: Buffer): LevelupBatch { + this.queue.push({ type: 'del', key: key } as DelBatch) + return this + } + + put( + store: IDatabaseStore, + key: SchemaKey, + value: SchemaValue, + ): LevelupBatch { + const [encodedKey, encodedValue] = store.encode(key, value) + return this.putEncoded(encodedKey, encodedValue) + } + + del( + store: IDatabaseStore, + key: SchemaKey, + ): LevelupBatch { + const [encodedKey] = store.encode(key) + return this.delEncoded(encodedKey) + } + + async commit(): Promise { + if (this.queue.length === 0) return + await 
this.db.levelup.batch(this.queue) + this.queue.length = 0 + } +} + +export class LevelupStore extends DatabaseStore { + db: LevelupDatabase + + /* In non relational KV stores, to emulate 'startswith' you often need + to use greaterThan and lessThan using the prefix + a glyph marker. To + search for "App" in a table containing "Apple", "Application", and "Boat" + you would query "gte('App') && lte('App' + 'ff')" Which would return + 'Apple' and 'Application' + */ + allKeysRange: { gte: Buffer; lt: Buffer } + prefixBuffer: Buffer + + constructor(db: LevelupDatabase, options: IDatabaseStoreOptions) { + super(options) + this.db = db + + // Hash the prefix key to ensure identical length and avoid collisions + const prefixHash = new MurmurHash3(this.name, 1).result() + this.prefixBuffer = Buffer.alloc(4) + this.prefixBuffer.writeUInt32BE(prefixHash) + + const gte = Buffer.alloc(4) + gte.writeUInt32BE(prefixHash) + + const lt = Buffer.alloc(4) + lt.writeUInt32BE(prefixHash + 1) + + this.allKeysRange = { + gte: gte, + lt: lt, + } + } + + async has(key: SchemaKey, transaction?: IDatabaseTransaction): Promise { + return (await this.get(key, transaction)) !== undefined + } + + async get( + key: SchemaKey, + transaction?: IDatabaseTransaction, + ): Promise | undefined> { + const [encodedKey] = this.encode(key) + + if (ENABLE_TRANSACTIONS && transaction instanceof LevelupTransaction) { + return transaction.get(this, key) + } + + try { + const data = (await this.db.levelup.get(encodedKey)) as unknown + if (data === undefined) return undefined + if (!(data instanceof Buffer)) return undefined + return this.valueEncoding.deserialize(data) + } catch (error: unknown) { + if (isNotFoundError(error)) return undefined + throw error + } + } + + async *getAllIter( + transaction?: IDatabaseTransaction, + ): AsyncGenerator<[SchemaKey, SchemaValue]> { + const seen = new Set() + + if (ENABLE_TRANSACTIONS && transaction instanceof LevelupTransaction) { + await transaction.acquireLock() + + 
for (const [key, value] of transaction.cache.entries()) { + const keyBuffer = BUFFER_TO_STRING_ENCODING.deserialize(key) + + const isFromStore = keyBuffer + .slice(0, this.prefixBuffer.byteLength) + .equals(this.prefixBuffer) + + if (isFromStore) { + if (value !== undefined) + yield [this.decodeKey(keyBuffer), value as SchemaValue] + seen.add(key) + } + } + } + + const stream = this.db.levelup.createReadStream(this.allKeysRange) + + for await (const pair of stream) { + const { key, value } = (pair as unknown) as { key: Buffer; value: Buffer } + if (!seen.has(BUFFER_TO_STRING_ENCODING.serialize(key))) { + yield [this.decodeKey(key), this.valueEncoding.deserialize(value)] + } + } + } + + async getAll( + transaction?: IDatabaseTransaction, + ): Promise, SchemaValue]>> { + return AsyncUtils.materialize(this.getAllIter(transaction)) + } + + async *getAllValuesIter( + transaction?: IDatabaseTransaction, + ): AsyncGenerator> { + for await (const [, value] of this.getAllIter(transaction)) { + yield value + } + } + + async getAllValues(transaction?: IDatabaseTransaction): Promise>> { + return AsyncUtils.materialize(this.getAllValuesIter(transaction)) + } + + async *getAllKeysIter(transaction?: IDatabaseTransaction): AsyncGenerator> { + for await (const [key] of this.getAllIter(transaction)) { + yield key + } + } + + async getAllKeys(transaction?: IDatabaseTransaction): Promise>> { + return AsyncUtils.materialize(this.getAllKeysIter(transaction)) + } + + async clear(): Promise { + await this.db.levelup.clear(this.allKeysRange) + } + + async put(value: SchemaValue, transaction?: IDatabaseTransaction): Promise + async put( + key: SchemaKey, + value: SchemaValue, + transaction?: IDatabaseTransaction, + ): Promise + async put(a: unknown, b: unknown, c?: unknown): Promise { + const { key: rawKey, value, transaction } = parsePut(a, b, c) + const key = rawKey === undefined ? 
this.makeKey(value) : rawKey + + if (ENABLE_TRANSACTIONS && transaction instanceof LevelupTransaction) { + return transaction.put(this, key, value) + } + + const [encodedKey, encodedValue] = this.encode(key, value) + await this.db.levelup.put(encodedKey, encodedValue) + } + + async add(value: SchemaValue, transaction?: IDatabaseTransaction): Promise + async add( + key: SchemaKey, + value: SchemaValue, + transaction?: IDatabaseTransaction, + ): Promise + async add(a: unknown, b: unknown, c?: unknown): Promise { + const { key: rawKey, value, transaction } = parsePut(a, b, c) + const key = rawKey === undefined ? this.makeKey(value) : rawKey + + if (ENABLE_TRANSACTIONS && transaction instanceof LevelupTransaction) { + return transaction.add(this, key, value) + } + + if (await this.has(key, transaction)) { + throw new DuplicateKeyError(`Key already exists ${String(key)}`) + } + + const [encodedKey, encodedValue] = this.encode(key, value) + await this.db.levelup.put(encodedKey, encodedValue) + } + + async del(key: SchemaKey, transaction?: IDatabaseTransaction): Promise { + if (ENABLE_TRANSACTIONS && transaction instanceof LevelupTransaction) { + return transaction.del(this, key) + } + + const [encodedKey] = this.encode(key) + await this.db.levelup.del(encodedKey) + } + + encode(key: SchemaKey): [Buffer] + encode(key: SchemaKey, value: SchemaValue): [Buffer, Buffer] + encode(key: SchemaKey, value?: SchemaValue): [Buffer] | [Buffer, Buffer] { + const keyBuffer = this.keyEncoding.serialize(key) + const encodedKey = Buffer.concat([this.prefixBuffer, keyBuffer]) + + if (value === undefined) return [encodedKey] + return [encodedKey, this.valueEncoding.serialize(value)] + } + + decodeKey(key: Buffer): SchemaKey { + const keyWithoutPrefix = key.slice(this.prefixBuffer.byteLength) + return this.keyEncoding.deserialize(keyWithoutPrefix) + } +} + +export class LevelupTransaction implements IDatabaseTransaction { + db: LevelupDatabase + scopes: Set + type: 'readwrite' | 'read' + 
batch: LevelupBatch + committing = false + aborting = false + cache = new Map() + cacheDelete = new Set() + unlock: MutexUnlockFunction | null = null + waiting: Promise | null = null + waitingResolve: (() => void) | null = null + id = 0 + + static id = 0 + + constructor( + db: LevelupDatabase, + scopes: IDatabaseStore[], + type: 'readwrite' | 'read', + ) { + this.db = db + this.type = type + this.id = ++LevelupTransaction.id + + this.scopes = new Set(scopes.map((s) => s.name)) + this.batch = new LevelupBatch(db) + } + + async acquireLock(): Promise { + if (this.unlock) return + + if (!this.waiting) { + this.waiting = new Promise((resolve) => (this.waitingResolve = resolve)) + this.unlock = await this.db.lock.lock() + if (this.waitingResolve) this.waitingResolve() + this.waiting = null + this.waitingResolve = null + } else { + await this.waiting + } + } + + releaseLock(): void { + if (!this.unlock) return + this.unlock() + } + + async has( + store: LevelupStore, + key: SchemaKey, + ): Promise { + await this.acquireLock() + return (await this.get(store, key)) !== undefined + } + + async get( + store: LevelupStore, + key: SchemaKey, + ): Promise | undefined> { + await this.acquireLock() + this.assertCanRead(store) + + const [encodedKey] = store.encode(key) + const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey) + + if (this.cacheDelete.has(cacheKey)) { + return undefined + } + + if (this.cache.has(cacheKey)) { + const cached = this.cache.get(cacheKey) + return cached as SchemaValue + } + + const value = await store.get(key) + this.cache.set(cacheKey, value) + return value + } + + async put( + store: LevelupStore, + key: SchemaKey, + value: SchemaValue, + ): Promise { + await this.acquireLock() + this.assertCanWrite(store) + + const [encodedKey, encodedValue] = store.encode(key, value) + const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey) + + this.batch.putEncoded(encodedKey, encodedValue) + this.cache.set(cacheKey, value) + 
this.cacheDelete.delete(cacheKey) + } + + async add( + store: LevelupStore, + key: SchemaKey, + value: SchemaValue, + ): Promise { + await this.acquireLock() + this.assertCanWrite(store) + + if (await this.has(store, key)) { + throw new DuplicateKeyError(`Key already exists ${String(key)}`) + } + + const [encodedKey, encodedValue] = store.encode(key, value) + const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey) + this.batch.putEncoded(encodedKey, encodedValue) + this.cache.set(cacheKey, value) + this.cacheDelete.delete(cacheKey) + } + + async del( + store: LevelupStore, + key: SchemaKey, + ): Promise { + await this.acquireLock() + this.assertCanWrite(store) + + const [encodedKey] = store.encode(key) + const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey) + this.batch.delEncoded(encodedKey) + this.cache.set(cacheKey, undefined) + this.cacheDelete.add(cacheKey) + } + + async commit(): Promise { + try { + if (!this.aborting) { + await this.batch.commit() + } + } finally { + this.releaseLock() + this.cache.clear() + this.cacheDelete.clear() + this.committing = false + } + } + + async abort(): Promise { + this.aborting = true + this.releaseLock() + return Promise.resolve() + } + + private assertCanRead(store: DatabaseStore): void { + this.assertCanWrite(store) + } + + private assertCanWrite(store: DatabaseStore): void { + if (this.committing) { + throw new Error(`Transaction is being committed`) + } + + if (!this.scopes.has(store.name)) { + throw new Error( + `Store ${store.name} is not in transaction scopes: ${Array.from( + this.scopes.values(), + ).join(', ')}`, + ) + } + } +} + +type MetaSchema = { + key: string + value: IJsonSerializable +} + +type StorageAbstractLevelDown = AbstractLevelDOWN + +export class LevelupDatabase extends Database { + db: StorageAbstractLevelDown + metaStore: LevelupStore + lock = new Mutex() + _levelup: LevelUp | null = null + + get levelup(): LevelUp { + if (!this._levelup) throw new Error('Database is not open. 
Call IDatabase.open() first') + return this._levelup + } + + constructor(db: StorageAbstractLevelDown) { + super() + this.db = db + + this.metaStore = this.addStore({ + name: 'Meta', + version: 1, + keyEncoding: new StringEncoding(), + valueEncoding: new JsonEncoding(), + }) as LevelupStore + } + + get isOpen(): boolean { + return this._levelup?.isOpen() || false + } + + getVersion(): number { + return this.getStores().reduce((memo, s) => memo + s.version, 0) + } + + async open(options: DatabaseOptions = {}): Promise { + this._levelup = await new Promise((resolve, reject) => { + const opened = levelup(this.db, (error?: unknown) => { + if (error) { + if (error instanceof levelErrors.OpenError) { + reject(new DatabaseIsLockedError(error.message)) + } else { + reject(error) + } + } else { + resolve(opened) + } + }) + }) + + await this._levelup.open() + + await this.transaction( + [this.metaStore, ...this.stores.values()], + 'readwrite', + async (t) => { + const upgrade = async ( + versionKey: string, + newVersion: number, + upgrade: UpgradeFunction | null = null, + ): Promise => { + const oldVersion = await this.metaStore.get(versionKey) + + if (oldVersion !== undefined && typeof oldVersion !== 'number') { + throw new Error( + `Corrupted meta store version for ${versionKey} is at: ${String(oldVersion)}`, + ) + } + + if (oldVersion !== undefined && newVersion < oldVersion) { + throw new Error( + `Cannot open database: The database version (${oldVersion}) is newer than the provided schema version (${newVersion})`, + ) + } + + if (oldVersion == null || newVersion > oldVersion) { + if (upgrade) { + await upgrade(this, oldVersion || 0, newVersion, t) + } + + await this.metaStore.put(versionKey, newVersion, t) + } + } + + for (const store of this.stores.values()) { + await upgrade(`version_${store.name}`, store.version, store.upgrade) + } + + await upgrade('version', this.getVersion(), options.upgrade) + }, + ) + } + + async close(): Promise { + await this._levelup?.close() 
+ this._levelup = null + } + + transaction( + scopes: IDatabaseStore[], + type: 'readwrite' | 'read', + handler: (transaction: IDatabaseTransaction) => Promise, + ): Promise + transaction( + scopes: IDatabaseStore[], + type: 'readwrite' | 'read', + ): IDatabaseTransaction + transaction( + scopes: IDatabaseStore[], + type: 'readwrite' | 'read', + handler?: (transaction: IDatabaseTransaction) => Promise, + ): IDatabaseTransaction | Promise { + if (handler === undefined) { + return new LevelupTransaction(this, scopes, type) + } + + return this.withTransaction(null, scopes, type, handler) + } + + batch( + writes: BatchOperation< + DatabaseSchema, + SchemaKey, + SchemaValue + >[], + ): Promise + batch(writes?: undefined): LevelupBatch + batch( + writes?: BatchOperation< + DatabaseSchema, + SchemaKey, + SchemaValue + >[], + ): LevelupBatch | Promise { + const batch = new LevelupBatch(this) + + if (!writes) return batch + + for (const write of writes) { + const [store, key, value] = write + + if (!(store instanceof LevelupStore)) { + throw new Error() + } + + if (value === undefined) { + batch.del(store, key) + } else { + batch.put(store, key, value) + } + } + + return batch.commit() + } + + protected _createStore( + options: IDatabaseStoreOptions, + ): IDatabaseStore { + return new LevelupStore(this, options) + } +} + +function parsePut( + keyOrValue: unknown, + valueOrTransaction: unknown, + transaction?: unknown, +): { + key?: SchemaKey + value?: SchemaValue + transaction?: IDatabaseTransaction +} { + if (transaction instanceof LevelupTransaction) + return { + key: keyOrValue as SchemaKey, + value: valueOrTransaction as SchemaValue, + transaction: transaction, + } + + if (valueOrTransaction instanceof LevelupTransaction) + return { + value: keyOrValue as SchemaValue, + transaction: valueOrTransaction, + } + + if (valueOrTransaction !== undefined) + return { + key: keyOrValue as SchemaKey, + value: valueOrTransaction as SchemaValue, + } + + return { + value: keyOrValue 
as SchemaValue, + } +} + +export async function makeLevelupDatabaseNode(path: string): Promise { + await mkDir(path) + + // eslint-disable-next-line @typescript-eslint/no-var-requires + const leveldown = require('leveldown') as typeof LevelDOWN + return new LevelupDatabase(leveldown(path)) + + async function mkDir(path: string): Promise { + const { promises: fs } = await import('fs') + + try { + await fs.mkdir(path, { recursive: true }) + } catch (e: unknown) { + if (!(e instanceof Error) || !e.message.includes('EEXIST')) throw e + } + } +} diff --git a/ironfish/src/storage/mutex.ts b/ironfish/src/storage/mutex.ts new file mode 100644 index 0000000000..6970fd9132 --- /dev/null +++ b/ironfish/src/storage/mutex.ts @@ -0,0 +1,30 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export type MutexUnlockFunction = () => void + +export class Mutex { + private mutex = Promise.resolve() + + lock(): PromiseLike { + let begin: (unlock: MutexUnlockFunction) => void + + this.mutex = this.mutex.then(() => { + return new Promise(begin) + }) + + return new Promise((resolve) => { + begin = resolve + }) + } + + async dispatch(fn: (() => T) | (() => PromiseLike)): Promise { + const unlock = await this.lock() + try { + return await Promise.resolve(fn()) + } finally { + unlock() + } + } +} diff --git a/ironfish/src/strategy/asyncTransaction.test.slow.ts b/ironfish/src/strategy/asyncTransaction.test.slow.ts new file mode 100644 index 0000000000..fc53b55ad4 --- /dev/null +++ b/ironfish/src/strategy/asyncTransaction.test.slow.ts @@ -0,0 +1,209 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { + generateKey, + generateNewPublicAddress, + WasmNote, + WasmTransactionPosted, + Key, +} from 'ironfish-wasm-nodejs' +import AsyncTransaction from './asyncTransaction' +import { MerkleTree } from '../captain' +import { IDatabase } from '../storage' +import { + IronfishNote, + IronfishNoteEncrypted, + IronfishTransaction, + NoteHasher, + WasmNoteEncryptedHash, +} from '.' +import { makeDb, makeDbName } from '../captain/testUtilities' + +async function makeWasmStrategyTree({ + depth, + name, + database, +}: { + depth?: number + name?: string + database?: IDatabase +} = {}): Promise> { + const openDb = !database + + if (!name) name = makeDbName() + if (!database) database = makeDb(name) + + const tree = await MerkleTree.new(new NoteHasher(), database, name, depth) + + if (openDb) { + await database.open() + } + + return tree +} + +type ThenArg = T extends PromiseLike ? U : T + +describe('Demonstrates async transaction', () => { + let tree: ThenArg> + let receiverKey: Key + let spenderKey: Key + let minerNote: WasmNote + let minerTransaction: IronfishTransaction + let simpleTransaction: AsyncTransaction + let publicTransaction: IronfishTransaction + + jest.setTimeout(1200000) + + beforeAll(async () => { + // Pay the cost of setting up Sapling and the DB outside of any test + tree = await makeWasmStrategyTree() + spenderKey = generateKey() + }) + + describe('Can transact between two accounts', () => { + it('Can create a miner reward', async () => { + const owner = generateNewPublicAddress(spenderKey.spending_key).public_address + + minerNote = new WasmNote(owner, BigInt(42), '') + const transaction = new AsyncTransaction() + expect( + await transaction.receive( + spenderKey.spending_key, + new IronfishNote(Buffer.from(minerNote.serialize())), + ), + ).toBe('') + minerTransaction = await transaction.postMinersFee() + expect(minerTransaction).toBeTruthy() + expect(minerTransaction.notesLength()).toEqual(1) + }) + + it('Can verify the miner transaction', () 
=> { + const serializedTransaction = minerTransaction.serialize() + const deserializedTransaction = WasmTransactionPosted.deserialize(serializedTransaction) + expect(deserializedTransaction.verify()).toBeTruthy() + }) + + it('Can add the miner transaction note to the tree', async () => { + for (let i = 0; i < minerTransaction.notesLength(); i++) { + const note = minerTransaction.getNote(i) + await tree.add(note) + } + const treeSize: number = await tree.size() + expect(treeSize).toBeGreaterThan(0) + }) + + it('Can create a async transaction', () => { + simpleTransaction = new AsyncTransaction() + expect(simpleTransaction).toBeTruthy() + }) + + it('Can add a spend to the transaction', async () => { + const witness = await tree.witness(0) + if (witness == null) throw new Error('Witness should not be null') + const result = await simpleTransaction.spend( + spenderKey.spending_key, + new IronfishNote(Buffer.from(minerNote.serialize())), + witness, + ) + expect(result).toEqual('') + }) + + it('Can add a receive to the transaction', async () => { + receiverKey = generateKey() + const receivingNote = new WasmNote(receiverKey.public_address, BigInt(40), '') + const result = await simpleTransaction.receive( + spenderKey.spending_key, + new IronfishNote(Buffer.from(receivingNote.serialize())), + ) + expect(result).toEqual('') + }) + + it('Can post the transaction', async () => { + publicTransaction = await simpleTransaction.post(spenderKey.spending_key, null, BigInt(2)) + expect(publicTransaction).toBeTruthy() + }) + + it('Can verify the transaction', async () => { + expect(publicTransaction.verify()).toBeTruthy() + for (let i = 0; i < publicTransaction.notesLength(); i++) { + await tree.add(publicTransaction.getNote(i)) + } + }) + + it('Exposes binding signature on the transaction', () => { + const hex_signature = publicTransaction.transactionSignature().toString('hex') + expect(hex_signature.length).toBe(128) + }) + + it('Exposes transaction hash', () => { + 
expect(publicTransaction.transactionHash().length).toBe(32) + }) + }) + + describe('Finding notes to spend', () => { + let receiverNote: IronfishNote + const receiverWitnessIndex = 1 + let transaction: AsyncTransaction + + it('Decrypts and fails to decrypt notes', async () => { + // Get the note we added in the previous example + const latestNote = await tree.get(receiverWitnessIndex) + + // We should be able to decrypt the note as owned by the receiver + const decryptedNote = latestNote.decryptNoteForOwner(receiverKey.incoming_view_key) + expect(decryptedNote).toBeTruthy() + if (!decryptedNote) throw new Error('DecryptedNote should be truthy') + receiverNote = decryptedNote + + // If we can decrypt a note as owned by the receiver, the spender should not be able to decrypt it as owned + expect(latestNote.decryptNoteForOwner(spenderKey.incoming_view_key)).toBeUndefined() + + // Nor should the receiver be able to decrypt it as spent + expect(latestNote.decryptNoteForSpender(receiverKey.outgoing_view_key)).toBeUndefined() + + // However, the spender should be able to decrypt it as spent + expect(latestNote.decryptNoteForSpender(spenderKey.outgoing_view_key)).toBeTruthy() + }) + + it('Can create a transaction', async () => { + transaction = new AsyncTransaction() + + const witness = await tree.witness(receiverWitnessIndex) + if (witness == null) throw new Error('Witness must not be null') + + expect(await transaction.spend(receiverKey.spending_key, receiverNote, witness)).toBe('') + + const noteForSpender = new WasmNote(spenderKey.public_address, BigInt(10), '') + const serializedNoteForSpender = Buffer.from(noteForSpender.serialize()) + noteForSpender.free() + const ironfishNoteForSpender = new IronfishNote(serializedNoteForSpender) + + const receiverNoteToSelf = new WasmNote( + generateNewPublicAddress(receiverKey.spending_key).public_address, + BigInt(29), + '', + ) + const serializedReceiverNote = Buffer.from(receiverNoteToSelf.serialize()) + 
receiverNoteToSelf.free() + const ironfishReceiverNote = new IronfishNote(serializedReceiverNote) + + expect(await transaction.receive(receiverKey.spending_key, ironfishNoteForSpender)).toBe( + '', + ) + expect(await transaction.receive(receiverKey.spending_key, ironfishReceiverNote)).toBe('') + }) + + it('Can post a transaction', async () => { + const postedTransaction = await transaction.post( + receiverKey.spending_key, + null, + BigInt(1), + ) + expect(postedTransaction).toBeTruthy() + expect(postedTransaction.verify()).toBeTruthy() + }) + }) +}) diff --git a/ironfish/src/strategy/asyncTransaction.ts b/ironfish/src/strategy/asyncTransaction.ts new file mode 100644 index 0000000000..49644f7723 --- /dev/null +++ b/ironfish/src/strategy/asyncTransaction.ts @@ -0,0 +1,194 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +// +// TODO: This file depends on nodejs librarys (worker-threads) and will not +// work with browser workers. This will need to be abstracted in future. + +import { Worker } from 'worker_threads' +import { + IronfishNote, + IronfishNoteEncrypted, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + WasmNoteEncryptedHash, +} from '.' +import { Witness } from '../captain' + +// Messages that the asyncTransactionWorker knows how to handle +type Request = + | { type: 'receive'; spenderKey: string; serializedNote: Buffer } + | { + type: 'spend' + spenderKey: string + serializedNote: Buffer + serializedWitness: { + treeSize: number + authPath: { side: string; hashOfSibling: Buffer }[] + rootHash: Buffer + } + } + | { type: 'postMinersFee' } + | { + type: 'post' + spenderKey: string + changeGoesTo: string | null + intendedTransactionFee: bigint + } + +/** + * Wrapper of WasmTransaction that performs the work + * in a node worker thread. 
+ * + * The entire transaction is created in the worker thread + * and spends and receipts happen there. + * + * Only when it is posted is the transaction returned + * to this thread. + */ +export default class AsyncTransaction { + worker: Worker + resolvers: Record void> + lastRequestId: number + isPosted: boolean + + constructor() { + // I hate it. I hate it so hard. + // Works around that ts-jest cannot find the file + let dir = __dirname + if (dir.includes('ironfish/src/strategy')) { + dir = dir.replace('ironfish/src/strategy', 'ironfish/build/src/strategy') + } + this.worker = new Worker(dir + '/asyncTransactionWorker.js') + this.worker.on('message', (value) => this.promisifyResponse(value)) + this.resolvers = {} + this.lastRequestId = 0 + this.isPosted = false + } + + /** + * Instruct the worker thread to create a receipt proof for + * the provided parameters and attach the receipt to the transaction. + */ + async receive(spenderKey: string, note: IronfishNote): Promise { + const serializedNote = note.serialize() + const response = await this.promisifyRequest({ + type: 'receive', + spenderKey, + serializedNote, + }) + return response.error ?? 'Unknown response' + } + + /** + * Instruct the worker thread to create a spend proof for the + * provided parameters and attach it to the transaction. + */ + async spend( + spenderKey: string, + note: IronfishNote, + witness: Witness< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash + >, + ): Promise { + const authPath = witness.authPath().map((p) => { + return { side: p.side(), hashOfSibling: p.hashOfSibling() } + }) + const serializedNote = note.serialize() + const response = await this.promisifyRequest({ + type: 'spend', + spenderKey, + serializedNote, + serializedWitness: { + treeSize: witness.treeSize(), + rootHash: witness.serializeRootHash(), + authPath, + }, + }) + return response.error ?? 
'Unknown response' + } + + /** + * Post the transaction as a miner's fee. + * + * A miner's fee transaction should have one receipt and zero spends. + * + * @returns a promise with the posted transaction + */ + async postMinersFee(): Promise { + const serializedPosted = await this.promisifyRequest({ + type: 'postMinersFee', + }) + if (!serializedPosted?.posted) { + throw new Error('Unable to post transaction') + } + this.isPosted = true + return new IronfishTransaction(serializedPosted.posted) + } + + /** + * Post the transaction with its current list of spends and receipts. + * + * @returns a promise with the posted transaction + */ + async post( + spenderKey: string, + changeGoesTo: string | null, + intendedTransactionFee: bigint, + ): Promise { + const serializedPosted = await this.promisifyRequest({ + type: 'post', + spenderKey, + changeGoesTo, + intendedTransactionFee, + }) + if (!serializedPosted?.posted) { + throw new Error('Unable to post transaction') + } + this.isPosted = true + return new IronfishTransaction(serializedPosted.posted) + } + + /** + * Cancel the worker thread and discard the transaction + */ + async cancel(): Promise { + await this.worker.terminate() + } + + /** + * Send a request to the worker thread, + * giving it an id and constructing a promise that can be resolved + * when the worker thread has issued a response message. 
+ */ + private promisifyRequest(request: Request): Promise<{ posted?: Buffer; error?: string }> { + if (this.isPosted) { + throw new Error('This transaction has already been posted') + } + const requestId = this.lastRequestId++ + const promise: Promise<{ posted?: Buffer; error?: string }> = new Promise((resolve) => { + this.resolvers[requestId] = (posted) => resolve(posted) + }) + this.worker.postMessage({ ...request, requestId }) + return promise + } + + /** + * Listener for worker thread messages that looks up which request + * is being responded to and fulfills the promise + * + * Sends and receipts return a string that is either empty or an error message. + * the two post methods return a posted transaction + */ + promisifyResponse(response: { requestId: number; posted?: Buffer; error?: string }): void { + const resolver = this.resolvers[response.requestId] + if (resolver) { + resolver({ posted: response.posted, error: response.error }) + } + delete this.resolvers[response.requestId] + } +} diff --git a/ironfish/src/strategy/asyncTransactionWorker.ts b/ironfish/src/strategy/asyncTransactionWorker.ts new file mode 100644 index 0000000000..7be1dad435 --- /dev/null +++ b/ironfish/src/strategy/asyncTransactionWorker.ts @@ -0,0 +1,138 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { WasmNote, WasmTransaction, WasmTransactionPosted } from 'ironfish-wasm-nodejs' +import { parentPort, MessagePort } from 'worker_threads' +import { Witness, WitnessSide } from '../captain' +import { NoteHasher } from '.' 
+ +type ReceiveRequest = { + type: 'receive' + requestId: number + spenderKey: string + serializedNote: Buffer +} +type SpendRequest = { + type: 'spend' + requestId: number + + spenderKey: string + serializedNote: Buffer + serializedWitness: { + treeSize: number + authPath: { side: string; hashOfSibling: Buffer }[] + rootHash: Buffer + } +} +type PostRequest = { + type: 'post' + requestId: number + spenderKey: string + changeGoesTo: string | null + intendedTransactionFee: bigint +} + +type PostMinersFeeRequest = { type: 'postMinersFee'; requestId: number } + +type Request = ReceiveRequest | SpendRequest | PostRequest | PostMinersFeeRequest + +// One transaction per thread accrues all the spends and receipts for that transaction +const transaction = new WasmTransaction() + +/** + * The client has requested that we add a spend to the transaction + */ +function handleSpend( + port: MessagePort, + { spenderKey, requestId, serializedNote, serializedWitness }: SpendRequest, +): void { + const merkleHasher = new NoteHasher() + const hashSerde = merkleHasher.hashSerde() + + const rootHash = hashSerde.deserialize(serializedWitness.rootHash) + const authPath = serializedWitness.authPath.map(({ side, hashOfSibling }) => { + return { + side: side === 'Left' ? 
WitnessSide.Left : WitnessSide.Right, + hashOfSibling: hashSerde.deserialize(hashOfSibling), + } + }) + + const witness = new Witness(serializedWitness.treeSize, rootHash, authPath, merkleHasher) + + const note = WasmNote.deserialize(serializedNote) + + const error = transaction.spend(spenderKey, note, witness) + + port.postMessage({ requestId, error }) +} + +/** + * The client has requested that we add a new received note to the transaction + */ +function handleReceive( + port: MessagePort, + { requestId, spenderKey, serializedNote }: ReceiveRequest, +): void { + const note = WasmNote.deserialize(serializedNote) + + const error = transaction.receive(spenderKey, note) + + port.postMessage({ requestId, error }) +} + +/** + * The client has requested that we post a transaction. + * + * We post immediately and exit this worker. + */ +function handlePost( + port: MessagePort, + { requestId, spenderKey, changeGoesTo, intendedTransactionFee }: PostRequest, +): void { + const postedTransaction = transaction.post( + spenderKey, + changeGoesTo ?? undefined, + intendedTransactionFee, + ) + const posted = Buffer.from(postedTransaction.serialize()) + port.postMessage({ requestId, posted }) + process.exit(0) +} + +/** + * The client has requested that we post a miner's fee. 
+ * A miner's fee should only have one receipt and no spends + * We try to post it and immediately exit + */ +function handlePostMinersFee( + port: MessagePort, + { requestId }: PostMinersFeeRequest, +): WasmTransactionPosted { + const postedTransaction = transaction.post_miners_fee() + const posted = Buffer.from(postedTransaction.serialize()) + port.postMessage({ requestId, posted }) + process.exit(0) +} + +function handleRequest(port: MessagePort, request: Request) { + switch (request.type) { + case 'spend': + handleSpend(port, request) + break + case 'receive': + handleReceive(port, request) + break + case 'post': + handlePost(port, request) + break + case 'postMinersFee': + handlePostMinersFee(port, request) + break + } +} + +if (parentPort !== null) { + const port = parentPort + port.on('message', (request: Request) => handleRequest(port, request)) +} diff --git a/ironfish/src/strategy/asyncTransactionWorkerPool.ts b/ironfish/src/strategy/asyncTransactionWorkerPool.ts new file mode 100644 index 0000000000..99d8782638 --- /dev/null +++ b/ironfish/src/strategy/asyncTransactionWorkerPool.ts @@ -0,0 +1,51 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import AsyncTransaction from './asyncTransaction' + +/** + * AsyncTransaction Workers have a time-to-start of a few seconds because of + * Sapling setup time. We can keep a started one running to mitigate this time. + */ +class AsyncTransactionWorkerPoolClass { + /** + * A transaction waiting to be returned by createTransaction. + */ + private waitingTransaction: AsyncTransaction | undefined + + /** + * Primes the pool by creating an AsyncTransaction. + */ + start(): AsyncTransactionWorkerPoolClass { + this.waitingTransaction = new AsyncTransaction() + return this + } + + /** + * Shuts down the worker in waitingTransaction and deletes the transaction. 
+ */ + async stop(): Promise { + const trans = this.waitingTransaction + this.waitingTransaction = undefined + await trans?.cancel() + return + } + + /** + * Returns waitingTransaction and replaces it with a fresh AsyncTransaction. + */ + createTransaction(): AsyncTransaction { + if (!this.waitingTransaction) { + return this.start().createTransaction() + } + const trans = this.waitingTransaction + this.waitingTransaction = new AsyncTransaction() + return trans + } +} + +/** + * Export the pool as a singleton. + */ +export const AsyncTransactionWorkerPool = new AsyncTransactionWorkerPoolClass() diff --git a/ironfish/src/strategy/index.test.ts b/ironfish/src/strategy/index.test.ts new file mode 100644 index 0000000000..bbd7740370 --- /dev/null +++ b/ironfish/src/strategy/index.test.ts @@ -0,0 +1,26 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { IronfishStrategy } from './' +describe('Miners reward', () => { + let strategy: IronfishStrategy + beforeAll(() => { + strategy = new IronfishStrategy() + }) + + // see https://ironfish.network/docs/whitepaper/4_mining#include-the-miner-reward-based-on-coin-emission-schedule + // for more details + it('miners reward is properly calculated for year 0-1', () => { + let minersReward = strategy.miningReward(BigInt(1)) + expect(minersReward).toBe(5 * 10 ** 8) + + minersReward = strategy.miningReward(BigInt(100000)) + expect(minersReward).toBe(5 * 10 ** 8) + }) + + it('miners reward is properly calculated for year 1-2', () => { + const minersReward = strategy.miningReward(BigInt(2100001)) + expect(minersReward).toBe(475614712) + }) +}) diff --git a/ironfish/src/strategy/index.ts b/ironfish/src/strategy/index.ts new file mode 100644 index 0000000000..510c8e704d --- /dev/null +++ b/ironfish/src/strategy/index.ts @@ -0,0 +1,560 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { + generateNewPublicAddress, + WasmNote, + WasmNoteEncrypted, + WasmTransactionPosted, +} from 'ironfish-wasm-nodejs' +import Captain, { + Block, + BlockHash, + BlockSerde, + NullifierHasher, + Spend, + Strategy, + Transaction as TransactionInterface, + Validity, + VerificationResult, + MerkleHasher, + Verifier, + Witness, + BlockHeader, + SerializedBlock, +} from '../captain' +import Blockchain from '../captain/anchorChain/blockchain' +import Serde from '../serde' + +import { MiningDirector } from '../mining' +import hashBlockHeader from '../mining/miningAlgorithm' +import { AsyncTransactionWorkerPool } from './asyncTransactionWorkerPool' +import { MemPool } from '../memPool' +import { VerificationResultReason } from '../captain/anchorChain/blockchain/VerificationResult' + +/** + * Implementation of the IronFish strategy that calls into sapling via Wasm + * to encode notes in zero-knowledge proofs. + */ + +/** + * Serialized version of an encrypted note. + */ +export type SerializedWasmNoteEncrypted = Buffer + +/** + * An encrypted note's hash. + */ +export type WasmNoteEncryptedHash = Buffer + +/** + * Serialized version of an encrypted note's hash. + */ +export type SerializedWasmNoteEncryptedHash = Buffer + +export const GENESIS_SUPPLY_IN_IRON = 42000000 + +export const IRON_FISH_YEAR_IN_BLOCKS = 2100000 + +/** + * Serde implementation to convert an encrypted note to its serialized form and back. + */ +class WasmNoteEncryptedSerde + implements Serde { + equals(note1: IronfishNoteEncrypted, note2: IronfishNoteEncrypted): boolean { + return note1.serialize().equals(note2.serialize()) + } + + serialize(note: IronfishNoteEncrypted): SerializedWasmNoteEncrypted { + return note.serialize() + } + + deserialize(serializedNote: SerializedWasmNoteEncrypted): IronfishNoteEncrypted { + return new IronfishNoteEncrypted(serializedNote) + } +} + +/** + * Serde implementation to convert an encrypted note's hash to its serialized form and back. 
+ */ +class WasmNoteEncryptedHashSerde + implements Serde { + equals(hash1: WasmNoteEncryptedHash, hash2: WasmNoteEncryptedHash): boolean { + return hash1.equals(hash2) + } + serialize(note: WasmNoteEncryptedHash): SerializedWasmNoteEncryptedHash { + return note + } + deserialize(serializedNote: SerializedWasmNoteEncryptedHash): WasmNoteEncryptedHash { + return serializedNote + } +} + +/** + * Hasher implementation for notes to satisfy the MerkleTree requirements. + */ +export class NoteHasher + implements + MerkleHasher< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash + > { + _merkleNoteSerde: WasmNoteEncryptedSerde + _merkleNoteHashSerde: WasmNoteEncryptedHashSerde + constructor() { + this._merkleNoteSerde = new WasmNoteEncryptedSerde() + this._merkleNoteHashSerde = new WasmNoteEncryptedHashSerde() + } + + elementSerde(): Serde { + return this._merkleNoteSerde + } + + hashSerde(): Serde { + return this._merkleNoteHashSerde + } + + merkleHash(note: IronfishNoteEncrypted): Buffer { + return note.merkleHash() + } + + combineHash( + depth: number, + left: WasmNoteEncryptedHash, + right: WasmNoteEncryptedHash, + ): WasmNoteEncryptedHash { + return Buffer.from(WasmNoteEncrypted.combineHash(depth, left, right)) + } +} + +export type TransactionHash = Buffer + +export class IronfishNote { + private readonly wasmNoteSerialized: Buffer + private wasmNote: WasmNote | null = null + private referenceCount = 0 + + constructor(wasmNoteSerialized: Buffer) { + this.wasmNoteSerialized = wasmNoteSerialized + } + + serialize(): Buffer { + return this.wasmNoteSerialized + } + + takeReference(): WasmNote { + this.referenceCount++ + if (this.wasmNote === null) { + this.wasmNote = WasmNote.deserialize(this.wasmNoteSerialized) + } + return this.wasmNote + } + + returnReference(): void { + this.referenceCount-- + if (this.referenceCount <= 0) { + this.referenceCount = 0 + this.wasmNote?.free() + this.wasmNote = null + } + 
} + + value(): BigInt { + const value = this.takeReference().value + this.returnReference() + return value + } + + memo(): string { + const memo = this.takeReference().memo + this.returnReference() + return memo + } + + nullifier(ownerPrivateKey: string, position: BigInt): Buffer { + const buf = Buffer.from(this.takeReference().nullifier(ownerPrivateKey, position)) + this.returnReference() + return buf + } +} + +export class IronfishNoteEncrypted { + private readonly wasmNoteEncryptedSerialized: Buffer + private wasmNoteEncrypted: WasmNoteEncrypted | null = null + private referenceCount = 0 + + constructor(wasmNoteEncryptedSerialized: Buffer) { + this.wasmNoteEncryptedSerialized = wasmNoteEncryptedSerialized + } + + serialize(): Buffer { + return this.wasmNoteEncryptedSerialized + } + + takeReference(): WasmNoteEncrypted { + this.referenceCount++ + if (this.wasmNoteEncrypted === null) { + this.wasmNoteEncrypted = WasmNoteEncrypted.deserialize(this.wasmNoteEncryptedSerialized) + } + return this.wasmNoteEncrypted + } + + returnReference(): void { + this.referenceCount-- + if (this.referenceCount <= 0) { + this.referenceCount = 0 + this.wasmNoteEncrypted?.free() + this.wasmNoteEncrypted = null + } + } + + decryptNoteForOwner(ownerHexKey: string): IronfishNote | undefined { + const note = this.takeReference().decryptNoteForOwner(ownerHexKey) + this.returnReference() + if (note) { + const serializedNote = note.serialize() + note.free() + return new IronfishNote(Buffer.from(serializedNote)) + } + } + + decryptNoteForSpender(spenderHexKey: string): IronfishNote | undefined { + const note = this.takeReference().decryptNoteForSpender(spenderHexKey) + this.returnReference() + if (note) { + const serializedNote = note.serialize() + note.free() + return new IronfishNote(Buffer.from(serializedNote)) + } + } + + merkleHash(): Buffer { + const note = this.takeReference().merkleHash() + this.returnReference() + return Buffer.from(note) + } +} + +/** + * Wraps a Wasm transaction to 
provide a convenient interface. + * + * Transactions come from a client looking to spend. They are stored on blocks + * in the transaction list, and one is also used to hold the miner's fee for + * a given transaction. + */ +export class IronfishTransaction + implements TransactionInterface { + private readonly wasmTransactionPostedSerialized: Buffer + private wasmTransactionPosted: WasmTransactionPosted | null = null + private referenceCount = 0 + + constructor(wasmTransactionPostedSerialized: Buffer) { + this.wasmTransactionPostedSerialized = wasmTransactionPostedSerialized + } + + serialize(): Buffer { + return this.wasmTransactionPostedSerialized + } + + takeReference(): WasmTransactionPosted { + this.referenceCount++ + if (this.wasmTransactionPosted === null) { + this.wasmTransactionPosted = WasmTransactionPosted.deserialize( + this.wasmTransactionPostedSerialized, + ) + } + return this.wasmTransactionPosted + } + + returnReference(): void { + this.referenceCount-- + if (this.referenceCount <= 0) { + this.referenceCount = 0 + this.wasmTransactionPosted?.free() + this.wasmTransactionPosted = null + } + } + + withReference(callback: (transaction: WasmTransactionPosted) => R): R { + const transaction = this.takeReference() + try { + return callback(transaction) + } finally { + this.returnReference() + } + } + + verify(): VerificationResult { + return this.withReference((t) => { + const result = t.verify() + return result + ? 
{ valid: Validity.Yes } + : { valid: Validity.No, reason: VerificationResultReason.ERROR } + }) + } + + notesLength(): number { + return this.withReference((t) => t.notesLength) + } + + getNote(index: number): IronfishNoteEncrypted { + return this.withReference((t) => { + // Get the note + const serializedNote = Buffer.from(t.getNote(index)) + + // Convert it to an IronfishNoteEncrypted + return new IronfishNoteEncrypted(serializedNote) + }) + } + + *notes(): Iterable { + const notesLength = this.notesLength() + + for (let i = 0; i < notesLength; i++) { + yield this.getNote(i) + } + } + + spendsLength(): number { + return this.withReference((t) => t.spendsLength) + } + + *spends(): Iterable> { + const spendsLength = this.spendsLength() + for (let i = 0; i < spendsLength; i++) { + yield this.withReference((t) => { + const wasmSpend = t.getSpend(i) + const spend: Spend = { + size: wasmSpend.treeSize, + nullifier: Buffer.from(wasmSpend.nullifier), + commitment: Buffer.from(wasmSpend.rootHash), + } + wasmSpend.free() + return spend + }) + } + } + + transactionFee(): bigint { + return this.withReference((t) => BigInt(t.transactionFee)) + } + + transactionSignature(): Buffer { + return this.withReference((t) => Buffer.from(t.transactionSignature)) + } + + transactionHash(): TransactionHash { + return this.withReference((t) => Buffer.from(t.transactionHash)) + } +} + +/** + * Serialized version of the Transaction wrapper. + */ +export type SerializedTransaction = Buffer + +export type SerializedIronfishBlock = SerializedBlock< + SerializedWasmNoteEncryptedHash, + SerializedTransaction +> + +/** + * Serializer and equality checker for Transaction wrappers. 
+ */ +export class TransactionSerde implements Serde { + equals(): boolean { + throw new Error(`Not implemented`) + } + + serialize(transaction: IronfishTransaction): SerializedTransaction { + return transaction.serialize() + } + + deserialize(data: SerializedTransaction): IronfishTransaction { + return new IronfishTransaction(data) + } +} + +/** + * Implementation of a Blockchain Strategy using the Wasm zero-knowledge proofs. + */ +export class IronfishStrategy + implements + Strategy< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + SerializedTransaction + > { + _noteHasher: NoteHasher + + _nullifierHasher: NullifierHasher + + _blockSerde: BlockSerde< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + SerializedTransaction + > + private _verifierClass: typeof IronfishVerifier + private miningRewardCachedByYear: Map + + constructor(verifierClass: typeof IronfishVerifier | null = null) { + this._noteHasher = new NoteHasher() + this._nullifierHasher = new NullifierHasher() + this._blockSerde = new BlockSerde(this) + this._verifierClass = verifierClass || Verifier + this.miningRewardCachedByYear = new Map() + } + + noteHasher(): NoteHasher { + return this._noteHasher + } + + nullifierHasher(): NullifierHasher { + return this._nullifierHasher + } + + transactionSerde(): TransactionSerde { + return new TransactionSerde() + } + + hashBlockHeader(serializedHeader: Buffer): BlockHash { + return hashBlockHeader(serializedHeader) + } + + /** + * See https://ironfish.network/docs/whitepaper/4_mining#include-the-miner-reward-based-on-coin-emission-schedule + * + * Annual coin issuance from mining goes down every year. 
Year is defined here by the + * number of blocks (IRON_FISH_YEAR_IN_BLOCKS) + * + * Given the genesis block supply (GENESIS_SUPPLY_IN_IRON) the formula to calculate + * reward per block is: + * (genesisSupply / 4) * e ^(-.05 * yearsAfterLaunch) + * Where e is the natural number e (Euler's number), and -.05 is a decay function constant + * + * @param sequence Block sequence + * @returns mining reward (in ORE) per block given the block sequence + */ + miningReward(sequence: bigint): number { + const yearsAfterLaunch = Math.floor(Number(sequence) / IRON_FISH_YEAR_IN_BLOCKS) + let reward = this.miningRewardCachedByYear.get(yearsAfterLaunch) + if (reward) { + return reward + } + + const annualReward = (GENESIS_SUPPLY_IN_IRON / 4) * Math.E ** (-0.05 * yearsAfterLaunch) + reward = this.convertIronToOre(annualReward / IRON_FISH_YEAR_IN_BLOCKS) + this.miningRewardCachedByYear.set(yearsAfterLaunch, reward) + + return reward + } + + convertIronToOre(iron: number): number { + return Math.round(iron * 10 ** 8) + } + + createVerifier(chain: IronfishBlockchain): IronfishVerifier { + return new this._verifierClass(chain) + } + + async createMinersFee( + totalTransactionFees: bigint, + blockSequence: bigint, + minerKey: string, + ): Promise { + const transaction = AsyncTransactionWorkerPool.createTransaction() + + // Generate a public address from the miner's spending key + const owner = generateNewPublicAddress(minerKey).public_address + + // Create a new note with value equal to the inverse of the sum of the + // transaction fees and the mining reward + const amount = totalTransactionFees + BigInt(this.miningReward(blockSequence)) + const minerNote = new WasmNote(owner, amount, '') + const serializedNote = Buffer.from(minerNote.serialize()) + minerNote.free() + + await transaction.receive(minerKey, new IronfishNote(serializedNote)) + + return new IronfishTransaction(Buffer.from((await transaction.postMinersFee()).serialize())) + } +} + +export type IronfishBlock = Block< + 
IronfishNoteEncrypted, + WasmNoteEncryptedHash, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + SerializedTransaction +> + +export type IronfishBlockHeader = BlockHeader< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + SerializedTransaction +> + +export type IronfishBlockchain = Blockchain< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + SerializedTransaction +> + +export type IronfishCaptain = Captain< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + SerializedTransaction +> + +export type IronfishMemPool = MemPool< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + SerializedTransaction +> + +export class IronfishVerifier extends Verifier< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + SerializedTransaction +> {} + +export type IronfishWitness = Witness< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash +> + +export type IronfishMiningDirector = MiningDirector< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + SerializedTransaction +> diff --git a/ironfish/src/strategy/strategy.test.slow.ts b/ironfish/src/strategy/strategy.test.slow.ts new file mode 100644 index 0000000000..7c94169a50 --- /dev/null +++ b/ironfish/src/strategy/strategy.test.slow.ts @@ -0,0 +1,262 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { + WasmSimpleTransaction, + generateKey, + generateNewPublicAddress, + WasmNote, + WasmTransaction, + WasmTransactionPosted, + Key, +} from 'ironfish-wasm-nodejs' +import { MerkleTree } from '../captain' +import { IDatabase } from '../storage' +import { + IronfishNote, + IronfishNoteEncrypted, + IronfishStrategy, + NoteHasher, + WasmNoteEncryptedHash, +} from '.' +import { makeDb, makeDbName } from '../captain/testUtilities' +import { AsyncTransactionWorkerPool } from './asyncTransactionWorkerPool' + +async function makeWasmStrategyTree({ + depth, + name, + database, +}: { + depth?: number + name?: string + database?: IDatabase +} = {}): Promise> { + const openDb = !database + + if (!name) name = makeDbName() + if (!database) database = makeDb(name) + + const tree = await MerkleTree.new(new NoteHasher(), database, name, depth) + + if (openDb) { + await database.open() + } + + return tree +} + +type ThenArg = T extends PromiseLike ? U : T + +/** + * Tests whether it's possible to create a miner reward and transfer those funds + * to another account using the Wasm transactions + strategy. + * + * This is an integration test dependent on order of execution of the `it` + * blocks inside the test. 
+ */ +describe('Demonstrate the Sapling API', () => { + let tree: ThenArg> + let receiverKey: Key + let spenderKey: Key + let minerNote: WasmNote + let minerTransaction: WasmTransactionPosted + let simpleTransaction: WasmSimpleTransaction + let publicTransaction: WasmTransactionPosted + + beforeAll(async () => { + // Pay the cost of setting up Sapling and the DB outside of any test + tree = await makeWasmStrategyTree() + spenderKey = generateKey() + }) + + describe('Can transact between two accounts', () => { + it('Can create a miner reward', () => { + const owner = generateNewPublicAddress(spenderKey.spending_key).public_address + + minerNote = new WasmNote(owner, BigInt(42), '') + const transaction = new WasmTransaction() + expect(transaction.receive(spenderKey.spending_key, minerNote)).toBe('') + minerTransaction = transaction.post_miners_fee() + expect(minerTransaction).toBeTruthy() + expect(minerTransaction.notesLength).toEqual(1) + }) + + it('Can verify the miner transaction', () => { + const serializedTransaction = minerTransaction.serialize() + const deserializedTransaction = WasmTransactionPosted.deserialize(serializedTransaction) + expect(deserializedTransaction.verify()).toBeTruthy() + }) + + it('Can add the miner transaction note to the tree', async () => { + for (let i = 0; i < minerTransaction.notesLength; i++) { + const note = Buffer.from(minerTransaction.getNote(i)) + await tree.add(new IronfishNoteEncrypted(note)) + } + const treeSize: number = await tree.size() + expect(treeSize).toBeGreaterThan(0) + }) + + it('Can create a simple transaction', () => { + simpleTransaction = new WasmSimpleTransaction(spenderKey.spending_key, BigInt(0)) + expect(simpleTransaction).toBeTruthy() + }) + + it('Can add a spend to the transaction', async () => { + const witness = await tree.witness(0) + if (witness == null) throw new Error('Witness should not be null') + const result = simpleTransaction.spend(minerNote, witness) + expect(result).toEqual('') + }) + + 
it('Can add a receive to the transaction', () => { + receiverKey = generateKey() + const receivingNote = new WasmNote(receiverKey.public_address, BigInt(40), '') + const result = simpleTransaction.receive(receivingNote) + expect(result).toEqual('') + }) + + it('Can post the transaction', () => { + publicTransaction = simpleTransaction.post() + expect(publicTransaction).toBeTruthy() + }) + + it('Can verify the transaction', async () => { + expect(publicTransaction.verify()).toBeTruthy() + for (let i = 0; i < publicTransaction.notesLength; i++) { + const note = Buffer.from(publicTransaction.getNote(i)) + await tree.add(new IronfishNoteEncrypted(note)) + } + }) + + it('Exposes binding signature on the transaction', () => { + const hex_signature = Buffer.from(publicTransaction.transactionSignature).toString('hex') + expect(hex_signature.toString().length).toBe(128) + }) + + it('Exposes transaction hash', () => { + expect(publicTransaction.transactionHash.length).toBe(32) + }) + }) + + describe('Serializes and deserializes transactions', () => { + it('Does not hold a posted transaction if no references are taken', async () => { + // Generate a miner's fee transaction + const strategy = new IronfishStrategy() + const minersFee = await strategy.createMinersFee( + BigInt(0), + BigInt(0), + generateKey().spending_key, + ) + await AsyncTransactionWorkerPool.stop() + + expect(minersFee['wasmTransactionPosted']).toBeNull() + expect(minersFee.verify()).toEqual({ valid: 1 }) + expect(minersFee['wasmTransactionPosted']).toBeNull() + }, 60000) + + it('Holds a posted transaction if a reference is taken', async () => { + // Generate a miner's fee transaction + const strategy = new IronfishStrategy() + const minersFee = await strategy.createMinersFee( + BigInt(0), + BigInt(0), + generateKey().spending_key, + ) + await AsyncTransactionWorkerPool.stop() + + minersFee.withReference(() => { + expect(minersFee['wasmTransactionPosted']).not.toBeNull() + + 
expect(minersFee.verify()).toEqual({ valid: 1 }) + expect(minersFee['wasmTransactionPosted']).not.toBeNull() + }) + + expect(minersFee['wasmTransactionPosted']).toBeNull() + }, 60000) + + it('Does not hold a note if no references are taken', async () => { + // Generate a miner's fee transaction + const key = generateKey() + const strategy = new IronfishStrategy() + const minersFee = await strategy.createMinersFee(BigInt(0), BigInt(0), key.spending_key) + await AsyncTransactionWorkerPool.stop() + + expect(minersFee['wasmTransactionPosted']).toBeNull() + const noteIterator = minersFee.notes() + expect(minersFee['wasmTransactionPosted']).toBeNull() + + let note: IronfishNoteEncrypted | null = null + for (const n of noteIterator) { + note = n + } + if (note === null) throw new Error('Must have at least one note') + + expect(note['wasmNoteEncrypted']).toBeNull() + const decryptedNote = note.decryptNoteForOwner(key.incoming_view_key) + expect(decryptedNote).toBeDefined() + expect(note['wasmNoteEncrypted']).toBeNull() + + if (decryptedNote === undefined) throw new Error('Note must be decryptable') + + expect(decryptedNote['wasmNote']).toBeNull() + expect(decryptedNote.value()).toBe(BigInt(500000000)) + expect(decryptedNote['wasmNote']).toBeNull() + }, 60000) + }) + + describe('Finding notes to spend', () => { + let receiverNote: IronfishNote + const receiverWitnessIndex = 1 + let transaction: WasmTransaction + + it('Decrypts and fails to decrypt notes', async () => { + // Get the note we added in the previous example + const latestNote = await tree.get(receiverWitnessIndex) + + // We should be able to decrypt the note as owned by the receiver + const decryptedNote = latestNote.decryptNoteForOwner(receiverKey.incoming_view_key) + expect(decryptedNote).toBeTruthy() + if (!decryptedNote) throw new Error('DecryptedNote should be truthy') + receiverNote = decryptedNote + + // If we can decrypt a note as owned by the receiver, the spender should not be able to decrypt it as 
owned + expect(latestNote.decryptNoteForOwner(spenderKey.incoming_view_key)).toBeUndefined() + + // Nor should the receiver be able to decrypt it as spent + expect(latestNote.decryptNoteForSpender(receiverKey.outgoing_view_key)).toBeUndefined() + + // However, the spender should be able to decrypt it as spent + expect(latestNote.decryptNoteForSpender(spenderKey.outgoing_view_key)).toBeTruthy() + }) + + it('Can create a transaction', async () => { + transaction = new WasmTransaction() + + const witness = await tree.witness(receiverWitnessIndex) + if (witness == null) throw new Error('Witness must not be null') + + // The `transaction.spend` method is used to spend the note. The owner needs to sign the transaction + // with their private key; this is how the note gets authorized to spend. + const note = receiverNote.takeReference() + expect(transaction.spend(receiverKey.spending_key, note, witness)).toBe('') + receiverNote.returnReference() + + const noteForSpender = new WasmNote(spenderKey.public_address, BigInt(10), '') + const receiverNoteToSelf = new WasmNote( + generateNewPublicAddress(receiverKey.spending_key).public_address, + BigInt(29), + '', + ) + + expect(transaction.receive(receiverKey.spending_key, noteForSpender)).toBe('') + expect(transaction.receive(receiverKey.spending_key, receiverNoteToSelf)).toBe('') + }) + + it('Can post a transaction', () => { + const postedTransaction = transaction.post(receiverKey.spending_key, undefined, BigInt(1)) + expect(postedTransaction).toBeTruthy() + expect(postedTransaction.verify()).toBeTruthy() + }) + }) +}) diff --git a/ironfish/src/telemetry/DisabledTelemetry.ts b/ironfish/src/telemetry/DisabledTelemetry.ts new file mode 100644 index 0000000000..b477bc7654 --- /dev/null +++ b/ironfish/src/telemetry/DisabledTelemetry.ts @@ -0,0 +1,43 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Telemetry, EnabledTelemetry, Metric } from '.' + +/** + * Implementation of Telemetry interface that discards metrics. + */ + +export default class DisabledTelemetry implements Telemetry { + /** + * Called if the user requests to stop submitting metrics. + * Returns a new NodeTelemetry on node; can be adapted for use in browser, + * but that isn't implemented yet. + * + * @returns an enabled telemetry and a status message to display to the user + */ + startCollecting(endpoint: string): { status: string; next: Telemetry } { + return { status: 'Collecting telemetry data', next: new EnabledTelemetry(endpoint) } + } + + /** + * Called if the user requests to stop submitting metrics. + * Since disabled telemetry is already not submitting metrics, + * it is a noop + * + * @returns this and a status message to send to the user + */ + async stopCollecting(): Promise<{ status: string; next: Telemetry }> { + return Promise.resolve({ status: "Not collecting telemetry; can't stop now", next: this }) + } + + /** + * Black hole to submit metrics to when telemetry is disabled. + */ + submit(_metric: Metric): void { + // discard + } + + isEnabled(): boolean { + return false + } +} diff --git a/ironfish/src/telemetry/NodeTelemetry.ts b/ironfish/src/telemetry/NodeTelemetry.ts new file mode 100644 index 0000000000..688d5000b2 --- /dev/null +++ b/ironfish/src/telemetry/NodeTelemetry.ts @@ -0,0 +1,57 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Worker } from 'worker_threads' +import DisabledTelemetry from './DisabledTelemetry' +import { Telemetry, Metric } from './index' + +/** + * Telemetry implementation that sends metrics to a node worker thread + * to be posted. 
+ */ + +export default class NodeTelemetry implements Telemetry { + worker: Worker + + constructor(endpoint: string) { + this.worker = new Worker(__dirname + '/telemetryBackgroundTask.js', { + workerData: { endpoint }, + }) + } + + /** + * Called if the user requests to submit metrics. + * This is a noop if metrics are already enabled. + * + * @returns this and a status message to send to the user + */ + startCollecting(_endpoint: string): { status: string; next: Telemetry } { + return { status: 'Telemetry is already enabled', next: this } + } + + /** + * Called if the user request to stop recording metrics. + * + * Shut down the workers read and returns new DisabledTelemetry + * + * @returns new DisabledTelemetry to replace this one and a status message + * to send to the user + */ + async stopCollecting(): Promise<{ status: string; next: Telemetry }> { + await this.worker.terminate() + return { status: 'Stopped collecting telemetry', next: new DisabledTelemetry() } + } + + /** + * Submit the provided metric to the metric server. + * + * This returns immediately, but a background task is scheduled. 
+ */ + submit(metric: Metric): void { + this.worker.postMessage(metric) + } + + isEnabled(): boolean { + return true + } +} diff --git a/ironfish/src/telemetry/__snapshots__/submit.test.ts.snap b/ironfish/src/telemetry/__snapshots__/submit.test.ts.snap new file mode 100644 index 0000000000..98357a8554 --- /dev/null +++ b/ironfish/src/telemetry/__snapshots__/submit.test.ts.snap @@ -0,0 +1,86 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Telemetry submitMetric function Succeeds with a validly formatted metric 1`] = ` +[MockFunction] { + "calls": Array [ + Array [ + Object { + "fields": Array [ + Object { + "name": "hello", + "type": "string", + "value": "world", + }, + ], + "name": "test metric", + "tags": Object { + "you know": "me", + }, + "timestamp": 2020-12-31T00:00:00.000Z, + }, + ], + ], + "results": Array [ + Object { + "type": "return", + "value": undefined, + }, + ], +} +`; + +exports[`Telemetry submitMetric function submits with default date if unspecified 1`] = ` +[MockFunction] { + "calls": Array [ + Array [ + Object { + "fields": Array [ + Object { + "name": "hello", + "type": "string", + "value": "world", + }, + ], + "name": "test metric", + "tags": Object { + "you know": "me", + }, + "timestamp": 1999-12-31T00:00:00.000Z, + }, + ], + ], + "results": Array [ + Object { + "type": "return", + "value": undefined, + }, + ], +} +`; + +exports[`Telemetry submitMetric function submits with no tags if unspecified 1`] = ` +[MockFunction] { + "calls": Array [ + Array [ + Object { + "fields": Array [ + Object { + "name": "hello", + "type": "string", + "value": "world", + }, + ], + "name": "test metric", + "tags": Object {}, + "timestamp": 2020-12-31T00:00:00.000Z, + }, + ], + ], + "results": Array [ + Object { + "type": "return", + "value": undefined, + }, + ], +} +`; diff --git a/ironfish/src/telemetry/index.ts b/ironfish/src/telemetry/index.ts new file mode 100644 index 0000000000..3046de521d --- /dev/null +++ b/ironfish/src/telemetry/index.ts @@ 
-0,0 +1,145 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +// WARNING: this file contains nodejs-specific functionality +// that will need to be ported to the browser. + +import DisabledTelemetry from './DisabledTelemetry' +import NodeTelemetry from './NodeTelemetry' + +export { NodeTelemetry, DisabledTelemetry } + +export type Field = { + name: string + type: 'string' | 'boolean' | 'float' | 'integer' + value: string | boolean | number +} + +/** + * A specific datapoint being collected. + */ +export type Metric = { + /** + * The name of whatever is being measured. + */ + name: string + /** + * The exact time at which the metric was recorded. + * JS gives us millisecond accuracy here. + * Defaults to new Date() if not specified + */ + timestamp?: Date + /** + * Collection of string keys and values to help identify + * this metric. + * + * Expected values will be something like: "clientid": "xxx" + * or "software version": "xxx". + */ + tags?: Record + /** + * Array of measured values for this particular measurement. + * There must be at least one field. + * Each field has a name, type, and a single value. + */ + fields: Field[] +} + +/** + * Tool for collecting metrics. Connects to a node and sets up + * event listeners for all known metrics. + */ +export interface Telemetry { + startCollecting(endpoint: string): { status: string; next: Telemetry } + stopCollecting(): Promise<{ + next: Telemetry + status: string + }> + submit(metric: Metric): void + isEnabled(): boolean +} + +// This can be changed to a switch for browser implementation +export const EnabledTelemetry = NodeTelemetry + +let telemetry: Telemetry = new DisabledTelemetry() + +// List of tags that get added to every metric. 
+let defaultTags: Record = {} + +/** + * Check if telemetry reporting is currently active + */ +export function isEnabled(): boolean { + return telemetry.isEnabled() +} + +/** + * Set the telemetry used for collecting metrics. + * + * This is primarily exposed for unit testing and initialization. + * Prefer the startCollecting and stopCollecting state managers + * in the general case. + */ +export function setTelemetry(newTelemetry: Telemetry): void { + telemetry = newTelemetry +} + +/** + * Instruct the current telemetry to start collecting data. + * + * Is a noop if it is already collecting. + * + * Returns a status message intended for be displayed to the user + */ +export function startCollecting(endpoint: string): string { + const result = telemetry.startCollecting(endpoint) + telemetry = result.next + return result.status +} + +/** + * Instruct the current telemetry to stop collecting data. + * + * Is a noop if it is not collecting. + * + * Returns a status message intended for display to the user + */ +export async function stopCollecting(): Promise { + const result = await telemetry.stopCollecting() + telemetry = result.next + return result.status +} + +/** + * Set key-value tags that get attached to every + * request. + * + * These will probably be set on node startup, and never + * changed. + * + * They can be set before telemetry is enabled. + */ +export function setDefaultTags(tags: Record): void { + defaultTags = tags +} + +/** + * Submit a metric to the telemetry service. + * + * This can be called unconditionally; the currently enabled + * telemetry will decide whether to discard it if telemetry + * is disabled. + */ +export function submitMetric(metric: Metric): void { + if (metric.fields.length === 0) { + throw new Error('Metric must have at least one field') + } + const toSubmit = { + ...metric, + timestamp: metric.timestamp || new Date(), + tags: metric.tags ? 
{ ...defaultTags, ...metric.tags } : { ...defaultTags }, + } + telemetry.submit(toSubmit) +} diff --git a/ironfish/src/telemetry/submit.test.ts b/ironfish/src/telemetry/submit.test.ts new file mode 100644 index 0000000000..1e51dd4b98 --- /dev/null +++ b/ironfish/src/telemetry/submit.test.ts @@ -0,0 +1,124 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Worker } from 'worker_threads' +import { + submitMetric, + setTelemetry, + DisabledTelemetry, + EnabledTelemetry, + Metric, + setDefaultTags, +} from '.' + +jest.mock('worker_threads') +// Tell typescript to treat it as a mock +const MockWorker = (Worker as unknown) as jest.Mock + +describe('Enabled and disabled telemetry', () => { + const metric: Metric = { + name: 'test metric', + timestamp: new Date('2020-12-31'), + fields: [{ name: 'hello', type: 'string', value: 'world' }], + } + + beforeEach(() => { + MockWorker.mockReset() + }) + + it("doesn't crash when submitting a metric to disabled telemetry", () => { + const telemetry = new DisabledTelemetry() + expect(() => telemetry.submit(metric)).not.toThrow() + expect(Worker).not.toHaveBeenCalled() + }) + + it('submits to the worker when submitting to enabled telemetry', () => { + const telemetry = new EnabledTelemetry('an url') + expect(() => telemetry.submit(metric)).not.toThrow() + expect(telemetry.worker.postMessage).toMatchInlineSnapshot(` + [MockFunction] { + "calls": Array [ + Array [ + Object { + "fields": Array [ + Object { + "name": "hello", + "type": "string", + "value": "world", + }, + ], + "name": "test metric", + "timestamp": 2020-12-31T00:00:00.000Z, + }, + ], + ], + "results": Array [ + Object { + "type": "return", + "value": undefined, + }, + ], + } + `) + }) +}) + +describe('Telemetry submitMetric function', () => { + const metric: Metric = { + name: 'test metric', + 
timestamp: new Date('2020-12-31'), + tags: { 'you know': 'me' }, + fields: [{ name: 'hello', type: 'string', value: 'world' }], + } + + const telemetry = new DisabledTelemetry() + const mockSubmit = jest.fn() + telemetry.submit = mockSubmit + setTelemetry(telemetry) + + beforeEach(() => { + mockSubmit.mockClear() + setDefaultTags({}) + }) + + it('Succeeds with a validly formatted metric', () => { + submitMetric(metric) + expect(mockSubmit).toMatchSnapshot() + }) + + it('throws if fields is empty', () => { + const fieldlessMetric = { ...metric } + fieldlessMetric.fields = [] + expect(() => submitMetric(fieldlessMetric)).toThrowErrorMatchingInlineSnapshot( + `"Metric must have at least one field"`, + ) + expect(mockSubmit).not.toBeCalled() + }) + + it('submits with no tags if unspecified', () => { + const taglessMetric = { ...metric } + delete taglessMetric.tags + submitMetric(taglessMetric) + expect(mockSubmit).toMatchSnapshot() + }) + + it('submits with default tags', () => { + setDefaultTags({ my: 'default tag' }) + submitMetric(metric) + const expectedMetric = { + ...metric, + tags: { my: 'default tag', 'you know': 'me' }, + } + expect(mockSubmit.mock.calls).toMatchObject([[expectedMetric]]) + }) + + it('submits with default date if unspecified', () => { + const now = new Date('1999-12-31') + jest.spyOn(global, 'Date').mockImplementation(() => (now as unknown) as string) + const datelessMetric = { ...metric } + delete datelessMetric.timestamp + submitMetric(datelessMetric) + expect(mockSubmit).toMatchSnapshot() + }) +}) diff --git a/ironfish/src/telemetry/telemetryBackgroundTask.test.ts b/ironfish/src/telemetry/telemetryBackgroundTask.test.ts new file mode 100644 index 0000000000..fa9c27155f --- /dev/null +++ b/ironfish/src/telemetry/telemetryBackgroundTask.test.ts @@ -0,0 +1,47 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Metric } from '.' +import { handleMetric, sendMetrics, MAX_QUEUE_BEFORE_SUBMIT } from './telemetryBackgroundTask' +import axios from 'axios' + +jest.mock('worker_threads') +jest.mock('axios') + +describe('Telemetry background thread', () => { + const postMock = jest.fn().mockImplementation(() => Promise.resolve({})) + axios.post = postMock + const metric: Metric = { + name: 'test metric', + timestamp: new Date('2020-12-31'), + fields: [{ name: 'hello', type: 'string', value: 'world' }], + } + const endpoint = 'http://localhost:8000/writeMetric' + + afterEach(() => { + postMock.mockClear() + }) + + it('posts a metric', () => { + handleMetric(metric, endpoint) + expect(postMock).not.toHaveBeenCalled() + sendMetrics(endpoint) + expect(axios.post).toHaveBeenCalledWith('http://localhost:8000/writeMetric', [ + { + name: 'test metric', + timestamp: new Date('2020-12-31'), + fields: [{ name: 'hello', string: 'world' }], + }, + ]) + }) + + it('posts immediately if there are many metrics', () => { + for (let i = 0; i < MAX_QUEUE_BEFORE_SUBMIT; i++) { + handleMetric(metric, endpoint) + } + expect(postMock).not.toHaveBeenCalled() + handleMetric(metric, endpoint) + expect(postMock).toHaveBeenCalled() + }) +}) diff --git a/ironfish/src/telemetry/telemetryBackgroundTask.ts b/ironfish/src/telemetry/telemetryBackgroundTask.ts new file mode 100644 index 0000000000..30394258b7 --- /dev/null +++ b/ironfish/src/telemetry/telemetryBackgroundTask.ts @@ -0,0 +1,77 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/** + * You might think metrics are an io-bound problem, but in order to support batching + * and retries, we've placed them on a worker thread. 
+ */ + +// WARNING: This file only runs on node and will need to be ported +// to webworkers to collect metrics in the browser + +import { parentPort, workerData, MessagePort } from 'worker_threads' +import axios, { AxiosError } from 'axios' +import { createRootLogger, Logger } from '../logger' + +import { Metric } from '.' + +/// 5 seconds between sending batches of metrics +const BATCH_INTERVAL = 5000 +/// Send batch early if the queue is large +export const MAX_QUEUE_BEFORE_SUBMIT = 1000 +/// Max length of queue before dumping metrics (in event of network outage) +const MAX_QUEUE_BEFORE_DUMP = 10000 + +type MetricOnWire = Omit & { + fields: Record[] +} + +let metrics: MetricOnWire[] = [] + +export function handleMetric(metric: Metric, endpoint: string, logger?: Logger): void { + const fields = metric.fields.map((field) => { + const httpField: Record = { name: field.name } + httpField[field.type] = field.value + return httpField + }) + metrics.push({ ...metric, fields }) + if (metrics.length > MAX_QUEUE_BEFORE_SUBMIT) { + sendMetrics(endpoint, logger) + } +} + +export function sendMetrics(endpoint: string, logger?: Logger): void { + if (metrics.length == 0) { + return + } + + const toSubmit = metrics + metrics = [] + + axios + .post(endpoint, toSubmit) + .then(() => { + if (logger) logger.debug(`Submitted batch of ${toSubmit.length} metrics`) + }) + .catch((err: AxiosError) => { + if (logger) logger.warn('Unable to submit metrics', err.code || '') + + // Put the metrics back on the queue to try again + // But if metric server is unavailable dump buffer to prevent memory leak + if (metrics.length < MAX_QUEUE_BEFORE_DUMP) { + metrics.push(...toSubmit) + } + }) +} + +export function startTelemetryWorker(port: MessagePort): void { + const logger = createRootLogger().withTag('telemetryWorker') + const { endpoint } = (workerData as unknown) as { endpoint: string } + port.on('message', (metric: Metric) => handleMetric(metric, endpoint, logger)) + setInterval(() => 
sendMetrics(endpoint, logger), BATCH_INTERVAL) +} + +if (parentPort !== null) { + startTelemetryWorker(parentPort) +} diff --git a/ironfish/src/testUtilities/fixtures.ts b/ironfish/src/testUtilities/fixtures.ts new file mode 100644 index 0000000000..330a8b766a --- /dev/null +++ b/ironfish/src/testUtilities/fixtures.ts @@ -0,0 +1,137 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import { Accounts, Account } from '../account' +import { IJSON } from '../serde' +import { IronfishBlock, IronfishCaptain, SerializedIronfishBlock } from '../strategy' +import fs from 'fs' +import path from 'path' +import { getCurrentTestPath } from './utils' + +type FixtureGenerate = () => Promise | T +type FixtureRestore = (fixture: T) => Promise | void +type FixtureDeserialize = (data: TSerialized) => Promise | T +type FixtureSerialize = (fixture: T) => Promise | TSerialized + +const fixtureIds = new Map() + +export function disableFixtures(): void { + const testName = expect.getState().currentTestName.replace(/ /g, '_') + const fixtureInfo = fixtureIds.get(testName) || { id: 0, disabled: false } + fixtureIds.set(testName, fixtureInfo) + fixtureInfo.disabled = true +} + +export async function useFixture( + generate: FixtureGenerate, + options: { + restore?: FixtureRestore + process?: FixtureRestore + deserialize?: FixtureDeserialize + serialize?: FixtureSerialize + }, +): Promise { + const testName = expect.getState().currentTestName.replace(/ /g, '_') + const testDir = path.dirname(getCurrentTestPath()) + + const fixtureInfo = fixtureIds.get(testName) || { id: 0, disabled: false } + const fixtureId = (fixtureInfo.id += 1) + const fixtureName = `${testName}_${fixtureId}` + + fixtureIds.set(testName, fixtureInfo) + + const fixtureDir = path.join(testDir, 'fixtures') + const fixturePath = path.join(fixtureDir, fixtureName) + 
// Use the same parameters as jest snapshots for usability + const updateFixtures = + process.argv.indexOf('--updateSnapshot') !== -1 || process.argv.indexOf('-u') !== -1 + + let fixture: TFixture | null = null + + if (!updateFixtures && !fixtureInfo.disabled && fs.existsSync(fixturePath)) { + const buffer = await fs.promises.readFile(fixturePath) + const data = IJSON.parse(buffer.toString('utf8')) as TSerialized + + if (options.deserialize) { + fixture = await options.deserialize(data) + } else { + fixture = (data as unknown) as TFixture + } + + if (options.restore) { + await options.restore(fixture) + } + } else { + fixture = await generate() + + const serialized = options.serialize ? await options?.serialize(fixture) : fixture + const data = IJSON.stringify(serialized, ' ') + + if (!fs.existsSync(fixtureDir)) { + await fs.promises.mkdir(fixtureDir) + } + + await fs.promises.writeFile(fixturePath, data) + } + + if (options.process) { + await options.process(fixture) + } + + return fixture +} + +export async function useAccountFixture( + accounts: Accounts, + generate: FixtureGenerate | string, +): Promise { + if (typeof generate === 'string') { + const name = generate + generate = () => accounts.createAccount(name) + } + + return useFixture(generate, { + restore: async (account: Account): Promise => { + await accounts.importAccount(account) + }, + }) +} + +/* + * We need the workaround because transactions related to us + * that get added onto a block don't get handled in the same + * way as if we created them, which is a problem. That's why + * the transaction fixture uses accounts.createTransaction() + * and not accounts.pay(), so if it's generated, and if it's + * cached, both have the same flow where we manually sync + * them afterwards. 
+ */ +export async function restoreBlockFixtureToAccounts( + block: IronfishBlock, + accounts: Accounts, +): Promise { + for (const transaction of block.transactions) { + await accounts.syncTransaction(transaction, { submittedSequence: BigInt(1) }) + } +} + +export async function useBlockFixture( + captain: IronfishCaptain, + generate: FixtureGenerate, + addTransactionsTo?: Accounts, +): Promise { + return useFixture(generate, { + process: async (block: IronfishBlock): Promise => { + if (addTransactionsTo) { + await restoreBlockFixtureToAccounts(block, addTransactionsTo) + } + }, + serialize: (block: IronfishBlock): SerializedIronfishBlock => { + return captain.blockSerde.serialize(block) + }, + deserialize: (serialized: SerializedIronfishBlock): IronfishBlock => { + return captain.blockSerde.deserialize(serialized) + }, + }) +} diff --git a/ironfish/src/testUtilities/index.ts b/ironfish/src/testUtilities/index.ts new file mode 100644 index 0000000000..3fd09b3519 --- /dev/null +++ b/ironfish/src/testUtilities/index.ts @@ -0,0 +1,6 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './fixtures' +export * from './nodeTest' +export * from './utils' diff --git a/ironfish/src/testUtilities/nodeTest.ts b/ironfish/src/testUtilities/nodeTest.ts new file mode 100644 index 0000000000..7b0172cca4 --- /dev/null +++ b/ironfish/src/testUtilities/nodeTest.ts @@ -0,0 +1,97 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { IronfishNode } from '../node' +import { IronfishSdk } from '../sdk' +import { v4 as uuid } from 'uuid' +import os from 'os' +import path from 'path' +import { IronfishBlockchain, IronfishCaptain } from '../strategy' +import { IronfishTestVerifier } from './verifier' +import { IronfishTestStrategy } from './strategy' + +/** + * Used as an easy wrapper for testing the node, and blockchain. Use + * {@link createNodeTest} to create one to make sure you call the proper + * test lifecycle methods on the NodeTest + */ +export class NodeTest { + sdk!: IronfishSdk + node!: IronfishNode + strategy!: IronfishTestStrategy + captain!: IronfishCaptain + chain!: IronfishBlockchain + + setups = new Array<{ + sdk: IronfishSdk + node: IronfishNode + captain: IronfishCaptain + strategy: IronfishTestStrategy + chain: IronfishBlockchain + }>() + + async createSetup(): Promise<{ + sdk: IronfishSdk + node: IronfishNode + strategy: IronfishTestStrategy + captain: IronfishCaptain + chain: IronfishBlockchain + }> { + const dataDir = path.join(os.tmpdir(), uuid()) + const verifierClass = IronfishTestVerifier + const strategyClass = IronfishTestStrategy + + const sdk = await IronfishSdk.init({ dataDir, verifierClass, strategyClass }) + const node = await sdk.node() + const strategy = node.strategy as IronfishTestStrategy + const captain = node.captain + const chain = node.captain.chain + + sdk.config.setOverride('bootstrapNodes', ['']) + await node.openDB() + + const setup = { sdk, node, captain, strategy, chain } + this.setups.push(setup) + return setup + } + + async setup(): Promise { + const { sdk, node, captain, strategy, chain } = await this.createSetup() + + this.sdk = sdk + this.node = node + this.strategy = strategy + this.captain = captain + this.chain = chain + } + + async teardownEach(): Promise { + for (const { node } of this.setups) { + await node.shutdown() + } + } + + async teardownAll(): Promise { + for (const { node } of this.setups) { + await node.closeDB() + } 
+ } +} + +/** Call this to create a {@link NodeTest} and ensure its test lifecycle + * methods are called properly like beforeEach, beforeAll, etc + */ +export function createNodeTest(preserveState = false): NodeTest { + const nodeTest = new NodeTest() + if (preserveState) { + beforeAll(() => nodeTest.setup()) + afterEach(() => nodeTest.teardownEach()) + afterAll(() => nodeTest.teardownAll()) + } else { + beforeEach(() => nodeTest.setup()) + afterEach(() => nodeTest.teardownEach()) + afterEach(() => nodeTest.teardownAll()) + } + + return nodeTest +} diff --git a/ironfish/src/testUtilities/strategy.ts b/ironfish/src/testUtilities/strategy.ts new file mode 100644 index 0000000000..c8d20f54cc --- /dev/null +++ b/ironfish/src/testUtilities/strategy.ts @@ -0,0 +1,21 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { IronfishStrategy } from '../strategy' + +export class IronfishTestStrategy extends IronfishStrategy { + private _miningReward: number | null = null + + disableMiningReward(): void { + this._miningReward = 0 + } + + miningReward(sequence: bigint): number { + if (this._miningReward !== null) { + return this._miningReward + } + + return super.miningReward(sequence) + } +} diff --git a/ironfish/src/testUtilities/utils.ts b/ironfish/src/testUtilities/utils.ts new file mode 100644 index 0000000000..b53c5d2d0f --- /dev/null +++ b/ironfish/src/testUtilities/utils.ts @@ -0,0 +1,13 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +/** + * This is only usable in the jasmine runner + */ +export function getCurrentTestPath(): string { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-explicit-any + const jasmineAny = global.jasmine as any + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + return jasmineAny.testPath as string +} diff --git a/ironfish/src/testUtilities/verifier.ts b/ironfish/src/testUtilities/verifier.ts new file mode 100644 index 0000000000..abaf465ad1 --- /dev/null +++ b/ironfish/src/testUtilities/verifier.ts @@ -0,0 +1,28 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { Verifier } from '../captain' +import { + IronfishBlockchain, + IronfishNoteEncrypted, + IronfishTransaction, + SerializedTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + WasmNoteEncryptedHash, +} from '../strategy' + +export class IronfishTestVerifier extends Verifier< + IronfishNoteEncrypted, + WasmNoteEncryptedHash, + IronfishTransaction, + SerializedWasmNoteEncrypted, + SerializedWasmNoteEncryptedHash, + SerializedTransaction +> { + constructor(chain: IronfishBlockchain) { + super(chain) + this.enableVerifyTarget = false + } +} diff --git a/ironfish/src/transactionPool.ts b/ironfish/src/transactionPool.ts new file mode 100644 index 0000000000..f80db784b1 --- /dev/null +++ b/ironfish/src/transactionPool.ts @@ -0,0 +1,147 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import Captain from './captain' +import { Nullifier } from './captain/anchorChain/nullifiers' +import Transaction from './captain/anchorChain/strategies/Transaction' +import { createRootLogger, Logger } from './logger' +import { JsonSerializable } from './serde' + +export class TransactionPool< + E, + H, + T extends Transaction, + SE extends JsonSerializable, + SH extends JsonSerializable, + ST +> { + private transactions = new Map() + private readonly captain: Captain + private readonly logger: Logger + + constructor(captain: Captain, logger: Logger = createRootLogger()) { + this.captain = captain + this.logger = logger.withTag('transactionpool') + } + + size(): number { + return this.transactions.size + } + + exists(transactionHash: Buffer): boolean { + const hash = transactionHash.toString('hex') + return this.transactions.has(hash) + } + + async *get(): AsyncGenerator { + await this.prune() + + for (const transaction of this.transactions.values()) { + yield transaction + } + } + + /** + * Accepts a transaction from the network + */ + acceptTransaction(transaction: T): boolean { + const hash = transaction.transactionHash().toString('hex') + if (this.transactions.has(hash)) return false + + this.add(transaction) + return true + } + + private add(transaction: T): void { + const hash = transaction.transactionHash().toString('hex') + const fee = transaction.transactionFee() + + this.logger.debug('notes: ', transaction.notesLength()) + this.logger.debug('spends: ', transaction.spendsLength()) + this.logger.debug('fee: ', fee) + + this.transactions.set(hash, transaction) + this.logger.info(`Accepted tx ${hash}, poolsize ${this.size()}`) + } + + /** + * Scan the current transaction pool and remove any transactions that + * are not valid. 
This removes: + * * transactions with invalid proofs + * * transactions that have been seen before the tree was `beforeSize` + * * transactions whose nullifiers were already seen in the transaction list + */ + async prune(): Promise { + // The size of the tree before which any valid transactions must not have been seen + const beforeSize = await this.captain.chain.nullifiers.size() + + const seenNullifiers: Nullifier[] = [] + let pruneCount = 0 + + for (const transaction of this.transactions.values()) { + const isValid = await this.isValidTransaction(transaction, beforeSize, seenNullifiers) + + if (!isValid) { + const hash = transaction.transactionHash().toString('hex') + this.transactions.delete(hash) + pruneCount++ + } + } + + if (pruneCount > 0) { + this.logger.debug(`Pruned ${pruneCount} transactions from the waiting pool`) + } + } + + /** + * Check whether or not the transaction is valid. + * + * Ensures that: + * * Proofs are valid + * * transactionFee is nonnegative + * * transaction spends have not been spent previously on the chain + * * transaction spends have not been spent previously in the list of seenNullifiers + * * transaction spend root actually existed in the notes tree + * + * @param transaction the transaction being tested + * @param beforeSize the size of the nullifiers tree + * before which the transaction must not be seen + * @param seenNullifiers list of nullifiers that were previously spent in this block. + * this method updates seenNullifiers as a side effect, and checks that there + * are no duplicates. + * TODO: seenNullifiers is currently a list, which requires a linear scan for each + * spend. It would be better if it were a set, but the JS native Set doesn't know how + * to operate on the Buffer backed Nullifier. 
+ * TODO: transactions that have been previously verified are needlessly verified again + * when the only thing that might have changed is whether they have been spent before + */ + async isValidTransaction( + transaction: T, + beforeSize: number, + seenNullifiers: Nullifier[], + ): Promise { + // it's faster to check if spends have been seen or not, so do that first + for (const spend of transaction.spends()) { + if (!(await this.captain.chain.verifier.verifySpend(spend, beforeSize))) { + return false + } + } + const validity = transaction.verify() + if (!validity.valid) { + return false + } + + for (const spend of transaction.spends()) { + for (const seen of seenNullifiers) { + if (this.captain.strategy.nullifierHasher().hashSerde().equals(spend.nullifier, seen)) { + return false + } + } + + seenNullifiers.push(spend.nullifier) + } + + return true + } +} diff --git a/ironfish/src/utils/array.ts b/ironfish/src/utils/array.ts new file mode 100644 index 0000000000..ba46b18ceb --- /dev/null +++ b/ironfish/src/utils/array.ts @@ -0,0 +1,26 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import { Assert } from '../assert' + +function shuffle(array: Array): Array { + return array.slice().sort(() => Math.random() - 0.5) +} + +function sampleOrThrow(array: Array): T { + Assert.isTrue(array.length > 0) + return array[Math.floor(Math.random() * array.length)] +} + +function remove(array: Array, item: T): boolean { + for (let i = 0; i < array.length; ++i) { + if (array[i] === item) { + array.splice(i, 1) + return true + } + } + return false +} + +export const ArrayUtils = { shuffle, sampleOrThrow, remove } diff --git a/ironfish/src/utils/async.ts b/ironfish/src/utils/async.ts new file mode 100644 index 0000000000..5391ea0e26 --- /dev/null +++ b/ironfish/src/utils/async.ts @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export class AsyncUtils { + static async materialize(iter: AsyncIterable): Promise> { + const results = [] + for await (const result of iter) { + results.push(result) + } + return results + } + + static async count(iter: AsyncIterable): Promise { + let count = 0 + for await (const _result of iter) ++count + return count + } +} diff --git a/ironfish/src/utils/currency.test.ts b/ironfish/src/utils/currency.test.ts new file mode 100644 index 0000000000..092f720bfa --- /dev/null +++ b/ironfish/src/utils/currency.test.ts @@ -0,0 +1,35 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +import { displayIronAmountWithCurrency, isValidAmount, oreToIron, ironToOre } from './currency' + +describe('Currency utils', () => { + test('displayIronAmountWithCurrency returns the right string', () => { + expect(displayIronAmountWithCurrency(0.00000002, true)).toEqual('$IRON 0.00000002 ($ORE 2)') + expect(displayIronAmountWithCurrency(0.0000001, true)).toEqual('$IRON 0.00000010 ($ORE 10)') + expect(displayIronAmountWithCurrency(0, true)).toEqual('$IRON 0.00000000 ($ORE 0)') + expect(displayIronAmountWithCurrency(1, true)).toEqual( + '$IRON 1.00000000 ($ORE 100,000,000)', + ) + expect(displayIronAmountWithCurrency(100, true)).toEqual( + '$IRON 100.00000000 ($ORE 10,000,000,000)', + ) + expect(displayIronAmountWithCurrency(100, false)).toEqual('$IRON 100.00000000') + }) + + test('isValidAmount returns the right value', () => { + expect(isValidAmount(0.0000000000001)).toBe(false) + expect(isValidAmount(0.00000001)).toBe(true) + expect(isValidAmount(10.000001)).toBe(true) + }) + + test('oreToIron returns the right value', () => { + expect(oreToIron(2394)).toBe(0.00002394) + expect(oreToIron(999)).toBe(0.00000999) + }) + + test('ironToOre returns the right value', () => { + expect(ironToOre(0.00002394)).toBe(2394) + expect(ironToOre(0.00000999)).toBe(999) + }) +}) diff --git a/ironfish/src/utils/currency.ts b/ironfish/src/utils/currency.ts new file mode 100644 index 0000000000..3060f21c32 --- /dev/null +++ b/ironfish/src/utils/currency.ts @@ -0,0 +1,43 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +const ORE_TICKER = '$ORE' +const IRON_TICKER = '$IRON' +const ORE_TO_IRON = 100000000 +export const MINIMUM_IRON_AMOUNT = 1 / ORE_TO_IRON +const FLOAT = ORE_TO_IRON.toString().length - 1 + +export const isValidAmount = (amount: number): boolean => { + return amount >= MINIMUM_IRON_AMOUNT +} + +export const ironToOre = (amount: number): number => { + const iron = amount * ORE_TO_IRON + + const pow = Math.pow(10, 0) + return Math.round(iron * pow) / pow +} + +export const oreToIron = (amount: number): number => { + return amount / ORE_TO_IRON +} + +/* + * Return a string with the format $IRON X.XXXXXXXX ($ORE X^8) + */ +export const displayIronAmountWithCurrency = (amount: number, displayOre: boolean): string => { + let iron = `${IRON_TICKER} ${amount.toLocaleString(undefined, { + minimumFractionDigits: FLOAT, + maximumFractionDigits: FLOAT, + })}` + + if (displayOre) { + iron += ` (${ORE_TICKER} ${ironToOre(amount).toLocaleString(undefined, { + minimumFractionDigits: 0, + maximumFractionDigits: 0, + })})` + } + + return iron +} diff --git a/ironfish/src/utils/enums.ts b/ironfish/src/utils/enums.ts new file mode 100644 index 0000000000..95ffc6ef53 --- /dev/null +++ b/ironfish/src/utils/enums.ts @@ -0,0 +1,22 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +export type StrEnumValue = T[keyof T] +export type StrEnum = Record + +export class StrEnumUtils { + static getValues>(enumType: T): Array> { + return Object.values(enumType) + .filter((v) => typeof v === 'string') + .map((v) => v as StrEnumValue) + } + + static isInEnum>(value: unknown, enumType: T): value is StrEnumValue { + for (const enumValue of StrEnumUtils.getValues(enumType)) { + if (enumValue === value) return true + } + + return false + } +} diff --git a/ironfish/src/utils/error.ts b/ironfish/src/utils/error.ts new file mode 100644 index 0000000000..27795caa2f --- /dev/null +++ b/ironfish/src/utils/error.ts @@ -0,0 +1,16 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/** + * This is used to unwrap a message from an error if its possible + * otherwise just returns the error + */ +export function renderError(error: unknown): string { + if (!error) return '' + if (error instanceof Error) return error.message + if (typeof error === 'string') return error + return JSON.stringify(error) +} + +export const ErrorUtils = { renderError } diff --git a/ironfish/src/utils/file.ts b/ironfish/src/utils/file.ts new file mode 100644 index 0000000000..a8d184b121 --- /dev/null +++ b/ironfish/src/utils/file.ts @@ -0,0 +1,24 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ +type SizeSuffix = { B: string; KB: string; MB: string; GB: string } + +const fileSizeSuffix: SizeSuffix = { B: 'B', KB: 'KB', MB: 'MB', GB: 'GB' } +const memorySizeSuffix: SizeSuffix = { B: 'B', KB: 'KiB', MB: 'MiB', GB: 'GiB' } + +const formatFileSize = ( + bytes: number, + base = 1000, + suffix: SizeSuffix = fileSizeSuffix, +): string => { + if (bytes < Math.pow(base, 1)) return `${bytes.toFixed(0)} ${suffix.B}` + if (bytes < Math.pow(base, 2)) return (bytes / Math.pow(base, 1)).toFixed(0) + ` ${suffix.KB}` + if (bytes < Math.pow(base, 3)) return (bytes / Math.pow(base, 2)).toFixed(2) + ` ${suffix.MB}` + else return (bytes / Math.pow(base, 3)).toFixed(2) + ` ${suffix.GB}` +} + +const formatMemorySize = (bytes: number): string => { + return formatFileSize(bytes, 1024, memorySizeSuffix) +} + +export const FileUtils = { formatFileSize, formatMemorySize } diff --git a/ironfish/src/utils/hash.ts b/ironfish/src/utils/hash.ts new file mode 100644 index 0000000000..dc39a86fc0 --- /dev/null +++ b/ironfish/src/utils/hash.ts @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +import { IronfishBlockHeader } from '../strategy' + +function renderHashHex(hashHex: string): string { + return `${hashHex.slice(0, 5)}...${hashHex.slice(-5)}` +} + +function renderHash(hash: Buffer): string { + return renderHashHex(hash.toString('hex')) +} + +function renderBlockHeaderHash(header: IronfishBlockHeader): string { + return renderHash(header.hash) +} + +export const HashUtils = { renderHashHex, renderHash, renderBlockHeaderHash } diff --git a/ironfish/src/utils/index.ts b/ironfish/src/utils/index.ts new file mode 100644 index 0000000000..1e77755203 --- /dev/null +++ b/ironfish/src/utils/index.ts @@ -0,0 +1,16 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +export * from './array' +export * from './async' +export * from './currency' +export * from './enums' +export * from './error' +export * from './file' +export * from './hash' +export * from './json' +export * from './math' +export * from './promise' +export * from './strings' +export * from './types' +export * from './yup' diff --git a/ironfish/src/utils/json.ts b/ironfish/src/utils/json.ts new file mode 100644 index 0000000000..0ea016efb0 --- /dev/null +++ b/ironfish/src/utils/json.ts @@ -0,0 +1,48 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import parseJson, { JSONError } from 'parse-json' +import { Assert } from '../assert' + +function IsParseJsonError(e: unknown): e is JSONError { + return typeof e === 'object' && !!e && 'codeFrame' in e +} + +export class ParseJsonError extends Error { + jsonMessage: string + jsonFileName: string + jsonCodeFrame: string + + constructor(fileName: string, message: string, codeFrame: string) { + super(`Parsing ${fileName} Failed\n${message}`) + this.jsonFileName = fileName + this.jsonMessage = message + this.jsonCodeFrame = codeFrame + } +} + +function parse(data: string, fileName?: string): T { + const [result, error] = tryParse(data, fileName) + if (error) throw error + Assert.isNotNull(result) + return result +} + +function tryParse( + data: string, + fileName?: string, +): [T, null] | [null, ParseJsonError] { + try { + const config = parseJson(data, fileName || '') as T + return [config, null] + } catch (e) { + if (IsParseJsonError(e)) { + const error = new ParseJsonError(e.fileName, e.message, e.codeFrame) + return [null, error] + } + + throw e + } +} + +export const JSONUtils = { parse, tryParse } diff --git a/ironfish/src/utils/math.ts 
b/ironfish/src/utils/math.ts new file mode 100644 index 0000000000..0434958e97 --- /dev/null +++ b/ironfish/src/utils/math.ts @@ -0,0 +1,30 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +function arrayAverage(values: number[]): number { + if (values.length === 0) return 0 + + let total = 0 + for (const value of values) { + total += value + } + return total / values.length +} + +function arraySum(values: number[]): number { + if (values.length === 0) return 0 + + let total = 0 + for (const value of values) { + total += value + } + return total +} + +function round(value: number, places: number): number { + const scalar = Math.pow(10, places) + return Math.round(value * scalar) / scalar +} + +export const MathUtils = { arrayAverage, arraySum, round } diff --git a/ironfish/src/utils/promise.ts b/ironfish/src/utils/promise.ts new file mode 100644 index 0000000000..41aa2a64db --- /dev/null +++ b/ironfish/src/utils/promise.ts @@ -0,0 +1,31 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +export type PromiseResolve = (value: T) => void +export type PromiseReject = (error?: unknown) => void + +/** + * This creates a promise and splits it out into the promise, + * the resolve and reject functions. Useful when you are + * creating pending promises resolved by some later code. 
+ */ +export class PromiseUtils { + static split(): [Promise, PromiseResolve, PromiseReject] { + const handlers: { + resolve: PromiseResolve | null + reject: PromiseReject | null + } = { resolve: null, reject: null } + + const promise = new Promise((resolve, reject) => { + handlers.resolve = resolve + handlers.reject = reject + }) + + return [promise, handlers.resolve as PromiseResolve, handlers.reject as PromiseReject] + } + + static sleep(timeMs: number): Promise { + return new Promise((resolve) => setTimeout(resolve, timeMs)) + } +} diff --git a/ironfish/src/utils/strings.ts b/ironfish/src/utils/strings.ts new file mode 100644 index 0000000000..657fd09261 --- /dev/null +++ b/ironfish/src/utils/strings.ts @@ -0,0 +1,26 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ +import crypto from 'crypto' + +/** + * A simple MD5 number hash from a string + * + * This is not meant for any production or cryptographic use just a + * simple way to hash a string for test or display purposes + */ +function hashToNumber(value: string): number { + return parseInt(hash(value).toString('hex'), 16) +} + +/** + * A simple MD5 hash from a string + * + * This is not meant for any production or cryptographic use just a + * simple way to hash a string for test or display purposes + */ +function hash(value: string): Buffer { + return Buffer.from(crypto.createHash('md5').update(value).digest('hex')) +} + +export const StringUtils = { hash, hashToNumber } diff --git a/ironfish/src/utils/types.ts b/ironfish/src/utils/types.ts new file mode 100644 index 0000000000..80065f5081 --- /dev/null +++ b/ironfish/src/utils/types.ts @@ -0,0 +1,40 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +/** + * Equivilent to the builtin Partial just recursive. + * + * @see https://www.typescriptlang.org/docs/handbook/utility-types.html#partialtype + */ +export type PartialRecursive = { + [P in keyof T]?: T[P] extends (infer U)[] + ? PartialRecursive[] + : T[P] extends Record + ? PartialRecursive + : T[P] +} + +/** + * Converts a type from Promise to T. + * + * This does not unwrap recursively. + */ +export type UnwrapPromise = T extends Promise + ? U + : T extends (...args: unknown[]) => Promise + ? U + : T extends (...args: unknown[]) => infer U + ? U + : T + +/** + * The return type of setTimeout, this type be used with clearTimeout + * + * This exists because the return type is different on the web versus node + * */ +export type SetTimeoutToken = ReturnType + +export function IsNodeTimeout(timer: number | NodeJS.Timeout): timer is NodeJS.Timeout { + return typeof timer !== 'number' +} diff --git a/ironfish/src/utils/yup.ts b/ironfish/src/utils/yup.ts new file mode 100644 index 0000000000..2584ea29ce --- /dev/null +++ b/ironfish/src/utils/yup.ts @@ -0,0 +1,40 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ + +import * as yup from 'yup' +import { UnwrapPromise } from './types' + +export type YupSchema = yup.Schema + +export type YupSchemaResult> = UnwrapPromise< + ReturnType +> + +export class YupUtils { + static async tryValidate( + schema: S, + value: unknown, + options?: yup.ValidateOptions, + ): Promise< + { result: YupSchemaResult; error: null } | { result: null; error: yup.ValidationError } + > { + if (!options) { + options = { stripUnknown: true } + } + + if (options.stripUnknown === undefined) { + options.stripUnknown = true + } + + try { + const result = await schema.validate(value, options) + return { result: result as YupSchemaResult, error: null } + } catch (e) { + if (e instanceof yup.ValidationError) { + return { result: null, error: e } + } + throw e + } + } +} diff --git a/ironfish/tsconfig.eslint.json b/ironfish/tsconfig.eslint.json new file mode 100644 index 0000000000..a8d4317b49 --- /dev/null +++ b/ironfish/tsconfig.eslint.json @@ -0,0 +1,4 @@ +{ + "extends": "./tsconfig.json", + "exclude": [] +} diff --git a/ironfish/tsconfig.json b/ironfish/tsconfig.json new file mode 100644 index 0000000000..3fcbf9255b --- /dev/null +++ b/ironfish/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../config/tsconfig.base.json", + "compilerOptions": { + "outDir": "build" + }, + "include": ["src"], + "exclude": ["src/**/*.test.*"], +} diff --git a/ironfish/tsconfig.test.json b/ironfish/tsconfig.test.json new file mode 100644 index 0000000000..c5fca47e07 --- /dev/null +++ b/ironfish/tsconfig.test.json @@ -0,0 +1,8 @@ +{ + "extends": "../config/tsconfig.base.json", + "compilerOptions": { + "noEmit": true, + "tsBuildInfoFile": "./build/tsconfig.tsbuildinfo" + }, + "include": ["src"], +} diff --git a/lerna.json b/lerna.json new file mode 100644 index 0000000000..27d579a0cb --- /dev/null +++ b/lerna.json @@ -0,0 +1,16 @@ +{ + "packages": [ + "config/eslint-config-ironfish-react", + "config/eslint-config-ironfish", + "config/monorepo-template-library", + "ironfish-cli", 
+ "ironfish-http-api", + "ironfish-rosetta-api", + "ironfish", + "ironfish-wasm/nodejs", + "ironfish-wasm/web" + ], + "version": "independent", + "npmClient": "yarn", + "useWorkspaces": true +} diff --git a/openapitools.json b/openapitools.json new file mode 100644 index 0000000000..5ef9056080 --- /dev/null +++ b/openapitools.json @@ -0,0 +1,7 @@ +{ + "$schema": "node_modules/@openapitools/openapi-generator-cli/config.schema.json", + "spaces": 2, + "generator-cli": { + "version": "5.0.0-beta3" + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000000..7539633249 --- /dev/null +++ b/package.json @@ -0,0 +1,66 @@ +{ + "name": "root", + "version": "1.0.0", + "repository": "https://github.com/iron-fish/ironfish", + "private": true, + "author": "Iron Fish (https://ironfish.network)", + "license": "MPL-2.0", + "workspaces": [ + "ironfish-cli", + "ironfish-http-api", + "ironfish-rosetta-api", + "config/eslint-config-ironfish-react", + "config/eslint-config-ironfish", + "config/monorepo-template-library", + "ironfish", + "ironfish-wasm/nodejs", + "ironfish-wasm/web" + ], + "scripts": { + "build": "lerna run build", + "build:changed": "lerna run --since origin/master --include-dependents build", + "build:docs": "lerna run build:docs", + "clean": "lerna run clean && lerna exec -- rm -rf ./build", + "lint": "lerna run lint -- --max-warnings=0", + "lint:changed": "lerna run --since origin/master --include-dependents lint", + "lint:fix": "lerna run lint:fix", + "lint:ci": "lerna run lint:ci", + "prebuild": "(cd ironfish-wasm && yarn build)", + "preinstall": "(cd ironfish-wasm && yarn build)", + "test": "lerna run test", + "test:coverage": "lerna run test --stream -- --collect-coverage", + "test:coverage:html": "lerna run test:coverage:html", + "test:changed": "lerna run --since origin/master --include-dependents test", + "test:update": "lerna run test -- -u", + "test:update:changed": "lerna run --since origin/master --include-dependents test -- -u", 
+ "test:slow:coverage": "lerna run test:slow --stream -- --collect-coverage --testPathIgnorePatterns", + "typecheck": "lerna exec -- tsc --noEmit", + "typecheck:changed": "lerna exec --since origin/master --include-dependents -- tsc --noEmit", + "coverage:upload": " lerna exec '\"yarn codecov -t $CODECOV_TOKEN -f ./coverage/clover.xml -F $LERNA_PACKAGE_NAME -p $ROOT_PATH/ --disable=gcov\"'" + }, + "devDependencies": { + "@types/jest": "^26.0.5", + "@typescript-eslint/eslint-plugin": "4.9.0", + "@typescript-eslint/parser": "4.9.0", + "codecov": "^3.8.1", + "eslint": "7.12.0", + "eslint-config-prettier": "6.11.0", + "eslint-plugin-header": "^3.1.0", + "eslint-plugin-import": "^2.22.0", + "eslint-plugin-jest": "24.0.2", + "eslint-plugin-prettier": "3.1.4", + "eslint-plugin-react": "^7.20.3", + "eslint-plugin-react-hooks": "4.1.2", + "jest": "^26.4.2", + "lerna": "^3.22.1", + "prettier": "2.1.2", + "ts-jest": "^26.4.0", + "typescript": "4.1.2" + }, + "resolutions": { + "axios": "^0.21.1", + "node-forge": "0.10.0", + "node-notifier": "8.0.1", + "object-path": "^0.11.4" + } +} diff --git a/rust-toolchain b/rust-toolchain new file mode 100644 index 0000000000..7d58e0997b --- /dev/null +++ b/rust-toolchain @@ -0,0 +1 @@ +1.48.0 \ No newline at end of file diff --git a/typedoc.json b/typedoc.json new file mode 100644 index 0000000000..29ff723a70 --- /dev/null +++ b/typedoc.json @@ -0,0 +1,5 @@ +{ + "excludeExternals": true, + "mode": "modules", + "out": "./build/docs" +} diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 0000000000..ac92735645 --- /dev/null +++ b/yarn.lock @@ -0,0 +1,12620 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@apidevtools/json-schema-ref-parser@9.0.6": + version "9.0.6" + resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.0.6.tgz#5d9000a3ac1fd25404da886da6b266adcd99cf1c" + integrity sha512-M3YgsLjI0lZxvrpeGVk9Ap032W6TPQkH6pRAZz81Ac3WUNF79VQooAFnp8umjvVzUmD93NkogxEwbSce7qMsUg== + dependencies: + "@jsdevtools/ono" "^7.1.3" + call-me-maybe "^1.0.1" + js-yaml "^3.13.1" + +"@assemblyscript/loader@^0.10.1": + version "0.10.1" + resolved "https://registry.yarnpkg.com/@assemblyscript/loader/-/loader-0.10.1.tgz#70e45678f06c72fa2e350e8553ec4a4d72b92e06" + integrity sha512-H71nDOOL8Y7kWRLqf6Sums+01Q5msqBW2KhDUTemh1tvY04eSkSXrK0uj/4mmY0Xr16/3zyZmsrxN7CKuRbNRg== + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a" + integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg== + dependencies: + "@babel/highlight" "^7.10.4" + +"@babel/core@^7.1.0", "@babel/core@^7.7.5": + version "7.12.9" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.12.9.tgz#fd450c4ec10cdbb980e2928b7aa7a28484593fc8" + integrity sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/generator" "^7.12.5" + "@babel/helper-module-transforms" "^7.12.1" + "@babel/helpers" "^7.12.5" + "@babel/parser" "^7.12.7" + "@babel/template" "^7.12.7" + "@babel/traverse" "^7.12.9" + "@babel/types" "^7.12.7" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.1" + json5 "^2.1.2" + lodash "^4.17.19" + resolve "^1.3.2" + semver "^5.4.1" + source-map "^0.5.0" + +"@babel/generator@^7.12.5": + version "7.12.5" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.12.5.tgz#a2c50de5c8b6d708ab95be5e6053936c1884a4de" + integrity 
sha512-m16TQQJ8hPt7E+OS/XVQg/7U184MLXtvuGbCdA7na61vha+ImkyyNM/9DDA0unYCVZn3ZOhng+qz48/KBOT96A== + dependencies: + "@babel/types" "^7.12.5" + jsesc "^2.5.1" + source-map "^0.5.0" + +"@babel/helper-function-name@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.10.4.tgz#d2d3b20c59ad8c47112fa7d2a94bc09d5ef82f1a" + integrity sha512-YdaSyz1n8gY44EmN7x44zBn9zQ1Ry2Y+3GTA+3vH6Mizke1Vw0aWDM66FOYEPw8//qKkmqOckrGgTYa+6sceqQ== + dependencies: + "@babel/helper-get-function-arity" "^7.10.4" + "@babel/template" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-get-function-arity@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.10.4.tgz#98c1cbea0e2332f33f9a4661b8ce1505b2c19ba2" + integrity sha512-EkN3YDB+SRDgiIUnNgcmiD361ti+AVbL3f3Henf6dqqUyr5dMsorno0lJWJuLhDhkI5sYEpgj6y9kB8AOU1I2A== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-member-expression-to-functions@^7.12.1": + version "7.12.7" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.7.tgz#aa77bd0396ec8114e5e30787efa78599d874a855" + integrity sha512-DCsuPyeWxeHgh1Dus7APn7iza42i/qXqiFPWyBDdOFtvS581JQePsc1F/nD+fHrcswhLlRc2UpYS1NwERxZhHw== + dependencies: + "@babel/types" "^7.12.7" + +"@babel/helper-module-imports@^7.12.1": + version "7.12.5" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.12.5.tgz#1bfc0229f794988f76ed0a4d4e90860850b54dfb" + integrity sha512-SR713Ogqg6++uexFRORf/+nPXMmWIn80TALu0uaFb+iQIUoR7bOC7zBWyzBs5b3tBBJXuyD0cRu1F15GyzjOWA== + dependencies: + "@babel/types" "^7.12.5" + +"@babel/helper-module-transforms@^7.12.1": + version "7.12.1" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.12.1.tgz#7954fec71f5b32c48e4b303b437c34453fd7247c" + integrity 
sha512-QQzehgFAZ2bbISiCpmVGfiGux8YVFXQ0abBic2Envhej22DVXV9nCFaS5hIQbkyo1AdGb+gNME2TSh3hYJVV/w== + dependencies: + "@babel/helper-module-imports" "^7.12.1" + "@babel/helper-replace-supers" "^7.12.1" + "@babel/helper-simple-access" "^7.12.1" + "@babel/helper-split-export-declaration" "^7.11.0" + "@babel/helper-validator-identifier" "^7.10.4" + "@babel/template" "^7.10.4" + "@babel/traverse" "^7.12.1" + "@babel/types" "^7.12.1" + lodash "^4.17.19" + +"@babel/helper-optimise-call-expression@^7.10.4": + version "7.12.7" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.7.tgz#7f94ae5e08721a49467346aa04fd22f750033b9c" + integrity sha512-I5xc9oSJ2h59OwyUqjv95HRyzxj53DAubUERgQMrpcCEYQyToeHA+NEcUEsVWB4j53RDeskeBJ0SgRAYHDBckw== + dependencies: + "@babel/types" "^7.12.7" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.8.0": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz#2f75a831269d4f677de49986dff59927533cf375" + integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== + +"@babel/helper-replace-supers@^7.12.1": + version "7.12.5" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.12.5.tgz#f009a17543bbbbce16b06206ae73b63d3fca68d9" + integrity sha512-5YILoed0ZyIpF4gKcpZitEnXEJ9UoDRki1Ey6xz46rxOzfNMAhVIJMoune1hmPVxh40LRv1+oafz7UsWX+vyWA== + dependencies: + "@babel/helper-member-expression-to-functions" "^7.12.1" + "@babel/helper-optimise-call-expression" "^7.10.4" + "@babel/traverse" "^7.12.5" + "@babel/types" "^7.12.5" + +"@babel/helper-simple-access@^7.12.1": + version "7.12.1" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.12.1.tgz#32427e5aa61547d38eb1e6eaf5fd1426fdad9136" + integrity 
sha512-OxBp7pMrjVewSSC8fXDFrHrBcJATOOFssZwv16F3/6Xtc138GHybBfPbm9kfiqQHKhYQrlamWILwlDCeyMFEaA== + dependencies: + "@babel/types" "^7.12.1" + +"@babel/helper-split-export-declaration@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.11.0.tgz#f8a491244acf6a676158ac42072911ba83ad099f" + integrity sha512-74Vejvp6mHkGE+m+k5vHY93FX2cAtrw1zXrZXRlG4l410Nm9PxfEiVTn1PjDPV5SnmieiueY4AFg2xqhNFuuZg== + dependencies: + "@babel/types" "^7.11.0" + +"@babel/helper-validator-identifier@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz#a78c7a7251e01f616512d31b10adcf52ada5e0d2" + integrity sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw== + +"@babel/helpers@^7.12.5": + version "7.12.5" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.12.5.tgz#1a1ba4a768d9b58310eda516c449913fe647116e" + integrity sha512-lgKGMQlKqA8meJqKsW6rUnc4MdUk35Ln0ATDqdM1a/UpARODdI4j5Y5lVfUScnSNkJcdCRAaWkspykNoFg9sJA== + dependencies: + "@babel/template" "^7.10.4" + "@babel/traverse" "^7.12.5" + "@babel/types" "^7.12.5" + +"@babel/highlight@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.10.4.tgz#7d1bdfd65753538fabe6c38596cdb76d9ac60143" + integrity sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA== + dependencies: + "@babel/helper-validator-identifier" "^7.10.4" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.12.7": + version "7.12.7" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.12.7.tgz#fee7b39fe809d0e73e5b25eecaf5780ef3d73056" + integrity sha512-oWR02Ubp4xTLCAqPRiNIuMVgNO5Aif/xpXtabhzW2HWUD47XJsAB4Zd/Rg30+XeQA3juXigV7hlquOTmwqLiwg== + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + 
resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.1.tgz#bcb297c5366e79bebadef509549cd93b04f19978" + integrity sha512-U40A76x5gTwmESz+qiqssqmeEsKvcSyvtgktrm0uzcARAmM9I1jR221f6Oq+GmHrcD+LvZDag1UTOTe2fL3TeA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + 
+"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.12.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.12.1.tgz#dd6c0b357ac1bb142d98537450a319625d13d2a0" + integrity sha512-i7ooMZFS+a/Om0crxZodrTzNEPJHZrlMVGMTEpFAj6rYY/bKCddB0Dk/YxfPuYXOopuhKk/e1jV6h+WUU9XN3A== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/runtime@^7.10.5": + version "7.12.5" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.12.5.tgz#410e7e487441e1b360c29be715d870d9b985882e" + integrity sha512-plcc+hbExy3McchJCEQG3knOsuh3HH+Prx1P6cLIkET/0dLuQDEnrT+s27Axgc9bqfsmNUNHfscgMUdBpC9xfg== + dependencies: + regenerator-runtime "^0.13.4" + +"@babel/template@^7.10.4", "@babel/template@^7.12.7", "@babel/template@^7.3.3": + version "7.12.7" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.12.7.tgz#c817233696018e39fbb6c491d2fb684e05ed43bc" + integrity sha512-GkDzmHS6GV7ZeXfJZ0tLRBhZcMcY0/Lnb+eEbXDBfCAcZCjrZKe6p3J4we/D24O9Y8enxWAg1cWwof59yLh2ow== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/parser" "^7.12.7" + "@babel/types" "^7.12.7" + +"@babel/traverse@^7.1.0", "@babel/traverse@^7.12.1", "@babel/traverse@^7.12.5", "@babel/traverse@^7.12.9": + version "7.12.9" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.12.9.tgz#fad26c972eabbc11350e0b695978de6cc8e8596f" + integrity sha512-iX9ajqnLdoU1s1nHt36JDI9KG4k+vmI8WgjK5d+aDTwQbL2fUnzedNedssA645Ede3PM2ma1n8Q4h2ohwXgMXw== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/generator" "^7.12.5" + 
"@babel/helper-function-name" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.11.0" + "@babel/parser" "^7.12.7" + "@babel/types" "^7.12.7" + debug "^4.1.0" + globals "^11.1.0" + lodash "^4.17.19" + +"@babel/types@^7.0.0", "@babel/types@^7.10.4", "@babel/types@^7.11.0", "@babel/types@^7.12.1", "@babel/types@^7.12.5", "@babel/types@^7.12.7", "@babel/types@^7.3.0", "@babel/types@^7.3.3": + version "7.12.7" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.12.7.tgz#6039ff1e242640a29452c9ae572162ec9a8f5d13" + integrity sha512-MNyI92qZq6jrQkXvtIiykvl4WtoRrVV9MPn+ZfsoEENjiWcBQ3ZSHrkxnJWgWtLX3XXqX5hrSQ+X69wkmesXuQ== + dependencies: + "@babel/helper-validator-identifier" "^7.10.4" + lodash "^4.17.19" + to-fast-properties "^2.0.0" + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@cnakazawa/watch@^1.0.3": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@cnakazawa/watch/-/watch-1.0.4.tgz#f864ae85004d0fcab6f50be9141c4da368d1656a" + integrity sha512-v9kIhKwjeZThiWrLmj0y17CWoyddASLj9O2yvbZkbvw/N3rWOYy9zkV66ursAoVr0mV15bL8g0c4QZUE6cdDoQ== + dependencies: + exec-sh "^0.3.2" + minimist "^1.2.0" + +"@dabh/diagnostics@^2.0.2": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.2.tgz#290d08f7b381b8f94607dc8f471a12c675f9db31" + integrity sha512-+A1YivoVDNNVCdfozHSR8v/jyuuLTMXwjWuxPFlFlUapXoGc+Gj9mDlTDDfrwl7rXCl2tNZ0kE8sIBO6YOn96Q== + dependencies: + colorspace "1.1.x" + enabled "2.0.x" + kuler "^2.0.0" + +"@eslint/eslintrc@^0.2.0": + version "0.2.1" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.2.1.tgz#f72069c330461a06684d119384435e12a5d76e3c" + integrity sha512-XRUeBZ5zBWLYgSANMpThFddrZZkEbGHgUdt5UJjZfnlN9BGCiUBrf+nvbRupSjMvqzwnQN0qwCmOxITt1cfywA== + 
dependencies: + ajv "^6.12.4" + debug "^4.1.1" + espree "^7.3.0" + globals "^12.1.0" + ignore "^4.0.6" + import-fresh "^3.2.1" + js-yaml "^3.13.1" + lodash "^4.17.19" + minimatch "^3.0.4" + strip-json-comments "^3.1.1" + +"@eslint/eslintrc@^0.2.2": + version "0.2.2" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.2.2.tgz#d01fc791e2fc33e88a29d6f3dc7e93d0cd784b76" + integrity sha512-EfB5OHNYp1F4px/LI/FEnGylop7nOqkQ1LRzCM0KccA2U8tvV8w01KBv37LbO7nW4H+YhKyo2LcJhRwjjV17QQ== + dependencies: + ajv "^6.12.4" + debug "^4.1.1" + espree "^7.3.0" + globals "^12.1.0" + ignore "^4.0.6" + import-fresh "^3.2.1" + js-yaml "^3.13.1" + lodash "^4.17.19" + minimatch "^3.0.4" + strip-json-comments "^3.1.1" + +"@evocateur/libnpmaccess@^3.1.2": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@evocateur/libnpmaccess/-/libnpmaccess-3.1.2.tgz#ecf7f6ce6b004e9f942b098d92200be4a4b1c845" + integrity sha512-KSCAHwNWro0CF2ukxufCitT9K5LjL/KuMmNzSu8wuwN2rjyKHD8+cmOsiybK+W5hdnwc5M1SmRlVCaMHQo+3rg== + dependencies: + "@evocateur/npm-registry-fetch" "^4.0.0" + aproba "^2.0.0" + figgy-pudding "^3.5.1" + get-stream "^4.0.0" + npm-package-arg "^6.1.0" + +"@evocateur/libnpmpublish@^1.2.2": + version "1.2.2" + resolved "https://registry.yarnpkg.com/@evocateur/libnpmpublish/-/libnpmpublish-1.2.2.tgz#55df09d2dca136afba9c88c759ca272198db9f1a" + integrity sha512-MJrrk9ct1FeY9zRlyeoyMieBjGDG9ihyyD9/Ft6MMrTxql9NyoEx2hw9casTIP4CdqEVu+3nQ2nXxoJ8RCXyFg== + dependencies: + "@evocateur/npm-registry-fetch" "^4.0.0" + aproba "^2.0.0" + figgy-pudding "^3.5.1" + get-stream "^4.0.0" + lodash.clonedeep "^4.5.0" + normalize-package-data "^2.4.0" + npm-package-arg "^6.1.0" + semver "^5.5.1" + ssri "^6.0.1" + +"@evocateur/npm-registry-fetch@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@evocateur/npm-registry-fetch/-/npm-registry-fetch-4.0.0.tgz#8c4c38766d8d32d3200fcb0a83f064b57365ed66" + integrity 
sha512-k1WGfKRQyhJpIr+P17O5vLIo2ko1PFLKwoetatdduUSt/aQ4J2sJrJwwatdI5Z3SiYk/mRH9S3JpdmMFd/IK4g== + dependencies: + JSONStream "^1.3.4" + bluebird "^3.5.1" + figgy-pudding "^3.4.1" + lru-cache "^5.1.1" + make-fetch-happen "^5.0.0" + npm-package-arg "^6.1.0" + safe-buffer "^5.1.2" + +"@evocateur/pacote@^9.6.3": + version "9.6.5" + resolved "https://registry.yarnpkg.com/@evocateur/pacote/-/pacote-9.6.5.tgz#33de32ba210b6f17c20ebab4d497efc6755f4ae5" + integrity sha512-EI552lf0aG2nOV8NnZpTxNo2PcXKPmDbF9K8eCBFQdIZwHNGN/mi815fxtmUMa2wTa1yndotICIDt/V0vpEx2w== + dependencies: + "@evocateur/npm-registry-fetch" "^4.0.0" + bluebird "^3.5.3" + cacache "^12.0.3" + chownr "^1.1.2" + figgy-pudding "^3.5.1" + get-stream "^4.1.0" + glob "^7.1.4" + infer-owner "^1.0.4" + lru-cache "^5.1.1" + make-fetch-happen "^5.0.0" + minimatch "^3.0.4" + minipass "^2.3.5" + mississippi "^3.0.0" + mkdirp "^0.5.1" + normalize-package-data "^2.5.0" + npm-package-arg "^6.1.0" + npm-packlist "^1.4.4" + npm-pick-manifest "^3.0.0" + osenv "^0.1.5" + promise-inflight "^1.0.1" + promise-retry "^1.1.1" + protoduck "^5.0.1" + rimraf "^2.6.3" + safe-buffer "^5.2.0" + semver "^5.7.0" + ssri "^6.0.1" + tar "^4.4.10" + unique-filename "^1.1.1" + which "^1.3.1" + +"@influxdata/influxdb-client@1.9.0": + version "1.9.0" + resolved "https://registry.yarnpkg.com/@influxdata/influxdb-client/-/influxdb-client-1.9.0.tgz#0178d88c325550fd18cfa54821dd273b3466798a" + integrity sha512-wJ+qfGukxMxpDE5d2XHiOspKheWTb1PdPOd2eD3MVuh8hjr6fqyJ3zuacTp/WVubeyppPhM4vYcDjkKCg1OEhA== + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + 
+"@istanbuljs/schema@^0.1.2": + version "0.1.2" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.2.tgz#26520bf09abe4a5644cd5414e37125a8954241dd" + integrity sha512-tsAQNx32a8CoFhjhijUIhI4kccIAgmGhy8LZMZgGfmXcpMbPRUqn5LWmgRttILi6yeGmBJd2xsPkFMs0PzgPCw== + +"@jest/console@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-26.6.2.tgz#4e04bc464014358b03ab4937805ee36a0aeb98f2" + integrity sha512-IY1R2i2aLsLr7Id3S6p2BA82GNWryt4oSvEXLAKc+L2zdi89dSkE8xC1C+0kpATG4JhBJREnQOH7/zmccM2B0g== + dependencies: + "@jest/types" "^26.6.2" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^26.6.2" + jest-util "^26.6.2" + slash "^3.0.0" + +"@jest/core@^26.6.3": + version "26.6.3" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-26.6.3.tgz#7639fcb3833d748a4656ada54bde193051e45fad" + integrity sha512-xvV1kKbhfUqFVuZ8Cyo+JPpipAHHAV3kcDBftiduK8EICXmTFddryy3P7NfZt8Pv37rA9nEJBKCCkglCPt/Xjw== + dependencies: + "@jest/console" "^26.6.2" + "@jest/reporters" "^26.6.2" + "@jest/test-result" "^26.6.2" + "@jest/transform" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.4" + jest-changed-files "^26.6.2" + jest-config "^26.6.3" + jest-haste-map "^26.6.2" + jest-message-util "^26.6.2" + jest-regex-util "^26.0.0" + jest-resolve "^26.6.2" + jest-resolve-dependencies "^26.6.3" + jest-runner "^26.6.3" + jest-runtime "^26.6.3" + jest-snapshot "^26.6.2" + jest-util "^26.6.2" + jest-validate "^26.6.2" + jest-watcher "^26.6.2" + micromatch "^4.0.2" + p-each-series "^2.1.0" + rimraf "^3.0.0" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-26.6.2.tgz#ba364cc72e221e79cc8f0a99555bf5d7577cf92c" + integrity sha512-nFy+fHl28zUrRsCeMB61VDThV1pVTtlEokBRgqPrcT1JNq4yRNIyTHfyht6PqtUvY9IsuLGTrbG8kPXjSZIZwA== + dependencies: + 
"@jest/fake-timers" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + jest-mock "^26.6.2" + +"@jest/fake-timers@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-26.6.2.tgz#459c329bcf70cee4af4d7e3f3e67848123535aad" + integrity sha512-14Uleatt7jdzefLPYM3KLcnUl1ZNikaKq34enpb5XG9i81JpppDb5muZvonvKyrl7ftEHkKS5L5/eB/kxJ+bvA== + dependencies: + "@jest/types" "^26.6.2" + "@sinonjs/fake-timers" "^6.0.1" + "@types/node" "*" + jest-message-util "^26.6.2" + jest-mock "^26.6.2" + jest-util "^26.6.2" + +"@jest/globals@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-26.6.2.tgz#5b613b78a1aa2655ae908eba638cc96a20df720a" + integrity sha512-85Ltnm7HlB/KesBUuALwQ68YTU72w9H2xW9FjZ1eL1U3lhtefjjl5c2MiUbpXt/i6LaPRvoOFJ22yCBSfQ0JIA== + dependencies: + "@jest/environment" "^26.6.2" + "@jest/types" "^26.6.2" + expect "^26.6.2" + +"@jest/reporters@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-26.6.2.tgz#1f518b99637a5f18307bd3ecf9275f6882a667f6" + integrity sha512-h2bW53APG4HvkOnVMo8q3QXa6pcaNt1HkwVsOPMBV6LD/q9oSpxNSYZQYkAnjdMjrJ86UuYeLo+aEZClV6opnw== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^26.6.2" + "@jest/test-result" "^26.6.2" + "@jest/transform" "^26.6.2" + "@jest/types" "^26.6.2" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.2" + graceful-fs "^4.2.4" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^4.0.3" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.0.2" + jest-haste-map "^26.6.2" + jest-resolve "^26.6.2" + jest-util "^26.6.2" + jest-worker "^26.6.2" + slash "^3.0.0" + source-map "^0.6.0" + string-length "^4.0.1" + terminal-link "^2.0.0" + v8-to-istanbul "^7.0.0" + optionalDependencies: + node-notifier "^8.0.0" + +"@jest/source-map@^26.6.2": + version "26.6.2" + resolved 
"https://registry.yarnpkg.com/@jest/source-map/-/source-map-26.6.2.tgz#29af5e1e2e324cafccc936f218309f54ab69d535" + integrity sha512-YwYcCwAnNmOVsZ8mr3GfnzdXDAl4LaenZP5z+G0c8bzC9/dugL8zRmxZzdoTl4IaS3CryS1uWnROLPFmb6lVvA== + dependencies: + callsites "^3.0.0" + graceful-fs "^4.2.4" + source-map "^0.6.0" + +"@jest/test-result@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-26.6.2.tgz#55da58b62df134576cc95476efa5f7949e3f5f18" + integrity sha512-5O7H5c/7YlojphYNrK02LlDIV2GNPYisKwHm2QTKjNZeEzezCbwYs9swJySv2UfPMyZ0VdsmMv7jIlD/IKYQpQ== + dependencies: + "@jest/console" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^26.6.3": + version "26.6.3" + resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-26.6.3.tgz#98e8a45100863886d074205e8ffdc5a7eb582b17" + integrity sha512-YHlVIjP5nfEyjlrSr8t/YdNfU/1XEt7c5b4OxcXCjyRhjzLYu/rO69/WHPuYcbCWkz8kAeZVZp2N2+IOLLEPGw== + dependencies: + "@jest/test-result" "^26.6.2" + graceful-fs "^4.2.4" + jest-haste-map "^26.6.2" + jest-runner "^26.6.3" + jest-runtime "^26.6.3" + +"@jest/transform@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-26.6.2.tgz#5ac57c5fa1ad17b2aae83e73e45813894dcf2e4b" + integrity sha512-E9JjhUgNzvuQ+vVAL21vlyfy12gP0GhazGgJC4h6qUt1jSdUXGWJ1wfu/X7Sd8etSgxV4ovT1pb9v5D6QW4XgA== + dependencies: + "@babel/core" "^7.1.0" + "@jest/types" "^26.6.2" + babel-plugin-istanbul "^6.0.0" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.4" + jest-haste-map "^26.6.2" + jest-regex-util "^26.0.0" + jest-util "^26.6.2" + micromatch "^4.0.2" + pirates "^4.0.1" + slash "^3.0.0" + source-map "^0.6.1" + write-file-atomic "^3.0.0" + +"@jest/types@^26.6.2": + version "26.6.2" + resolved 
"https://registry.yarnpkg.com/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e" + integrity sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^15.0.0" + chalk "^4.0.0" + +"@jsdevtools/ono@7.1.3", "@jsdevtools/ono@^7.1.3": + version "7.1.3" + resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796" + integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg== + +"@lerna/add@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/add/-/add-3.21.0.tgz#27007bde71cc7b0a2969ab3c2f0ae41578b4577b" + integrity sha512-vhUXXF6SpufBE1EkNEXwz1VLW03f177G9uMOFMQkp6OJ30/PWg4Ekifuz9/3YfgB2/GH8Tu4Lk3O51P2Hskg/A== + dependencies: + "@evocateur/pacote" "^9.6.3" + "@lerna/bootstrap" "3.21.0" + "@lerna/command" "3.21.0" + "@lerna/filter-options" "3.20.0" + "@lerna/npm-conf" "3.16.0" + "@lerna/validation-error" "3.13.0" + dedent "^0.7.0" + npm-package-arg "^6.1.0" + p-map "^2.1.0" + semver "^6.2.0" + +"@lerna/bootstrap@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/bootstrap/-/bootstrap-3.21.0.tgz#bcd1b651be5b0970b20d8fae04c864548123aed6" + integrity sha512-mtNHlXpmvJn6JTu0KcuTTPl2jLsDNud0QacV/h++qsaKbhAaJr/FElNZ5s7MwZFUM3XaDmvWzHKaszeBMHIbBw== + dependencies: + "@lerna/command" "3.21.0" + "@lerna/filter-options" "3.20.0" + "@lerna/has-npm-version" "3.16.5" + "@lerna/npm-install" "3.16.5" + "@lerna/package-graph" "3.18.5" + "@lerna/pulse-till-done" "3.13.0" + "@lerna/rimraf-dir" "3.16.5" + "@lerna/run-lifecycle" "3.16.2" + "@lerna/run-topologically" "3.18.5" + "@lerna/symlink-binary" "3.17.0" + "@lerna/symlink-dependencies" "3.17.0" + "@lerna/validation-error" "3.13.0" + dedent "^0.7.0" + get-port "^4.2.0" + multimatch "^3.0.0" + 
npm-package-arg "^6.1.0" + npmlog "^4.1.2" + p-finally "^1.0.0" + p-map "^2.1.0" + p-map-series "^1.0.0" + p-waterfall "^1.0.0" + read-package-tree "^5.1.6" + semver "^6.2.0" + +"@lerna/changed@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/changed/-/changed-3.21.0.tgz#108e15f679bfe077af500f58248c634f1044ea0b" + integrity sha512-hzqoyf8MSHVjZp0gfJ7G8jaz+++mgXYiNs9iViQGA8JlN/dnWLI5sWDptEH3/B30Izo+fdVz0S0s7ydVE3pWIw== + dependencies: + "@lerna/collect-updates" "3.20.0" + "@lerna/command" "3.21.0" + "@lerna/listable" "3.18.5" + "@lerna/output" "3.13.0" + +"@lerna/check-working-tree@3.16.5": + version "3.16.5" + resolved "https://registry.yarnpkg.com/@lerna/check-working-tree/-/check-working-tree-3.16.5.tgz#b4f8ae61bb4523561dfb9f8f8d874dd46bb44baa" + integrity sha512-xWjVBcuhvB8+UmCSb5tKVLB5OuzSpw96WEhS2uz6hkWVa/Euh1A0/HJwn2cemyK47wUrCQXtczBUiqnq9yX5VQ== + dependencies: + "@lerna/collect-uncommitted" "3.16.5" + "@lerna/describe-ref" "3.16.5" + "@lerna/validation-error" "3.13.0" + +"@lerna/child-process@3.16.5": + version "3.16.5" + resolved "https://registry.yarnpkg.com/@lerna/child-process/-/child-process-3.16.5.tgz#38fa3c18064aa4ac0754ad80114776a7b36a69b2" + integrity sha512-vdcI7mzei9ERRV4oO8Y1LHBZ3A5+ampRKg1wq5nutLsUA4mEBN6H7JqjWOMY9xZemv6+kATm2ofjJ3lW5TszQg== + dependencies: + chalk "^2.3.1" + execa "^1.0.0" + strong-log-transformer "^2.0.0" + +"@lerna/clean@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/clean/-/clean-3.21.0.tgz#c0b46b5300cc3dae2cda3bec14b803082da3856d" + integrity sha512-b/L9l+MDgE/7oGbrav6rG8RTQvRiZLO1zTcG17zgJAAuhlsPxJExMlh2DFwJEVi2les70vMhHfST3Ue1IMMjpg== + dependencies: + "@lerna/command" "3.21.0" + "@lerna/filter-options" "3.20.0" + "@lerna/prompt" "3.18.5" + "@lerna/pulse-till-done" "3.13.0" + "@lerna/rimraf-dir" "3.16.5" + p-map "^2.1.0" + p-map-series "^1.0.0" + p-waterfall "^1.0.0" + +"@lerna/cli@3.18.5": + version "3.18.5" + resolved 
"https://registry.yarnpkg.com/@lerna/cli/-/cli-3.18.5.tgz#c90c461542fcd35b6d5b015a290fb0dbfb41d242" + integrity sha512-erkbxkj9jfc89vVs/jBLY/fM0I80oLmJkFUV3Q3wk9J3miYhP14zgVEBsPZY68IZlEjT6T3Xlq2xO1AVaatHsA== + dependencies: + "@lerna/global-options" "3.13.0" + dedent "^0.7.0" + npmlog "^4.1.2" + yargs "^14.2.2" + +"@lerna/collect-uncommitted@3.16.5": + version "3.16.5" + resolved "https://registry.yarnpkg.com/@lerna/collect-uncommitted/-/collect-uncommitted-3.16.5.tgz#a494d61aac31cdc7aec4bbe52c96550274132e63" + integrity sha512-ZgqnGwpDZiWyzIQVZtQaj9tRizsL4dUOhuOStWgTAw1EMe47cvAY2kL709DzxFhjr6JpJSjXV5rZEAeU3VE0Hg== + dependencies: + "@lerna/child-process" "3.16.5" + chalk "^2.3.1" + figgy-pudding "^3.5.1" + npmlog "^4.1.2" + +"@lerna/collect-updates@3.20.0": + version "3.20.0" + resolved "https://registry.yarnpkg.com/@lerna/collect-updates/-/collect-updates-3.20.0.tgz#62f9d76ba21a25b7d9fbf31c02de88744a564bd1" + integrity sha512-qBTVT5g4fupVhBFuY4nI/3FSJtQVcDh7/gEPOpRxoXB/yCSnT38MFHXWl+y4einLciCjt/+0x6/4AG80fjay2Q== + dependencies: + "@lerna/child-process" "3.16.5" + "@lerna/describe-ref" "3.16.5" + minimatch "^3.0.4" + npmlog "^4.1.2" + slash "^2.0.0" + +"@lerna/command@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/command/-/command-3.21.0.tgz#9a2383759dc7b700dacfa8a22b2f3a6e190121f7" + integrity sha512-T2bu6R8R3KkH5YoCKdutKv123iUgUbW8efVjdGCDnCMthAQzoentOJfDeodBwn0P2OqCl3ohsiNVtSn9h78fyQ== + dependencies: + "@lerna/child-process" "3.16.5" + "@lerna/package-graph" "3.18.5" + "@lerna/project" "3.21.0" + "@lerna/validation-error" "3.13.0" + "@lerna/write-log-file" "3.13.0" + clone-deep "^4.0.1" + dedent "^0.7.0" + execa "^1.0.0" + is-ci "^2.0.0" + npmlog "^4.1.2" + +"@lerna/conventional-commits@3.22.0": + version "3.22.0" + resolved "https://registry.yarnpkg.com/@lerna/conventional-commits/-/conventional-commits-3.22.0.tgz#2798f4881ee2ef457bdae027ab7d0bf0af6f1e09" + integrity 
sha512-z4ZZk1e8Mhz7+IS8NxHr64wyklHctCJyWpJKEZZPJiLFJ8yKto/x38O80R10pIzC0rr8Sy/OsjSH4bl0TbbgqA== + dependencies: + "@lerna/validation-error" "3.13.0" + conventional-changelog-angular "^5.0.3" + conventional-changelog-core "^3.1.6" + conventional-recommended-bump "^5.0.0" + fs-extra "^8.1.0" + get-stream "^4.0.0" + lodash.template "^4.5.0" + npm-package-arg "^6.1.0" + npmlog "^4.1.2" + pify "^4.0.1" + semver "^6.2.0" + +"@lerna/create-symlink@3.16.2": + version "3.16.2" + resolved "https://registry.yarnpkg.com/@lerna/create-symlink/-/create-symlink-3.16.2.tgz#412cb8e59a72f5a7d9463e4e4721ad2070149967" + integrity sha512-pzXIJp6av15P325sgiIRpsPXLFmkisLhMBCy4764d+7yjf2bzrJ4gkWVMhsv4AdF0NN3OyZ5jjzzTtLNqfR+Jw== + dependencies: + "@zkochan/cmd-shim" "^3.1.0" + fs-extra "^8.1.0" + npmlog "^4.1.2" + +"@lerna/create@3.22.0": + version "3.22.0" + resolved "https://registry.yarnpkg.com/@lerna/create/-/create-3.22.0.tgz#d6bbd037c3dc5b425fe5f6d1b817057c278f7619" + integrity sha512-MdiQQzCcB4E9fBF1TyMOaAEz9lUjIHp1Ju9H7f3lXze5JK6Fl5NYkouAvsLgY6YSIhXMY8AHW2zzXeBDY4yWkw== + dependencies: + "@evocateur/pacote" "^9.6.3" + "@lerna/child-process" "3.16.5" + "@lerna/command" "3.21.0" + "@lerna/npm-conf" "3.16.0" + "@lerna/validation-error" "3.13.0" + camelcase "^5.0.0" + dedent "^0.7.0" + fs-extra "^8.1.0" + globby "^9.2.0" + init-package-json "^1.10.3" + npm-package-arg "^6.1.0" + p-reduce "^1.0.0" + pify "^4.0.1" + semver "^6.2.0" + slash "^2.0.0" + validate-npm-package-license "^3.0.3" + validate-npm-package-name "^3.0.0" + whatwg-url "^7.0.0" + +"@lerna/describe-ref@3.16.5": + version "3.16.5" + resolved "https://registry.yarnpkg.com/@lerna/describe-ref/-/describe-ref-3.16.5.tgz#a338c25aaed837d3dc70b8a72c447c5c66346ac0" + integrity sha512-c01+4gUF0saOOtDBzbLMFOTJDHTKbDFNErEY6q6i9QaXuzy9LNN62z+Hw4acAAZuJQhrVWncVathcmkkjvSVGw== + dependencies: + "@lerna/child-process" "3.16.5" + npmlog "^4.1.2" + +"@lerna/diff@3.21.0": + version "3.21.0" + resolved 
"https://registry.yarnpkg.com/@lerna/diff/-/diff-3.21.0.tgz#e6df0d8b9916167ff5a49fcb02ac06424280a68d" + integrity sha512-5viTR33QV3S7O+bjruo1SaR40m7F2aUHJaDAC7fL9Ca6xji+aw1KFkpCtVlISS0G8vikUREGMJh+c/VMSc8Usw== + dependencies: + "@lerna/child-process" "3.16.5" + "@lerna/command" "3.21.0" + "@lerna/validation-error" "3.13.0" + npmlog "^4.1.2" + +"@lerna/exec@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/exec/-/exec-3.21.0.tgz#17f07533893cb918a17b41bcc566dc437016db26" + integrity sha512-iLvDBrIE6rpdd4GIKTY9mkXyhwsJ2RvQdB9ZU+/NhR3okXfqKc6py/24tV111jqpXTtZUW6HNydT4dMao2hi1Q== + dependencies: + "@lerna/child-process" "3.16.5" + "@lerna/command" "3.21.0" + "@lerna/filter-options" "3.20.0" + "@lerna/profiler" "3.20.0" + "@lerna/run-topologically" "3.18.5" + "@lerna/validation-error" "3.13.0" + p-map "^2.1.0" + +"@lerna/filter-options@3.20.0": + version "3.20.0" + resolved "https://registry.yarnpkg.com/@lerna/filter-options/-/filter-options-3.20.0.tgz#0f0f5d5a4783856eece4204708cc902cbc8af59b" + integrity sha512-bmcHtvxn7SIl/R9gpiNMVG7yjx7WyT0HSGw34YVZ9B+3xF/83N3r5Rgtjh4hheLZ+Q91Or0Jyu5O3Nr+AwZe2g== + dependencies: + "@lerna/collect-updates" "3.20.0" + "@lerna/filter-packages" "3.18.0" + dedent "^0.7.0" + figgy-pudding "^3.5.1" + npmlog "^4.1.2" + +"@lerna/filter-packages@3.18.0": + version "3.18.0" + resolved "https://registry.yarnpkg.com/@lerna/filter-packages/-/filter-packages-3.18.0.tgz#6a7a376d285208db03a82958cfb8172e179b4e70" + integrity sha512-6/0pMM04bCHNATIOkouuYmPg6KH3VkPCIgTfQmdkPJTullERyEQfNUKikrefjxo1vHOoCACDpy65JYyKiAbdwQ== + dependencies: + "@lerna/validation-error" "3.13.0" + multimatch "^3.0.0" + npmlog "^4.1.2" + +"@lerna/get-npm-exec-opts@3.13.0": + version "3.13.0" + resolved "https://registry.yarnpkg.com/@lerna/get-npm-exec-opts/-/get-npm-exec-opts-3.13.0.tgz#d1b552cb0088199fc3e7e126f914e39a08df9ea5" + integrity sha512-Y0xWL0rg3boVyJk6An/vurKzubyJKtrxYv2sj4bB8Mc5zZ3tqtv0ccbOkmkXKqbzvNNF7VeUt1OJ3DRgtC/QZw== + dependencies: + 
npmlog "^4.1.2" + +"@lerna/get-packed@3.16.0": + version "3.16.0" + resolved "https://registry.yarnpkg.com/@lerna/get-packed/-/get-packed-3.16.0.tgz#1b316b706dcee86c7baa55e50b087959447852ff" + integrity sha512-AjsFiaJzo1GCPnJUJZiTW6J1EihrPkc2y3nMu6m3uWFxoleklsSCyImumzVZJssxMi3CPpztj8LmADLedl9kXw== + dependencies: + fs-extra "^8.1.0" + ssri "^6.0.1" + tar "^4.4.8" + +"@lerna/github-client@3.22.0": + version "3.22.0" + resolved "https://registry.yarnpkg.com/@lerna/github-client/-/github-client-3.22.0.tgz#5d816aa4f76747ed736ae64ff962b8f15c354d95" + integrity sha512-O/GwPW+Gzr3Eb5bk+nTzTJ3uv+jh5jGho9BOqKlajXaOkMYGBELEAqV5+uARNGWZFvYAiF4PgqHb6aCUu7XdXg== + dependencies: + "@lerna/child-process" "3.16.5" + "@octokit/plugin-enterprise-rest" "^6.0.1" + "@octokit/rest" "^16.28.4" + git-url-parse "^11.1.2" + npmlog "^4.1.2" + +"@lerna/gitlab-client@3.15.0": + version "3.15.0" + resolved "https://registry.yarnpkg.com/@lerna/gitlab-client/-/gitlab-client-3.15.0.tgz#91f4ec8c697b5ac57f7f25bd50fe659d24aa96a6" + integrity sha512-OsBvRSejHXUBMgwWQqNoioB8sgzL/Pf1pOUhHKtkiMl6aAWjklaaq5HPMvTIsZPfS6DJ9L5OK2GGZuooP/5c8Q== + dependencies: + node-fetch "^2.5.0" + npmlog "^4.1.2" + whatwg-url "^7.0.0" + +"@lerna/global-options@3.13.0": + version "3.13.0" + resolved "https://registry.yarnpkg.com/@lerna/global-options/-/global-options-3.13.0.tgz#217662290db06ad9cf2c49d8e3100ee28eaebae1" + integrity sha512-SlZvh1gVRRzYLVluz9fryY1nJpZ0FHDGB66U9tFfvnnxmueckRQxLopn3tXj3NU1kc3QANT2I5BsQkOqZ4TEFQ== + +"@lerna/has-npm-version@3.16.5": + version "3.16.5" + resolved "https://registry.yarnpkg.com/@lerna/has-npm-version/-/has-npm-version-3.16.5.tgz#ab83956f211d8923ea6afe9b979b38cc73b15326" + integrity sha512-WL7LycR9bkftyqbYop5rEGJ9sRFIV55tSGmbN1HLrF9idwOCD7CLrT64t235t3t4O5gehDnwKI5h2U3oxTrF8Q== + dependencies: + "@lerna/child-process" "3.16.5" + semver "^6.2.0" + +"@lerna/import@3.22.0": + version "3.22.0" + resolved 
"https://registry.yarnpkg.com/@lerna/import/-/import-3.22.0.tgz#1a5f0394f38e23c4f642a123e5e1517e70d068d2" + integrity sha512-uWOlexasM5XR6tXi4YehODtH9Y3OZrFht3mGUFFT3OIl2s+V85xIGFfqFGMTipMPAGb2oF1UBLL48kR43hRsOg== + dependencies: + "@lerna/child-process" "3.16.5" + "@lerna/command" "3.21.0" + "@lerna/prompt" "3.18.5" + "@lerna/pulse-till-done" "3.13.0" + "@lerna/validation-error" "3.13.0" + dedent "^0.7.0" + fs-extra "^8.1.0" + p-map-series "^1.0.0" + +"@lerna/info@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/info/-/info-3.21.0.tgz#76696b676fdb0f35d48c83c63c1e32bb5e37814f" + integrity sha512-0XDqGYVBgWxUquFaIptW2bYSIu6jOs1BtkvRTWDDhw4zyEdp6q4eaMvqdSap1CG+7wM5jeLCi6z94wS0AuiuwA== + dependencies: + "@lerna/command" "3.21.0" + "@lerna/output" "3.13.0" + envinfo "^7.3.1" + +"@lerna/init@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/init/-/init-3.21.0.tgz#1e810934dc8bf4e5386c031041881d3b4096aa5c" + integrity sha512-6CM0z+EFUkFfurwdJCR+LQQF6MqHbYDCBPyhu/d086LRf58GtYZYj49J8mKG9ktayp/TOIxL/pKKjgLD8QBPOg== + dependencies: + "@lerna/child-process" "3.16.5" + "@lerna/command" "3.21.0" + fs-extra "^8.1.0" + p-map "^2.1.0" + write-json-file "^3.2.0" + +"@lerna/link@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/link/-/link-3.21.0.tgz#8be68ff0ccee104b174b5bbd606302c2f06e9d9b" + integrity sha512-tGu9GxrX7Ivs+Wl3w1+jrLi1nQ36kNI32dcOssij6bg0oZ2M2MDEFI9UF2gmoypTaN9uO5TSsjCFS7aR79HbdQ== + dependencies: + "@lerna/command" "3.21.0" + "@lerna/package-graph" "3.18.5" + "@lerna/symlink-dependencies" "3.17.0" + p-map "^2.1.0" + slash "^2.0.0" + +"@lerna/list@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/list/-/list-3.21.0.tgz#42f76fafa56dea13b691ec8cab13832691d61da2" + integrity sha512-KehRjE83B1VaAbRRkRy6jLX1Cin8ltsrQ7FHf2bhwhRHK0S54YuA6LOoBnY/NtA8bHDX/Z+G5sMY78X30NS9tg== + dependencies: + "@lerna/command" "3.21.0" + "@lerna/filter-options" "3.20.0" + "@lerna/listable" 
"3.18.5" + "@lerna/output" "3.13.0" + +"@lerna/listable@3.18.5": + version "3.18.5" + resolved "https://registry.yarnpkg.com/@lerna/listable/-/listable-3.18.5.tgz#e82798405b5ed8fc51843c8ef1e7a0e497388a1a" + integrity sha512-Sdr3pVyaEv5A7ZkGGYR7zN+tTl2iDcinryBPvtuv20VJrXBE8wYcOks1edBTcOWsPjCE/rMP4bo1pseyk3UTsg== + dependencies: + "@lerna/query-graph" "3.18.5" + chalk "^2.3.1" + columnify "^1.5.4" + +"@lerna/log-packed@3.16.0": + version "3.16.0" + resolved "https://registry.yarnpkg.com/@lerna/log-packed/-/log-packed-3.16.0.tgz#f83991041ee77b2495634e14470b42259fd2bc16" + integrity sha512-Fp+McSNBV/P2mnLUYTaSlG8GSmpXM7krKWcllqElGxvAqv6chk2K3c2k80MeVB4WvJ9tRjUUf+i7HUTiQ9/ckQ== + dependencies: + byte-size "^5.0.1" + columnify "^1.5.4" + has-unicode "^2.0.1" + npmlog "^4.1.2" + +"@lerna/npm-conf@3.16.0": + version "3.16.0" + resolved "https://registry.yarnpkg.com/@lerna/npm-conf/-/npm-conf-3.16.0.tgz#1c10a89ae2f6c2ee96962557738685300d376827" + integrity sha512-HbO3DUrTkCAn2iQ9+FF/eisDpWY5POQAOF1m7q//CZjdC2HSW3UYbKEGsSisFxSfaF9Z4jtrV+F/wX6qWs3CuA== + dependencies: + config-chain "^1.1.11" + pify "^4.0.1" + +"@lerna/npm-dist-tag@3.18.5": + version "3.18.5" + resolved "https://registry.yarnpkg.com/@lerna/npm-dist-tag/-/npm-dist-tag-3.18.5.tgz#9ef9abb7c104077b31f6fab22cc73b314d54ac55" + integrity sha512-xw0HDoIG6HreVsJND9/dGls1c+lf6vhu7yJoo56Sz5bvncTloYGLUppIfDHQr4ZvmPCK8rsh0euCVh2giPxzKQ== + dependencies: + "@evocateur/npm-registry-fetch" "^4.0.0" + "@lerna/otplease" "3.18.5" + figgy-pudding "^3.5.1" + npm-package-arg "^6.1.0" + npmlog "^4.1.2" + +"@lerna/npm-install@3.16.5": + version "3.16.5" + resolved "https://registry.yarnpkg.com/@lerna/npm-install/-/npm-install-3.16.5.tgz#d6bfdc16f81285da66515ae47924d6e278d637d3" + integrity sha512-hfiKk8Eku6rB9uApqsalHHTHY+mOrrHeWEs+gtg7+meQZMTS3kzv4oVp5cBZigndQr3knTLjwthT/FX4KvseFg== + dependencies: + "@lerna/child-process" "3.16.5" + "@lerna/get-npm-exec-opts" "3.13.0" + fs-extra "^8.1.0" + npm-package-arg "^6.1.0" + npmlog 
"^4.1.2" + signal-exit "^3.0.2" + write-pkg "^3.1.0" + +"@lerna/npm-publish@3.18.5": + version "3.18.5" + resolved "https://registry.yarnpkg.com/@lerna/npm-publish/-/npm-publish-3.18.5.tgz#240e4039959fd9816b49c5b07421e11b5cb000af" + integrity sha512-3etLT9+2L8JAx5F8uf7qp6iAtOLSMj+ZYWY6oUgozPi/uLqU0/gsMsEXh3F0+YVW33q0M61RpduBoAlOOZnaTg== + dependencies: + "@evocateur/libnpmpublish" "^1.2.2" + "@lerna/otplease" "3.18.5" + "@lerna/run-lifecycle" "3.16.2" + figgy-pudding "^3.5.1" + fs-extra "^8.1.0" + npm-package-arg "^6.1.0" + npmlog "^4.1.2" + pify "^4.0.1" + read-package-json "^2.0.13" + +"@lerna/npm-run-script@3.16.5": + version "3.16.5" + resolved "https://registry.yarnpkg.com/@lerna/npm-run-script/-/npm-run-script-3.16.5.tgz#9c2ec82453a26c0b46edc0bb7c15816c821f5c15" + integrity sha512-1asRi+LjmVn3pMjEdpqKJZFT/3ZNpb+VVeJMwrJaV/3DivdNg7XlPK9LTrORuKU4PSvhdEZvJmSlxCKyDpiXsQ== + dependencies: + "@lerna/child-process" "3.16.5" + "@lerna/get-npm-exec-opts" "3.13.0" + npmlog "^4.1.2" + +"@lerna/otplease@3.18.5": + version "3.18.5" + resolved "https://registry.yarnpkg.com/@lerna/otplease/-/otplease-3.18.5.tgz#b77b8e760b40abad9f7658d988f3ea77d4fd0231" + integrity sha512-S+SldXAbcXTEDhzdxYLU0ZBKuYyURP/ND2/dK6IpKgLxQYh/z4ScljPDMyKymmEvgiEJmBsPZAAPfmNPEzxjog== + dependencies: + "@lerna/prompt" "3.18.5" + figgy-pudding "^3.5.1" + +"@lerna/output@3.13.0": + version "3.13.0" + resolved "https://registry.yarnpkg.com/@lerna/output/-/output-3.13.0.tgz#3ded7cc908b27a9872228a630d950aedae7a4989" + integrity sha512-7ZnQ9nvUDu/WD+bNsypmPG5MwZBwu86iRoiW6C1WBuXXDxM5cnIAC1m2WxHeFnjyMrYlRXM9PzOQ9VDD+C15Rg== + dependencies: + npmlog "^4.1.2" + +"@lerna/pack-directory@3.16.4": + version "3.16.4" + resolved "https://registry.yarnpkg.com/@lerna/pack-directory/-/pack-directory-3.16.4.tgz#3eae5f91bdf5acfe0384510ed53faddc4c074693" + integrity sha512-uxSF0HZeGyKaaVHz5FroDY9A5NDDiCibrbYR6+khmrhZtY0Bgn6hWq8Gswl9iIlymA+VzCbshWIMX4o2O8C8ng== + dependencies: + "@lerna/get-packed" "3.16.0" + 
"@lerna/package" "3.16.0" + "@lerna/run-lifecycle" "3.16.2" + figgy-pudding "^3.5.1" + npm-packlist "^1.4.4" + npmlog "^4.1.2" + tar "^4.4.10" + temp-write "^3.4.0" + +"@lerna/package-graph@3.18.5": + version "3.18.5" + resolved "https://registry.yarnpkg.com/@lerna/package-graph/-/package-graph-3.18.5.tgz#c740e2ea3578d059e551633e950690831b941f6b" + integrity sha512-8QDrR9T+dBegjeLr+n9WZTVxUYUhIUjUgZ0gvNxUBN8S1WB9r6H5Yk56/MVaB64tA3oGAN9IIxX6w0WvTfFudA== + dependencies: + "@lerna/prerelease-id-from-version" "3.16.0" + "@lerna/validation-error" "3.13.0" + npm-package-arg "^6.1.0" + npmlog "^4.1.2" + semver "^6.2.0" + +"@lerna/package@3.16.0": + version "3.16.0" + resolved "https://registry.yarnpkg.com/@lerna/package/-/package-3.16.0.tgz#7e0a46e4697ed8b8a9c14d59c7f890e0d38ba13c" + integrity sha512-2lHBWpaxcBoiNVbtyLtPUuTYEaB/Z+eEqRS9duxpZs6D+mTTZMNy6/5vpEVSCBmzvdYpyqhqaYjjSLvjjr5Riw== + dependencies: + load-json-file "^5.3.0" + npm-package-arg "^6.1.0" + write-pkg "^3.1.0" + +"@lerna/prerelease-id-from-version@3.16.0": + version "3.16.0" + resolved "https://registry.yarnpkg.com/@lerna/prerelease-id-from-version/-/prerelease-id-from-version-3.16.0.tgz#b24bfa789f5e1baab914d7b08baae9b7bd7d83a1" + integrity sha512-qZyeUyrE59uOK8rKdGn7jQz+9uOpAaF/3hbslJVFL1NqF9ELDTqjCPXivuejMX/lN4OgD6BugTO4cR7UTq/sZA== + dependencies: + semver "^6.2.0" + +"@lerna/profiler@3.20.0": + version "3.20.0" + resolved "https://registry.yarnpkg.com/@lerna/profiler/-/profiler-3.20.0.tgz#0f6dc236f4ea8f9ea5f358c6703305a4f32ad051" + integrity sha512-bh8hKxAlm6yu8WEOvbLENm42i2v9SsR4WbrCWSbsmOElx3foRnMlYk7NkGECa+U5c3K4C6GeBbwgqs54PP7Ljg== + dependencies: + figgy-pudding "^3.5.1" + fs-extra "^8.1.0" + npmlog "^4.1.2" + upath "^1.2.0" + +"@lerna/project@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/project/-/project-3.21.0.tgz#5d784d2d10c561a00f20320bcdb040997c10502d" + integrity sha512-xT1mrpET2BF11CY32uypV2GPtPVm6Hgtha7D81GQP9iAitk9EccrdNjYGt5UBYASl4CIDXBRxwmTTVGfrCx82A== + 
dependencies: + "@lerna/package" "3.16.0" + "@lerna/validation-error" "3.13.0" + cosmiconfig "^5.1.0" + dedent "^0.7.0" + dot-prop "^4.2.0" + glob-parent "^5.0.0" + globby "^9.2.0" + load-json-file "^5.3.0" + npmlog "^4.1.2" + p-map "^2.1.0" + resolve-from "^4.0.0" + write-json-file "^3.2.0" + +"@lerna/prompt@3.18.5": + version "3.18.5" + resolved "https://registry.yarnpkg.com/@lerna/prompt/-/prompt-3.18.5.tgz#628cd545f225887d060491ab95df899cfc5218a1" + integrity sha512-rkKj4nm1twSbBEb69+Em/2jAERK8htUuV8/xSjN0NPC+6UjzAwY52/x9n5cfmpa9lyKf/uItp7chCI7eDmNTKQ== + dependencies: + inquirer "^6.2.0" + npmlog "^4.1.2" + +"@lerna/publish@3.22.1": + version "3.22.1" + resolved "https://registry.yarnpkg.com/@lerna/publish/-/publish-3.22.1.tgz#b4f7ce3fba1e9afb28be4a1f3d88222269ba9519" + integrity sha512-PG9CM9HUYDreb1FbJwFg90TCBQooGjj+n/pb3gw/eH5mEDq0p8wKdLFe0qkiqUkm/Ub5C8DbVFertIo0Vd0zcw== + dependencies: + "@evocateur/libnpmaccess" "^3.1.2" + "@evocateur/npm-registry-fetch" "^4.0.0" + "@evocateur/pacote" "^9.6.3" + "@lerna/check-working-tree" "3.16.5" + "@lerna/child-process" "3.16.5" + "@lerna/collect-updates" "3.20.0" + "@lerna/command" "3.21.0" + "@lerna/describe-ref" "3.16.5" + "@lerna/log-packed" "3.16.0" + "@lerna/npm-conf" "3.16.0" + "@lerna/npm-dist-tag" "3.18.5" + "@lerna/npm-publish" "3.18.5" + "@lerna/otplease" "3.18.5" + "@lerna/output" "3.13.0" + "@lerna/pack-directory" "3.16.4" + "@lerna/prerelease-id-from-version" "3.16.0" + "@lerna/prompt" "3.18.5" + "@lerna/pulse-till-done" "3.13.0" + "@lerna/run-lifecycle" "3.16.2" + "@lerna/run-topologically" "3.18.5" + "@lerna/validation-error" "3.13.0" + "@lerna/version" "3.22.1" + figgy-pudding "^3.5.1" + fs-extra "^8.1.0" + npm-package-arg "^6.1.0" + npmlog "^4.1.2" + p-finally "^1.0.0" + p-map "^2.1.0" + p-pipe "^1.2.0" + semver "^6.2.0" + +"@lerna/pulse-till-done@3.13.0": + version "3.13.0" + resolved 
"https://registry.yarnpkg.com/@lerna/pulse-till-done/-/pulse-till-done-3.13.0.tgz#c8e9ce5bafaf10d930a67d7ed0ccb5d958fe0110" + integrity sha512-1SOHpy7ZNTPulzIbargrgaJX387csN7cF1cLOGZiJQA6VqnS5eWs2CIrG8i8wmaUavj2QlQ5oEbRMVVXSsGrzA== + dependencies: + npmlog "^4.1.2" + +"@lerna/query-graph@3.18.5": + version "3.18.5" + resolved "https://registry.yarnpkg.com/@lerna/query-graph/-/query-graph-3.18.5.tgz#df4830bb5155273003bf35e8dda1c32d0927bd86" + integrity sha512-50Lf4uuMpMWvJ306be3oQDHrWV42nai9gbIVByPBYJuVW8dT8O8pA3EzitNYBUdLL9/qEVbrR0ry1HD7EXwtRA== + dependencies: + "@lerna/package-graph" "3.18.5" + figgy-pudding "^3.5.1" + +"@lerna/resolve-symlink@3.16.0": + version "3.16.0" + resolved "https://registry.yarnpkg.com/@lerna/resolve-symlink/-/resolve-symlink-3.16.0.tgz#37fc7095fabdbcf317c26eb74e0d0bde8efd2386" + integrity sha512-Ibj5e7njVHNJ/NOqT4HlEgPFPtPLWsO7iu59AM5bJDcAJcR96mLZ7KGVIsS2tvaO7akMEJvt2P+ErwCdloG3jQ== + dependencies: + fs-extra "^8.1.0" + npmlog "^4.1.2" + read-cmd-shim "^1.0.1" + +"@lerna/rimraf-dir@3.16.5": + version "3.16.5" + resolved "https://registry.yarnpkg.com/@lerna/rimraf-dir/-/rimraf-dir-3.16.5.tgz#04316ab5ffd2909657aaf388ea502cb8c2f20a09" + integrity sha512-bQlKmO0pXUsXoF8lOLknhyQjOZsCc0bosQDoX4lujBXSWxHVTg1VxURtWf2lUjz/ACsJVDfvHZbDm8kyBk5okA== + dependencies: + "@lerna/child-process" "3.16.5" + npmlog "^4.1.2" + path-exists "^3.0.0" + rimraf "^2.6.2" + +"@lerna/run-lifecycle@3.16.2": + version "3.16.2" + resolved "https://registry.yarnpkg.com/@lerna/run-lifecycle/-/run-lifecycle-3.16.2.tgz#67b288f8ea964db9ea4fb1fbc7715d5bbb0bce00" + integrity sha512-RqFoznE8rDpyyF0rOJy3+KjZCeTkO8y/OB9orPauR7G2xQ7PTdCpgo7EO6ZNdz3Al+k1BydClZz/j78gNCmL2A== + dependencies: + "@lerna/npm-conf" "3.16.0" + figgy-pudding "^3.5.1" + npm-lifecycle "^3.1.2" + npmlog "^4.1.2" + +"@lerna/run-topologically@3.18.5": + version "3.18.5" + resolved "https://registry.yarnpkg.com/@lerna/run-topologically/-/run-topologically-3.18.5.tgz#3cd639da20e967d7672cb88db0f756b92f2fdfc3" + 
integrity sha512-6N1I+6wf4hLOnPW+XDZqwufyIQ6gqoPfHZFkfWlvTQ+Ue7CuF8qIVQ1Eddw5HKQMkxqN10thKOFfq/9NQZ4NUg== + dependencies: + "@lerna/query-graph" "3.18.5" + figgy-pudding "^3.5.1" + p-queue "^4.0.0" + +"@lerna/run@3.21.0": + version "3.21.0" + resolved "https://registry.yarnpkg.com/@lerna/run/-/run-3.21.0.tgz#2a35ec84979e4d6e42474fe148d32e5de1cac891" + integrity sha512-fJF68rT3veh+hkToFsBmUJ9MHc9yGXA7LSDvhziAojzOb0AI/jBDp6cEcDQyJ7dbnplba2Lj02IH61QUf9oW0Q== + dependencies: + "@lerna/command" "3.21.0" + "@lerna/filter-options" "3.20.0" + "@lerna/npm-run-script" "3.16.5" + "@lerna/output" "3.13.0" + "@lerna/profiler" "3.20.0" + "@lerna/run-topologically" "3.18.5" + "@lerna/timer" "3.13.0" + "@lerna/validation-error" "3.13.0" + p-map "^2.1.0" + +"@lerna/symlink-binary@3.17.0": + version "3.17.0" + resolved "https://registry.yarnpkg.com/@lerna/symlink-binary/-/symlink-binary-3.17.0.tgz#8f8031b309863814883d3f009877f82e38aef45a" + integrity sha512-RLpy9UY6+3nT5J+5jkM5MZyMmjNHxZIZvXLV+Q3MXrf7Eaa1hNqyynyj4RO95fxbS+EZc4XVSk25DGFQbcRNSQ== + dependencies: + "@lerna/create-symlink" "3.16.2" + "@lerna/package" "3.16.0" + fs-extra "^8.1.0" + p-map "^2.1.0" + +"@lerna/symlink-dependencies@3.17.0": + version "3.17.0" + resolved "https://registry.yarnpkg.com/@lerna/symlink-dependencies/-/symlink-dependencies-3.17.0.tgz#48d6360e985865a0e56cd8b51b308a526308784a" + integrity sha512-KmjU5YT1bpt6coOmdFueTJ7DFJL4H1w5eF8yAQ2zsGNTtZ+i5SGFBWpb9AQaw168dydc3s4eu0W0Sirda+F59Q== + dependencies: + "@lerna/create-symlink" "3.16.2" + "@lerna/resolve-symlink" "3.16.0" + "@lerna/symlink-binary" "3.17.0" + fs-extra "^8.1.0" + p-finally "^1.0.0" + p-map "^2.1.0" + p-map-series "^1.0.0" + +"@lerna/timer@3.13.0": + version "3.13.0" + resolved "https://registry.yarnpkg.com/@lerna/timer/-/timer-3.13.0.tgz#bcd0904551db16e08364d6c18e5e2160fc870781" + integrity sha512-RHWrDl8U4XNPqY5MQHkToWS9jHPnkLZEt5VD+uunCKTfzlxGnRCr3/zVr8VGy/uENMYpVP3wJa4RKGY6M0vkRw== + +"@lerna/validation-error@3.13.0": + version "3.13.0" 
+ resolved "https://registry.yarnpkg.com/@lerna/validation-error/-/validation-error-3.13.0.tgz#c86b8f07c5ab9539f775bd8a54976e926f3759c3" + integrity sha512-SiJP75nwB8GhgwLKQfdkSnDufAaCbkZWJqEDlKOUPUvVOplRGnfL+BPQZH5nvq2BYSRXsksXWZ4UHVnQZI/HYA== + dependencies: + npmlog "^4.1.2" + +"@lerna/version@3.22.1": + version "3.22.1" + resolved "https://registry.yarnpkg.com/@lerna/version/-/version-3.22.1.tgz#9805a9247a47ee62d6b81bd9fa5fb728b24b59e2" + integrity sha512-PSGt/K1hVqreAFoi3zjD0VEDupQ2WZVlVIwesrE5GbrL2BjXowjCsTDPqblahDUPy0hp6h7E2kG855yLTp62+g== + dependencies: + "@lerna/check-working-tree" "3.16.5" + "@lerna/child-process" "3.16.5" + "@lerna/collect-updates" "3.20.0" + "@lerna/command" "3.21.0" + "@lerna/conventional-commits" "3.22.0" + "@lerna/github-client" "3.22.0" + "@lerna/gitlab-client" "3.15.0" + "@lerna/output" "3.13.0" + "@lerna/prerelease-id-from-version" "3.16.0" + "@lerna/prompt" "3.18.5" + "@lerna/run-lifecycle" "3.16.2" + "@lerna/run-topologically" "3.18.5" + "@lerna/validation-error" "3.13.0" + chalk "^2.3.1" + dedent "^0.7.0" + load-json-file "^5.3.0" + minimatch "^3.0.4" + npmlog "^4.1.2" + p-map "^2.1.0" + p-pipe "^1.2.0" + p-reduce "^1.0.0" + p-waterfall "^1.0.0" + semver "^6.2.0" + slash "^2.0.0" + temp-write "^3.4.0" + write-json-file "^3.2.0" + +"@lerna/write-log-file@3.13.0": + version "3.13.0" + resolved "https://registry.yarnpkg.com/@lerna/write-log-file/-/write-log-file-3.13.0.tgz#b78d9e4cfc1349a8be64d91324c4c8199e822a26" + integrity sha512-RibeMnDPvlL8bFYW5C8cs4mbI3AHfQef73tnJCQ/SgrXZHehmHnsyWUiE7qDQCAo+B1RfTapvSyFF69iPj326A== + dependencies: + npmlog "^4.1.2" + write-file-atomic "^2.3.0" + +"@mrmlnc/readdir-enhanced@^2.2.1": + version "2.2.1" + resolved "https://registry.yarnpkg.com/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz#524af240d1a360527b730475ecfa1344aa540dde" + integrity sha512-bPHp6Ji8b41szTOcaP63VlnbbO5Ny6dwAATtY6JTjh5N2OLrb5Qk/Th5cRkRQhkWCt+EJsYrNB0MiL+Gpn6e3g== + dependencies: + call-me-maybe "^1.0.1" + 
glob-to-regexp "^0.3.0" + +"@nestjs/common@7.6.1": + version "7.6.1" + resolved "https://registry.yarnpkg.com/@nestjs/common/-/common-7.6.1.tgz#d62a3207c6db031a44f0fdc55e207586149d9fd1" + integrity sha512-Tq95a6a0kP3rxtV49xal168QNx49JPfO3s6SZ01sCJMWJVtGy8KCaC8YHAx7+KYamH43K6bd9Qv0K9R8lZxEtg== + dependencies: + axios "0.21.0" + iterare "1.2.1" + tslib "2.0.3" + uuid "8.3.2" + +"@nestjs/core@7.6.1": + version "7.6.1" + resolved "https://registry.yarnpkg.com/@nestjs/core/-/core-7.6.1.tgz#3b04ac45dd158f9373650d43894cbd2ae016a694" + integrity sha512-Rd0hqV2TsseMfZFTZBQH6zlNe5Hif5kVe5KNOnXADghKVaSLOO9SQ+VVPqXzeRYvC+xvJxnSdDyeTIriVsgMlg== + dependencies: + "@nuxtjs/opencollective" "0.3.2" + fast-safe-stringify "2.0.7" + iterare "1.2.1" + object-hash "2.0.3" + path-to-regexp "3.2.0" + tslib "2.0.3" + uuid "8.3.2" + +"@nodelib/fs.scandir@2.1.3": + version "2.1.3" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.3.tgz#3a582bdb53804c6ba6d146579c46e52130cf4a3b" + integrity sha512-eGmwYQn3gxo4r7jdQnkrrN6bY478C3P+a/y72IJukF8LjB6ZHeB3c+Ehacj3sYeSmUXGlnA67/PmbM9CVwL7Dw== + dependencies: + "@nodelib/fs.stat" "2.0.3" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.3", "@nodelib/fs.stat@^2.0.2": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.3.tgz#34dc5f4cabbc720f4e60f75a747e7ecd6c175bd3" + integrity sha512-bQBFruR2TAwoevBEd/NWMoAAtNGzTRgdrqnYCc7dhzfoNvqPzLyqlEQnzZ3kVnNrSp25iyxE00/3h2fqGAGArA== + +"@nodelib/fs.stat@^1.1.2": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz#2b5a3ab3f918cca48a8c754c08168e3f03eba61b" + integrity sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.4" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.4.tgz#011b9202a70a6366e436ca5c065844528ab04976" + integrity 
sha512-1V9XOY4rDW0rehzbrcqAmHnz8e7SKvX27gh8Gt2WgB0+pdzdiLV83p72kZPU+jvMbS1qU5mauP2iOvO8rhmurQ== + dependencies: + "@nodelib/fs.scandir" "2.1.3" + fastq "^1.6.0" + +"@nuxtjs/opencollective@0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@nuxtjs/opencollective/-/opencollective-0.3.2.tgz#620ce1044f7ac77185e825e1936115bb38e2681c" + integrity sha512-um0xL3fO7Mf4fDxcqx9KryrB7zgRM5JSlvGN5AGkP6JLM5XEKyjeAiPbNxdXVXQ16isuAhYpvP88NgL2BGd6aA== + dependencies: + chalk "^4.1.0" + consola "^2.15.0" + node-fetch "^2.6.1" + +"@oclif/color@^0.x": + version "0.1.2" + resolved "https://registry.yarnpkg.com/@oclif/color/-/color-0.1.2.tgz#28b07e2850d9ce814d0b587ce3403b7ad8f7d987" + integrity sha512-M9o+DOrb8l603qvgz1FogJBUGLqcMFL1aFg2ZEL0FbXJofiNTLOWIeB4faeZTLwE6dt0xH9GpCVpzksMMzGbmA== + dependencies: + ansi-styles "^3.2.1" + chalk "^3.0.0" + strip-ansi "^5.2.0" + supports-color "^5.4.0" + tslib "^1" + +"@oclif/command@^1", "@oclif/command@^1.5.10", "@oclif/command@^1.5.20", "@oclif/command@^1.6", "@oclif/command@^1.6.0", "@oclif/command@^1.8.0": + version "1.8.0" + resolved "https://registry.yarnpkg.com/@oclif/command/-/command-1.8.0.tgz#c1a499b10d26e9d1a611190a81005589accbb339" + integrity sha512-5vwpq6kbvwkQwKqAoOU3L72GZ3Ta8RRrewKj9OJRolx28KLJJ8Dg9Rf7obRwt5jQA9bkYd8gqzMTrI7H3xLfaw== + dependencies: + "@oclif/config" "^1.15.1" + "@oclif/errors" "^1.3.3" + "@oclif/parser" "^3.8.3" + "@oclif/plugin-help" "^3" + debug "^4.1.1" + semver "^7.3.2" + +"@oclif/config@^1", "@oclif/config@^1.12.6", "@oclif/config@^1.12.8", "@oclif/config@^1.15.1", "@oclif/config@^1.17.0": + version "1.17.0" + resolved "https://registry.yarnpkg.com/@oclif/config/-/config-1.17.0.tgz#ba8639118633102a7e481760c50054623d09fcab" + integrity sha512-Lmfuf6ubjQ4ifC/9bz1fSCHc6F6E653oyaRXxg+lgT4+bYf9bk+nqrUpAbrXyABkCqgIBiFr3J4zR/kiFdE1PA== + dependencies: + "@oclif/errors" "^1.3.3" + "@oclif/parser" "^3.8.0" + debug "^4.1.1" + globby "^11.0.1" + is-wsl "^2.1.1" + tslib "^2.0.0" + +"@oclif/dev-cli@^1": + 
version "1.25.1" + resolved "https://registry.yarnpkg.com/@oclif/dev-cli/-/dev-cli-1.25.1.tgz#bd62a9739a2dcedee19acaed329dad3bb031a2f7" + integrity sha512-q+ywUIRa1SB9A+hIFXlvzWc5oeYgI1sBiFyYrV775lyNC7uVldRCKJ9MVi+YBXZxp7hXeh6r0dC7K2mrOryaJQ== + dependencies: + "@oclif/command" "^1.8.0" + "@oclif/config" "^1.17.0" + "@oclif/errors" "^1.3.3" + "@oclif/plugin-help" "^3.2.0" + cli-ux "^5.2.1" + debug "^4.1.1" + find-yarn-workspace-root "^2.0.0" + fs-extra "^8.1" + github-slugger "^1.2.1" + lodash "^4.17.11" + normalize-package-data "^3.0.0" + qqjs "^0.3.10" + tslib "^2.0.3" + +"@oclif/errors@^1.2.1", "@oclif/errors@^1.2.2", "@oclif/errors@^1.3.3": + version "1.3.4" + resolved "https://registry.yarnpkg.com/@oclif/errors/-/errors-1.3.4.tgz#a96f94536b4e25caa72eff47e8b3ed04f6995f55" + integrity sha512-pJKXyEqwdfRTUdM8n5FIHiQQHg5ETM0Wlso8bF9GodczO40mF5Z3HufnYWJE7z8sGKxOeJCdbAVZbS8Y+d5GCw== + dependencies: + clean-stack "^3.0.0" + fs-extra "^8.1" + indent-string "^4.0.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +"@oclif/fixpack@^2.3.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@oclif/fixpack/-/fixpack-2.3.0.tgz#4b2c3500c4545b6a2a9ad89de8320a1e6535ba5a" + integrity sha512-49WCnMIUO6MOj5h0kSsSh0JsrKVXa332nwMYafz1mvfrLhWHvA/7CW1yvWXbmVT7RUdE0+EfqVTHFdNlipAfpg== + dependencies: + alce "1.2.0" + colors "*" + extend-object "^1.0.0" + rc "^1.2.8" + +"@oclif/linewrap@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@oclif/linewrap/-/linewrap-1.0.0.tgz#aedcb64b479d4db7be24196384897b5000901d91" + integrity sha512-Ups2dShK52xXa8w6iBWLgcjPJWjais6KPJQq3gQ/88AY6BXoTX+MIGFPrWQO1KLMiQfoTpcLnUwloN4brrVUHw== + +"@oclif/parser@^3.8.0", "@oclif/parser@^3.8.3": + version "3.8.5" + resolved "https://registry.yarnpkg.com/@oclif/parser/-/parser-3.8.5.tgz#c5161766a1efca7343e1f25d769efbefe09f639b" + integrity sha512-yojzeEfmSxjjkAvMRj0KzspXlMjCfBzNRPkWw8ZwOSoNWoJn+OCS/m/S+yfV6BvAM4u2lTzX9Y5rCbrFIgkJLg== + dependencies: + "@oclif/errors" "^1.2.2" + 
"@oclif/linewrap" "^1.0.0" + chalk "^2.4.2" + tslib "^1.9.3" + +"@oclif/plugin-help@3", "@oclif/plugin-help@^3", "@oclif/plugin-help@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@oclif/plugin-help/-/plugin-help-3.2.0.tgz#b2c1112f49202ebce042f86b2e42e49908172ef1" + integrity sha512-7jxtpwVWAVbp1r46ZnTK/uF+FeZc6y4p1XcGaIUuPAp7wx6NJhIRN/iMT9UfNFX/Cz7mq+OyJz+E+i0zrik86g== + dependencies: + "@oclif/command" "^1.5.20" + "@oclif/config" "^1.15.1" + chalk "^2.4.1" + indent-string "^4.0.0" + lodash.template "^4.4.0" + string-width "^4.2.0" + strip-ansi "^6.0.0" + widest-line "^3.1.0" + wrap-ansi "^4.0.0" + +"@oclif/plugin-not-found@1.2.4", "@oclif/plugin-not-found@^1.2.2": + version "1.2.4" + resolved "https://registry.yarnpkg.com/@oclif/plugin-not-found/-/plugin-not-found-1.2.4.tgz#160108c82f0aa10f4fb52cee4e0135af34b7220b" + integrity sha512-G440PCuMi/OT8b71aWkR+kCWikngGtyRjOR24sPMDbpUFV4+B3r51fz1fcqeUiiEOYqUpr0Uy/sneUe1O/NfBg== + dependencies: + "@oclif/color" "^0.x" + "@oclif/command" "^1.6.0" + cli-ux "^4.9.0" + fast-levenshtein "^2.0.6" + lodash "^4.17.13" + +"@oclif/plugin-warn-if-update-available@^1.5.4": + version "1.7.0" + resolved "https://registry.yarnpkg.com/@oclif/plugin-warn-if-update-available/-/plugin-warn-if-update-available-1.7.0.tgz#5a72abe39ce0b831eb4ae81cb64eb4b9f3ea424a" + integrity sha512-Nwyz3BJ8RhsfQ+OmFSsJSPIfn5YJqMrCzPh72Zgo2jqIjKIBWD8N9vTTe4kZlpeUUn77SyXFfwlBQbNCL5OEuQ== + dependencies: + "@oclif/command" "^1.5.10" + "@oclif/config" "^1.12.8" + "@oclif/errors" "^1.2.2" + chalk "^2.4.1" + debug "^4.1.0" + fs-extra "^7.0.0" + http-call "^5.2.2" + lodash.template "^4.4.0" + semver "^5.6.0" + +"@oclif/screen@^1.0.3": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@oclif/screen/-/screen-1.0.4.tgz#b740f68609dfae8aa71c3a6cab15d816407ba493" + integrity sha512-60CHpq+eqnTxLZQ4PGHYNwUX572hgpMHGPtTWMjdTMsAvlm69lZV/4ly6O3sAYkomo4NggGcomrDpBe34rxUqw== + +"@oclif/test@^1": + version "1.2.8" + resolved 
"https://registry.yarnpkg.com/@oclif/test/-/test-1.2.8.tgz#a5b2ebd747832217d9af65ac30b58780c4c17c5e" + integrity sha512-HCh0qPge1JCqTEw4s2ScnicEZd4Ro4/0VvdjpsfCiX6fuDV53fRZ2uqLTgxKGHrVoqOZnVrRZHyhFyEsFGs+zQ== + dependencies: + fancy-test "^1.4.3" + +"@octokit/auth-token@^2.4.0": + version "2.4.4" + resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.4.4.tgz#ee31c69b01d0378c12fd3ffe406030f3d94d3b56" + integrity sha512-LNfGu3Ro9uFAYh10MUZVaT7X2CnNm2C8IDQmabx+3DygYIQjs9FwzFAHN/0t6mu5HEPhxcb1XOuxdpY82vCg2Q== + dependencies: + "@octokit/types" "^6.0.0" + +"@octokit/endpoint@^6.0.1": + version "6.0.10" + resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-6.0.10.tgz#741ce1fa2f4fb77ce8ebe0c6eaf5ce63f565f8e8" + integrity sha512-9+Xef8nT7OKZglfkOMm7IL6VwxXUQyR7DUSU0LH/F7VNqs8vyd7es5pTfz9E7DwUIx7R3pGscxu1EBhYljyu7Q== + dependencies: + "@octokit/types" "^6.0.0" + is-plain-object "^5.0.0" + universal-user-agent "^6.0.0" + +"@octokit/openapi-types@^1.2.0": + version "1.2.2" + resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-1.2.2.tgz#55d927436c07ef148ec927fbf4d55580a19bd68e" + integrity sha512-vrKDLd/Rq4IE16oT+jJkDBx0r29NFkdkU8GwqVSP4RajsAvP23CMGtFhVK0pedUhAiMvG1bGnFcTC/xCKaKgmw== + +"@octokit/plugin-enterprise-rest@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/@octokit/plugin-enterprise-rest/-/plugin-enterprise-rest-6.0.1.tgz#e07896739618dab8da7d4077c658003775f95437" + integrity sha512-93uGjlhUD+iNg1iWhUENAtJata6w5nE+V4urXOAlIXdco6xNZtUSfYY8dzp3Udy74aqO/B5UZL80x/YMa5PKRw== + +"@octokit/plugin-paginate-rest@^1.1.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-1.1.2.tgz#004170acf8c2be535aba26727867d692f7b488fc" + integrity sha512-jbsSoi5Q1pj63sC16XIUboklNw+8tL9VOnJsWycWYR78TKss5PVpIPb1TUUcMQ+bBh7cY579cVAWmf5qG+dw+Q== + dependencies: + "@octokit/types" "^2.0.1" + +"@octokit/plugin-request-log@^1.0.0": + version "1.0.2" + 
resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-1.0.2.tgz#394d59ec734cd2f122431fbaf05099861ece3c44" + integrity sha512-oTJSNAmBqyDR41uSMunLQKMX0jmEXbwD1fpz8FG27lScV3RhtGfBa1/BBLym+PxcC16IBlF7KH9vP1BUYxA+Eg== + +"@octokit/plugin-rest-endpoint-methods@2.4.0": + version "2.4.0" + resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-2.4.0.tgz#3288ecf5481f68c494dd0602fc15407a59faf61e" + integrity sha512-EZi/AWhtkdfAYi01obpX0DF7U6b1VRr30QNQ5xSFPITMdLSfhcBqjamE3F+sKcxPbD7eZuMHu3Qkk2V+JGxBDQ== + dependencies: + "@octokit/types" "^2.0.1" + deprecation "^2.3.1" + +"@octokit/request-error@^1.0.2": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-1.2.1.tgz#ede0714c773f32347576c25649dc013ae6b31801" + integrity sha512-+6yDyk1EES6WK+l3viRDElw96MvwfJxCt45GvmjDUKWjYIb3PJZQkq3i46TwGwoPD4h8NmTrENmtyA1FwbmhRA== + dependencies: + "@octokit/types" "^2.0.0" + deprecation "^2.0.0" + once "^1.4.0" + +"@octokit/request-error@^2.0.0": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.0.4.tgz#07dd5c0521d2ee975201274c472a127917741262" + integrity sha512-LjkSiTbsxIErBiRh5wSZvpZqT4t0/c9+4dOe0PII+6jXR+oj/h66s7E4a/MghV7iT8W9ffoQ5Skoxzs96+gBPA== + dependencies: + "@octokit/types" "^6.0.0" + deprecation "^2.0.0" + once "^1.4.0" + +"@octokit/request@^5.2.0": + version "5.4.12" + resolved "https://registry.yarnpkg.com/@octokit/request/-/request-5.4.12.tgz#b04826fa934670c56b135a81447be2c1723a2ffc" + integrity sha512-MvWYdxengUWTGFpfpefBBpVmmEYfkwMoxonIB3sUGp5rhdgwjXL1ejo6JbgzG/QD9B/NYt/9cJX1pxXeSIUCkg== + dependencies: + "@octokit/endpoint" "^6.0.1" + "@octokit/request-error" "^2.0.0" + "@octokit/types" "^6.0.3" + deprecation "^2.0.0" + is-plain-object "^5.0.0" + node-fetch "^2.6.1" + once "^1.4.0" + universal-user-agent "^6.0.0" + +"@octokit/rest@^16.28.4": + version "16.43.2" + resolved 
"https://registry.yarnpkg.com/@octokit/rest/-/rest-16.43.2.tgz#c53426f1e1d1044dee967023e3279c50993dd91b" + integrity sha512-ngDBevLbBTFfrHZeiS7SAMAZ6ssuVmXuya+F/7RaVvlysgGa1JKJkKWY+jV6TCJYcW0OALfJ7nTIGXcBXzycfQ== + dependencies: + "@octokit/auth-token" "^2.4.0" + "@octokit/plugin-paginate-rest" "^1.1.1" + "@octokit/plugin-request-log" "^1.0.0" + "@octokit/plugin-rest-endpoint-methods" "2.4.0" + "@octokit/request" "^5.2.0" + "@octokit/request-error" "^1.0.2" + atob-lite "^2.0.0" + before-after-hook "^2.0.0" + btoa-lite "^1.0.0" + deprecation "^2.0.0" + lodash.get "^4.4.2" + lodash.set "^4.3.2" + lodash.uniq "^4.5.0" + octokit-pagination-methods "^1.1.0" + once "^1.4.0" + universal-user-agent "^4.0.0" + +"@octokit/types@^2.0.0", "@octokit/types@^2.0.1": + version "2.16.2" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-2.16.2.tgz#4c5f8da3c6fecf3da1811aef678fda03edac35d2" + integrity sha512-O75k56TYvJ8WpAakWwYRN8Bgu60KrmX0z1KqFp1kNiFNkgW+JW+9EBKZ+S33PU6SLvbihqd+3drvPxKK68Ee8Q== + dependencies: + "@types/node" ">= 8" + +"@octokit/types@^6.0.0", "@octokit/types@^6.0.3": + version "6.0.3" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-6.0.3.tgz#df21856c3690a90e332ea36e39fbf413b0a4f25d" + integrity sha512-6y0Emzp+uPpdC5QLzUY1YRklvqiZBMTOz2ByhXdmTFlc3lNv8Mi28dX1U1b4scNtFMUa3tkpjofNFJ5NqMJaZw== + dependencies: + "@octokit/openapi-types" "^1.2.0" + "@types/node" ">= 8" + +"@openapitools/openapi-generator-cli@2.1.10": + version "2.1.10" + resolved "https://registry.yarnpkg.com/@openapitools/openapi-generator-cli/-/openapi-generator-cli-2.1.10.tgz#a1e768477949172e5c10272a3bdfbbd8a14adcde" + integrity sha512-gVfieuXDzGxRlaANHd3HxFw2bYx8wx95ASVittt6IotUWyiU9RT7qxZL517vl0fDZk8qkt0n/4bglhHgAxLEzw== + dependencies: + "@nestjs/common" "7.6.1" + "@nestjs/core" "7.6.1" + "@nuxtjs/opencollective" "0.3.2" + chalk "4.1.0" + commander "6.2.1" + compare-versions "3.6.0" + concurrently "5.3.0" + console.table "0.10.0" + fs-extra "9.0.1" + glob "7.1.6" + 
inquirer "7.3.3" + lodash "4.17.20" + reflect-metadata "0.1.13" + rxjs "6.6.3" + tslib "2.0.3" + +"@sindresorhus/is@^0.14.0": + version "0.14.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" + integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== + +"@sinonjs/commons@^1.7.0": + version "1.8.1" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.1.tgz#e7df00f98a203324f6dc7cc606cad9d4a8ab2217" + integrity sha512-892K+kWUUi3cl+LlqEWIDrhvLgdL79tECi8JZUyq6IviKy/DNhuzCRlbHUjxK89f4ypPMMaFnFuR9Ie6DoIMsw== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz#293674fccb3262ac782c7aadfdeca86b10c75c40" + integrity sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@sqltools/formatter@1.2.2": + version "1.2.2" + resolved "https://registry.yarnpkg.com/@sqltools/formatter/-/formatter-1.2.2.tgz#9390a8127c0dcba61ebd7fdcc748655e191bdd68" + integrity sha512-/5O7Fq6Vnv8L6ucmPjaWbVG1XkP4FO+w5glqfkIsq3Xw4oyNAdJddbnYodNDAfjVUvo/rrSCTom4kAND7T1o5Q== + +"@szmarczak/http-timer@^1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" + integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== + dependencies: + defer-to-connect "^1.0.1" + +"@tootallnate/once@1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== + +"@types/abstract-leveldown@*": + version "5.0.1" + resolved 
"https://registry.yarnpkg.com/@types/abstract-leveldown/-/abstract-leveldown-5.0.1.tgz#3c7750d0186b954c7f2d2f6acc8c3c7ba0c3412e" + integrity sha512-wYxU3kp5zItbxKmeRYCEplS2MW7DzyBnxPGj+GJVHZEUZiK/nn5Ei1sUFgURDh+X051+zsGe28iud3oHjrYWQQ== + +"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.7": + version "7.1.12" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.12.tgz#4d8e9e51eb265552a7e4f1ff2219ab6133bdfb2d" + integrity sha512-wMTHiiTiBAAPebqaPiPDLFA4LYPKr6Ph0Xq/6rq1Ur3v66HXyG+clfR9CNETkD7MQS8ZHvpQOtA53DLws5WAEQ== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.2" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.2.tgz#f3d71178e187858f7c45e30380f8f1b7415a12d8" + integrity sha512-MdSJnBjl+bdwkLskZ3NGFp9YcXGx5ggLpQQPqtgakVhsWK0hTtNYhjpZLlWQTviGTvF8at+Bvli3jV7faPdgeQ== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.0" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.0.tgz#0c888dd70b3ee9eebb6e4f200e809da0076262be" + integrity sha512-NTPErx4/FiPCGScH7foPyr+/1Dkzkni+rHiYHHoTjvwou7AQzJkNeD60A9CXRy+ZEN2B1bggmkTMCDb+Mv5k+A== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": + version "7.0.16" + resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.0.16.tgz#0bbbf70c7bc4193210dd27e252c51260a37cd6a7" + integrity sha512-S63Dt4CZOkuTmpLGGWtT/mQdVORJOpx6SZWGVaP56dda/0Nx5nEe82K7/LAm8zYr6SfMq+1N2OreIOrHAx656w== + dependencies: + "@babel/types" "^7.3.0" + +"@types/blessed@0.1.17": + version "0.1.17" + resolved "https://registry.yarnpkg.com/@types/blessed/-/blessed-0.1.17.tgz#15b3280a6b8729f3c270a762ccafc8514ac5120d" + integrity 
sha512-BKvUtnrXksNdK0fOYV/9HJGkjCcAvOGMSCJsiHaBFyBeyqHwy2OHK32r5XNI+q0eXuAuGqtPOnDetHnbZoYqag== + dependencies: + "@types/node" "*" + +"@types/body-parser@*": + version "1.19.0" + resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.0.tgz#0685b3c47eb3006ffed117cdd55164b61f80538f" + integrity sha512-W98JrE0j2K78swW4ukqMleo8R7h/pFETjM2DQ90MF6XK2i4LO4W3gQ71Lt4w3bfm2EvVSyWHplECvB5sK22yFQ== + dependencies: + "@types/connect" "*" + "@types/node" "*" + +"@types/buffer-json@2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@types/buffer-json/-/buffer-json-2.0.0.tgz#6ec0ee9ba7334a378a9c1d849bccc1b079a33554" + integrity sha512-nFKOrY93Tvv5Tobws+YbkGlPOJsn1nVpZah3BlSyQ4EniFm97KLvSr54tZ5xQp8mlf/XxbYwskNCYQB9EdrPlQ== + +"@types/chai@*": + version "4.2.14" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.14.tgz#44d2dd0b5de6185089375d976b4ec5caf6861193" + integrity sha512-G+ITQPXkwTrslfG5L/BksmbLUA0M1iybEsmCWPqzSxsRRhJZimBKJkoMi8fr/CPygPTj4zO5pJH7I2/cm9M7SQ== + +"@types/colors@1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@types/colors/-/colors-1.2.1.tgz#57703f1a9f7f5fc40afb81eef13e96acdd1016a6" + integrity sha512-7jNkpfN2lVO07nJ1RWzyMnNhH/I5N9iWuMPx9pedptxJ4MODf8rRV0lbJi6RakQ4sKQk231Fw4e2W9n3D7gZ3w== + dependencies: + colors "*" + +"@types/connect@*": + version "3.4.34" + resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.34.tgz#170a40223a6d666006d93ca128af2beb1d9b1901" + integrity sha512-ePPA/JuI+X0vb+gSWlPKOY0NdNAie/rPUqX2GUPpbZwiKTkSPhjXWuee47E4MtE54QVzGCQMQkAL6JhV2E1+cQ== + dependencies: + "@types/node" "*" + +"@types/cookiejar@*": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@types/cookiejar/-/cookiejar-2.1.2.tgz#66ad9331f63fe8a3d3d9d8c6e3906dd10f6446e8" + integrity sha512-t73xJJrvdTjXrn4jLS9VSGRbz0nUY3cl2DMGDU48lKl+HR9dbbjW2A9r3g40VA++mQpy6uuHg33gy7du2BKpog== + +"@types/cors@2.8.9": + version "2.8.9" + resolved 
"https://registry.yarnpkg.com/@types/cors/-/cors-2.8.9.tgz#4bd1fcac72eca8d5bec93e76c7fdcbdc1bc2cd4a" + integrity sha512-zurD1ibz21BRlAOIKP8yhrxlqKx6L9VCwkB5kMiP6nZAhoF5MvC7qS1qPA7nRcr1GJolfkQC7/EAL4hdYejLtg== + +"@types/debug@^4.1.2": + version "4.1.5" + resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.5.tgz#b14efa8852b7768d898906613c23f688713e02cd" + integrity sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ== + +"@types/express-openapi@^1.9.0": + version "1.9.0" + resolved "https://registry.yarnpkg.com/@types/express-openapi/-/express-openapi-1.9.0.tgz#c2ed167d3982c252a58b5a6a7c49eed493454c00" + integrity sha512-eP5Fy/qBRNQhwiFKvSOgfOECN9Z6OK1UiTyY0ld2o0Ah7wlOwtx7Z11LJZ55l8xfMqDSNM4R3hCl6+/yezkVMA== + dependencies: + express-openapi "*" + +"@types/express-serve-static-core@*": + version "4.17.15" + resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.15.tgz#7c3d37829a991da9a507c1efd44d97532e8909e3" + integrity sha512-pb71P0BrBAx7cQE+/7QnA1HTQUkdBKMlkPY7lHUMn0YvPJkL2UA+KW3BdWQ309IT+i9En/qm45ZxpjIcpgEhNQ== + dependencies: + "@types/node" "*" + "@types/qs" "*" + "@types/range-parser" "*" + +"@types/express@*", "@types/express@^4.17.9": + version "4.17.9" + resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.9.tgz#f5f2df6add703ff28428add52bdec8a1091b0a78" + integrity sha512-SDzEIZInC4sivGIFY4Sz1GG6J9UObPwCInYJjko2jzOf/Imx/dlpume6Xxwj1ORL82tBbmN4cPDIDkLbWHk9hw== + dependencies: + "@types/body-parser" "*" + "@types/express-serve-static-core" "*" + "@types/qs" "*" + "@types/serve-static" "*" + +"@types/glob@^7.1.1": + version "7.1.3" + resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.3.tgz#e6ba80f36b7daad2c685acd9266382e68985c183" + integrity sha512-SEYeGAIQIQX8NN6LDKprLjbrd5dARM5EXsd8GI/A5l0apYI1fGMWgPHSe4ZKL4eozlAyI+doUE9XbYS4xCkQ1w== + dependencies: + "@types/minimatch" "*" + "@types/node" "*" + +"@types/graceful-fs@^4.1.2": + 
version "4.1.4" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.4.tgz#4ff9f641a7c6d1a3508ff88bc3141b152772e753" + integrity sha512-mWA/4zFQhfvOA8zWkXobwJvBD7vzcxgrOQ0J5CH1votGqdq9m7+FwtGaqyCZqC3NyyBkc9z4m+iry4LlqcMWJg== + dependencies: + "@types/node" "*" + +"@types/imurmurhash@0.1.1": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@types/imurmurhash/-/imurmurhash-0.1.1.tgz#9dd1135b18af3191e29f0a6ae14cc18b200003ea" + integrity sha512-ThbETc7uxx6rIpNP0fE3bqrSSIeBWPrFY4TzY4WFsvdQYWinub+PLZV/9nT3zicRJJPWbmHqJIsHZHeh5Ad+Ug== + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz#4ba8ddb720221f432e443bd5f9117fd22cfd4762" + integrity sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.0.tgz#508b13aa344fa4976234e75dddcc34925737d821" + integrity sha512-nwKNbvnwJ2/mndE9ItP/zc2TCzw6uuodnF4EHYWD+gCQDVBuRQL5UzbZD0/ezy1iKsFU2ZQiDqg4M9dN4+wZgA== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@26.x": + version "26.0.16" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-26.0.16.tgz#b47abd50f6ed0503f589db8e126fc8eb470cf87c" + integrity sha512-Gp12+7tmKCgv9JjtltxUXokohCAEZfpJaEW5tn871SGRp8I+bRWBonQO7vW5NHwnAHe5dd50+Q4zyKuN35i09g== + dependencies: + jest-diff "^26.0.0" + pretty-format "^26.0.0" + 
+"@types/jest@^26.0.5": + version "26.0.17" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-26.0.17.tgz#c63b44af7528bbc05974dfacc2c90fe13ed5534d" + integrity sha512-5sy3dHuiT/nJGM0XZ8ozFgdR4Y/gmi89n2OCDthTULSi8nG3YdcSDVuxYT3X7eN62NGXWJYz2oNOpDp/aIaynQ== + dependencies: + jest-diff "^26.0.0" + pretty-format "^26.0.0" + +"@types/json-schema@^7.0.3": + version "7.0.6" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.6.tgz#f4c7ec43e81b319a9815115031709f26987891f0" + integrity sha512-3c+yGKvVP5Y9TYBEibGNR+kLtijnj7mYrXRg+WpFb2X9xm04g/DXYkfg4hmzJQosc9snFNUPkbYIhu+KAm6jJw== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= + +"@types/leveldown@4.0.2": + version "4.0.2" + resolved "https://registry.yarnpkg.com/@types/leveldown/-/leveldown-4.0.2.tgz#edb44a33668ae58656721bb1852345e6a2f2e42a" + integrity sha512-VW6QbUnPb5yLbUBcXEh93lFNphyxkBul7Ae41OCgROd76WfLM3qzAbuzErx1LtsTqwcNlbavTr9rWXHCiGVF8A== + dependencies: + "@types/abstract-leveldown" "*" + "@types/node" "*" + +"@types/levelup@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@types/levelup/-/levelup-4.3.0.tgz#4f55585e05a33caa08c1439c344bbba93e947327" + integrity sha512-h82BoajhjU/zwLoM4BUBX/SCodCFi1ae/ZlFOYh5Z4GbHeaXj9H709fF1LYl/StrK8KSwnJOeMRPo9lnC6sz4w== + dependencies: + "@types/abstract-leveldown" "*" + "@types/node" "*" + +"@types/lodash@*", "@types/lodash@^4.14.161": + version "4.14.165" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.165.tgz#74d55d947452e2de0742bad65270433b63a8c30f" + integrity sha512-tjSSOTHhI5mCHTy/OOXYIhi2Wt1qcbHmuXD1Ha7q70CgI/I71afO4XtLb/cVexki1oVYchpul/TOuu3Arcdxrg== + +"@types/lru-cache@5.1.0": + version "5.1.0" + resolved "https://registry.yarnpkg.com/@types/lru-cache/-/lru-cache-5.1.0.tgz#57f228f2b80c046b4a1bd5cac031f81f207f4f03" + integrity 
sha512-RaE0B+14ToE4l6UqdarKPnXwVDuigfFv+5j9Dze/Nqr23yyuqdNvzcZi3xB+3Agvi5R4EOgAksfv3lXX4vBt9w== + +"@types/mime@*": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@types/mime/-/mime-2.0.3.tgz#c893b73721db73699943bfc3653b1deb7faa4a3a" + integrity sha512-Jus9s4CDbqwocc5pOAnh8ShfrnMcPHuJYzVcSUU7lrh8Ni5HuIqX3oilL86p3dlTrk0LzHRCgA/GQ7uNCw6l2Q== + +"@types/minimatch@*", "@types/minimatch@^3.0.3": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" + integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== + +"@types/minimist@^1.2.0": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.1.tgz#283f669ff76d7b8260df8ab7a4262cc83d988256" + integrity sha512-fZQQafSREFyuZcdWFAExYjBiCL7AUCdgsk80iO0q4yihYYdcIiH28CcuPTGFgLOCC8RlW49GSQxdHwZP+I7CNg== + +"@types/node@*", "@types/node@>= 8": + version "14.14.10" + resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.10.tgz#5958a82e41863cfc71f2307b3748e3491ba03785" + integrity sha512-J32dgx2hw8vXrSbu4ZlVhn1Nm3GbeCFNw2FWL8S5QKucHGY0cyNwjdQdO+KMBZ4wpmC7KhLCiNsdk1RFRIYUQQ== + +"@types/node@^14.14.11": + version "14.14.11" + resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.11.tgz#fc25a4248a5e8d0837019b1d170146d07334abe0" + integrity sha512-BJ97wAUuU3NUiUCp44xzUFquQEvnk1wu7q4CMEUYKJWjdkr0YWYDsm4RFtAvxYsNjLsKcrFt6RvK8r+mnzMbEQ== + +"@types/normalize-package-data@^2.4.0": + version "2.4.0" + resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" + integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== + +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + integrity 
sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/pg@^7.14.3": + version "7.14.10" + resolved "https://registry.yarnpkg.com/@types/pg/-/pg-7.14.10.tgz#95c01de1a54e45d78058b9b5b4c365102e4113f7" + integrity sha512-m6G0mrpj71YgVgHJF0cIHC3OZTKiQSUzTkMj869a+YWXF2tdbmO2PmIpNnDkiFPhHWcoGq2bk5P2e0CZX0F9Mg== + dependencies: + "@types/node" "*" + pg-protocol "^1.2.0" + pg-types "^2.2.0" + +"@types/prettier@^2.0.0": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.1.5.tgz#b6ab3bba29e16b821d84e09ecfaded462b816b00" + integrity sha512-UEyp8LwZ4Dg30kVU2Q3amHHyTn1jEdhCIE59ANed76GaT1Vp76DD3ZWSAxgCrw6wJ0TqeoBpqmfUHiUDPs//HQ== + +"@types/qs@*": + version "6.9.5" + resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.5.tgz#434711bdd49eb5ee69d90c1d67c354a9a8ecb18b" + integrity sha512-/JHkVHtx/REVG0VVToGRGH2+23hsYLHdyG+GrvoUGlGAd0ErauXDyvHtRI/7H7mzLm+tBCKA7pfcpkQ1lf58iQ== + +"@types/range-parser@*": + version "1.2.3" + resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.3.tgz#7ee330ba7caafb98090bece86a5ee44115904c2c" + integrity sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA== + +"@types/serve-static@*": + version "1.13.8" + resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.13.8.tgz#851129d434433c7082148574ffec263d58309c46" + integrity sha512-MoJhSQreaVoL+/hurAZzIm8wafFR6ajiTM1m4A0kv6AGeVBl4r4pOV8bGFrjjq1sGxDTnCoF8i22o0/aE5XCyA== + dependencies: + "@types/mime" "*" + "@types/node" "*" + +"@types/simple-peer@9.6.1": + version "9.6.1" + resolved "https://registry.yarnpkg.com/@types/simple-peer/-/simple-peer-9.6.1.tgz#461ff4159cfaca861cb2fe695de6e557881a98c8" + integrity sha512-WdXxmHW+m0NaD5XPGlp5AB7raq+NUWX/+/aQvDcoOx0qiP78QmDdk015W29uoyvGhyiJCb9dZJuI2Oi39zGRfg== + dependencies: + "@types/node" "*" + +"@types/sinon@*": + version "9.0.9" + resolved 
"https://registry.yarnpkg.com/@types/sinon/-/sinon-9.0.9.tgz#115843b491583f924080f684b6d0d7438344f73c" + integrity sha512-z/y8maYOQyYLyqaOB+dYQ6i0pxKLOsfwCmHmn4T7jS/SDHicIslr37oE3Dg8SCqKrKeBy6Lemu7do2yy+unLrw== + dependencies: + "@types/sinonjs__fake-timers" "*" + +"@types/sinonjs__fake-timers@*": + version "6.0.2" + resolved "https://registry.yarnpkg.com/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-6.0.2.tgz#3a84cf5ec3249439015e14049bd3161419bf9eae" + integrity sha512-dIPoZ3g5gcx9zZEszaxLSVTvMReD3xxyyDnQUjA6IYDG9Ba2AV0otMPs+77sG9ojB4Qr2N2Vk5RnKeuA0X/0bg== + +"@types/stack-utils@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.0.tgz#7036640b4e21cc2f259ae826ce843d277dad8cff" + integrity sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw== + +"@types/superagent@*": + version "4.1.10" + resolved "https://registry.yarnpkg.com/@types/superagent/-/superagent-4.1.10.tgz#5e2cc721edf58f64fe9b819f326ee74803adee86" + integrity sha512-xAgkb2CMWUMCyVc/3+7iQfOEBE75NvuZeezvmixbUw3nmENf2tCnQkW5yQLTYqvXUQ+R6EXxdqKKbal2zM5V/g== + dependencies: + "@types/cookiejar" "*" + "@types/node" "*" + +"@types/supertest@2.0.10": + version "2.0.10" + resolved "https://registry.yarnpkg.com/@types/supertest/-/supertest-2.0.10.tgz#630d79b4d82c73e043e43ff777a9ca98d457cab7" + integrity sha512-Xt8TbEyZTnD5Xulw95GLMOkmjGICrOQyJ2jqgkSjAUR3mm7pAIzSR0NFBaMcwlzVvlpCjNwbATcWWwjNiZiFrQ== + dependencies: + "@types/superagent" "*" + +"@types/swagger-ui-express@^4.1.2": + version "4.1.2" + resolved "https://registry.yarnpkg.com/@types/swagger-ui-express/-/swagger-ui-express-4.1.2.tgz#cfc884904a104c3193f46f423d04ee0416be1ef4" + integrity sha512-t9teFTU8dKe69rX9EwL6OM2hbVquYdFM+sQ0REny4RalPlxAm+zyP04B12j4c7qEuDS6CnlwICywqWStPA3v4g== + dependencies: + "@types/express" "*" + "@types/serve-static" "*" + +"@types/uuid@^8.0.1": + version "8.3.0" + resolved 
"https://registry.yarnpkg.com/@types/uuid/-/uuid-8.3.0.tgz#215c231dff736d5ba92410e6d602050cce7e273f" + integrity sha512-eQ9qFW/fhfGJF8WKHGEHZEyVWfZxrT+6CLIJGBcZPfxUh/+BnEj+UCGYMlr9qZuX/2AltsvwrGqp0LhEW8D0zQ== + +"@types/validator@^13.1.1": + version "13.1.1" + resolved "https://registry.yarnpkg.com/@types/validator/-/validator-13.1.1.tgz#8d220ae4851a1e745764c7ca72294b6d46aeb533" + integrity sha512-/39uXOKe1KV4ElXb8cp0RVyeRn9X1QRwllComMMql+FQ9nhmTx0Yhw+kJtccPSShsjma+KXjW/TiXyGUNqNn+w== + +"@types/winston@^2.4.4": + version "2.4.4" + resolved "https://registry.yarnpkg.com/@types/winston/-/winston-2.4.4.tgz#48cc744b7b42fad74b9a2e8490e0112bd9a3d08d" + integrity sha512-BVGCztsypW8EYwJ+Hq+QNYiT/MUyCif0ouBH+flrY66O5W+KIXAMML6E/0fJpm7VjIzgangahl5S03bJJQGrZw== + dependencies: + winston "*" + +"@types/ws@^7.2.6": + version "7.4.0" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-7.4.0.tgz#499690ea08736e05a8186113dac37769ab251a0e" + integrity sha512-Y29uQ3Uy+58bZrFLhX36hcI3Np37nqWE7ky5tjiDoy1GDZnIwVxS0CgF+s+1bXMzjKBFy+fqaRfb708iNzdinw== + dependencies: + "@types/node" "*" + +"@types/yargs-parser@*": + version "15.0.0" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-15.0.0.tgz#cb3f9f741869e20cce330ffbeb9271590483882d" + integrity sha512-FA/BWv8t8ZWJ+gEOnLLd8ygxH/2UFbAvgEonyfN6yWGLKc7zVjbpl2Y4CTjid9h2RfgPP6SEt6uHwEOply00yw== + +"@types/yargs@^15.0.0": + version "15.0.11" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-15.0.11.tgz#361d7579ecdac1527687bcebf9946621c12ab78c" + integrity sha512-jfcNBxHFYJ4nPIacsi3woz1+kvUO6s1CyeEhtnDHBjHUMNj5UlW2GynmnSgiJJEdNg9yW5C8lfoNRZrHGv5EqA== + dependencies: + "@types/yargs-parser" "*" + +"@types/yup@0.29.10": + version "0.29.10" + resolved "https://registry.yarnpkg.com/@types/yup/-/yup-0.29.10.tgz#1bfa4c4a47a6f57fcc8510948757b9e47c0d6ca3" + integrity sha512-kRKRZaWkxxnOK7H5C4oWqhCw9ID1QF3cBZ2oAPoXYsjIncwgpDGigWtXGjZ91t+hsc3cvPdBci9YoJo1A96CYg== + +"@typescript-eslint/eslint-plugin@4.9.0": + 
version "4.9.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.9.0.tgz#8fde15743413661fdc086c9f1f5d74a80b856113" + integrity sha512-WrVzGMzzCrgrpnQMQm4Tnf+dk+wdl/YbgIgd5hKGa2P+lnJ2MON+nQnbwgbxtN9QDLi8HO+JAq0/krMnjQK6Cw== + dependencies: + "@typescript-eslint/experimental-utils" "4.9.0" + "@typescript-eslint/scope-manager" "4.9.0" + debug "^4.1.1" + functional-red-black-tree "^1.0.1" + regexpp "^3.0.0" + semver "^7.3.2" + tsutils "^3.17.1" + +"@typescript-eslint/eslint-plugin@4.9.1": + version "4.9.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.9.1.tgz#66758cbe129b965fe9c63b04b405d0cf5280868b" + integrity sha512-QRLDSvIPeI1pz5tVuurD+cStNR4sle4avtHhxA+2uyixWGFjKzJ+EaFVRW6dA/jOgjV5DTAjOxboQkRDE8cRlQ== + dependencies: + "@typescript-eslint/experimental-utils" "4.9.1" + "@typescript-eslint/scope-manager" "4.9.1" + debug "^4.1.1" + functional-red-black-tree "^1.0.1" + regexpp "^3.0.0" + semver "^7.3.2" + tsutils "^3.17.1" + +"@typescript-eslint/experimental-utils@4.9.0", "@typescript-eslint/experimental-utils@^4.0.1": + version "4.9.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-4.9.0.tgz#23a296b85d243afba24e75a43fd55aceda5141f0" + integrity sha512-0p8GnDWB3R2oGhmRXlEnCvYOtaBCijtA5uBfH5GxQKsukdSQyI4opC4NGTUb88CagsoNQ4rb/hId2JuMbzWKFQ== + dependencies: + "@types/json-schema" "^7.0.3" + "@typescript-eslint/scope-manager" "4.9.0" + "@typescript-eslint/types" "4.9.0" + "@typescript-eslint/typescript-estree" "4.9.0" + eslint-scope "^5.0.0" + eslint-utils "^2.0.0" + +"@typescript-eslint/experimental-utils@4.9.1": + version "4.9.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-4.9.1.tgz#86633e8395191d65786a808dc3df030a55267ae2" + integrity sha512-c3k/xJqk0exLFs+cWSJxIjqLYwdHCuLWhnpnikmPQD2+NGAx9KjLYlBDcSI81EArh9FDYSL6dslAUSwILeWOxg== + dependencies: + "@types/json-schema" 
"^7.0.3" + "@typescript-eslint/scope-manager" "4.9.1" + "@typescript-eslint/types" "4.9.1" + "@typescript-eslint/typescript-estree" "4.9.1" + eslint-scope "^5.0.0" + eslint-utils "^2.0.0" + +"@typescript-eslint/parser@4.9.0": + version "4.9.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.9.0.tgz#bb65f1214b5e221604996db53ef77c9d62b09249" + integrity sha512-QRSDAV8tGZoQye/ogp28ypb8qpsZPV6FOLD+tbN4ohKUWHD2n/u0Q2tIBnCsGwQCiD94RdtLkcqpdK4vKcLCCw== + dependencies: + "@typescript-eslint/scope-manager" "4.9.0" + "@typescript-eslint/types" "4.9.0" + "@typescript-eslint/typescript-estree" "4.9.0" + debug "^4.1.1" + +"@typescript-eslint/parser@4.9.1": + version "4.9.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.9.1.tgz#2d74c4db5dd5117379a9659081a4d1ec02629055" + integrity sha512-Gv2VpqiomvQ2v4UL+dXlQcZ8zCX4eTkoIW+1aGVWT6yTO+6jbxsw7yQl2z2pPl/4B9qa5JXeIbhJpONKjXIy3g== + dependencies: + "@typescript-eslint/scope-manager" "4.9.1" + "@typescript-eslint/types" "4.9.1" + "@typescript-eslint/typescript-estree" "4.9.1" + debug "^4.1.1" + +"@typescript-eslint/scope-manager@4.9.0": + version "4.9.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.9.0.tgz#5eefe305d6b71d1c85af6587b048426bfd4d3708" + integrity sha512-q/81jtmcDtMRE+nfFt5pWqO0R41k46gpVLnuefqVOXl4QV1GdQoBWfk5REcipoJNQH9+F5l+dwa9Li5fbALjzg== + dependencies: + "@typescript-eslint/types" "4.9.0" + "@typescript-eslint/visitor-keys" "4.9.0" + +"@typescript-eslint/scope-manager@4.9.1": + version "4.9.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.9.1.tgz#cc2fde310b3f3deafe8436a924e784eaab265103" + integrity sha512-sa4L9yUfD/1sg9Kl8OxPxvpUcqxKXRjBeZxBuZSSV1v13hjfEJkn84n0An2hN8oLQ1PmEl2uA6FkI07idXeFgQ== + dependencies: + "@typescript-eslint/types" "4.9.1" + "@typescript-eslint/visitor-keys" "4.9.1" + +"@typescript-eslint/types@4.9.0": + version "4.9.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.9.0.tgz#3fe8c3632abd07095c7458f7451bd14c85d0033c" + integrity sha512-luzLKmowfiM/IoJL/rus1K9iZpSJK6GlOS/1ezKplb7MkORt2dDcfi8g9B0bsF6JoRGhqn0D3Va55b+vredFHA== + +"@typescript-eslint/types@4.9.1": + version "4.9.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.9.1.tgz#a1a7dd80e4e5ac2c593bc458d75dd1edaf77faa2" + integrity sha512-fjkT+tXR13ks6Le7JiEdagnwEFc49IkOyys7ueWQ4O8k4quKPwPJudrwlVOJCUQhXo45PrfIvIarcrEjFTNwUA== + +"@typescript-eslint/typescript-estree@4.9.0": + version "4.9.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.9.0.tgz#38a98df6ee281cfd6164d6f9d91795b37d9e508c" + integrity sha512-rmDR++PGrIyQzAtt3pPcmKWLr7MA+u/Cmq9b/rON3//t5WofNR4m/Ybft2vOLj0WtUzjn018ekHjTsnIyBsQug== + dependencies: + "@typescript-eslint/types" "4.9.0" + "@typescript-eslint/visitor-keys" "4.9.0" + debug "^4.1.1" + globby "^11.0.1" + is-glob "^4.0.1" + lodash "^4.17.15" + semver "^7.3.2" + tsutils "^3.17.1" + +"@typescript-eslint/typescript-estree@4.9.1": + version "4.9.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.9.1.tgz#6e5b86ff5a5f66809e1f347469fadeec69ac50bf" + integrity sha512-bzP8vqwX6Vgmvs81bPtCkLtM/Skh36NE6unu6tsDeU/ZFoYthlTXbBmpIrvosgiDKlWTfb2ZpPELHH89aQjeQw== + dependencies: + "@typescript-eslint/types" "4.9.1" + "@typescript-eslint/visitor-keys" "4.9.1" + debug "^4.1.1" + globby "^11.0.1" + is-glob "^4.0.1" + lodash "^4.17.15" + semver "^7.3.2" + tsutils "^3.17.1" + +"@typescript-eslint/visitor-keys@4.9.0": + version "4.9.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.9.0.tgz#f284e9fac43f2d6d35094ce137473ee321f266c8" + integrity sha512-sV45zfdRqQo1A97pOSx3fsjR+3blmwtdCt8LDrXgCX36v4Vmz4KHrhpV6Fo2cRdXmyumxx11AHw0pNJqCNpDyg== + dependencies: + "@typescript-eslint/types" "4.9.0" + eslint-visitor-keys "^2.0.0" + 
+"@typescript-eslint/visitor-keys@4.9.1": + version "4.9.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.9.1.tgz#d76374a58c4ead9e92b454d186fea63487b25ae1" + integrity sha512-9gspzc6UqLQHd7lXQS7oWs+hrYggspv/rk6zzEMhCbYwPE/sF7oxo7GAjkS35Tdlt7wguIG+ViWCPtVZHz/ybQ== + dependencies: + "@typescript-eslint/types" "4.9.1" + eslint-visitor-keys "^2.0.0" + +"@zkochan/cmd-shim@^3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@zkochan/cmd-shim/-/cmd-shim-3.1.0.tgz#2ab8ed81f5bb5452a85f25758eb9b8681982fd2e" + integrity sha512-o8l0+x7C7sMZU3v9GuJIAU10qQLtwR1dtRQIOmlNMtyaqhmpXOzx1HWiYoWfmmf9HHZoAkXpc9TM9PQYF9d4Jg== + dependencies: + is-windows "^1.0.0" + mkdirp-promise "^5.0.1" + mz "^2.5.0" + +JSONStream@^1.0.4, JSONStream@^1.2.1, JSONStream@^1.3.4, JSONStream@^1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0" + integrity sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ== + dependencies: + jsonparse "^1.2.0" + through ">=2.2.7 <3" + +abab@^2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a" + integrity sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q== + +abbrev@1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== + +abstract-leveldown@~6.2.1: + version "6.2.3" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz#036543d87e3710f2528e47040bc3261b77a9a8eb" + integrity sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ== + dependencies: + buffer "^5.5.0" + immediate "^3.2.3" + level-concat-iterator 
"~2.0.0" + level-supports "~1.0.0" + xtend "~4.0.0" + +accepts@~1.3.7: + version "1.3.7" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" + integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== + dependencies: + mime-types "~2.1.24" + negotiator "0.6.2" + +acorn-globals@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" + integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== + dependencies: + acorn "^7.1.1" + acorn-walk "^7.1.1" + +acorn-jsx@^5.2.0, acorn-jsx@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.1.tgz#fc8661e11b7ac1539c47dbfea2e72b3af34d267b" + integrity sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng== + +acorn-walk@^7.1.1: + version "7.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== + +acorn@^7.1.1, acorn@^7.4.0: + version "7.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +agent-base@4, agent-base@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.3.0.tgz#8165f01c436009bccad0b1d122f05ed770efc6ee" + integrity sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg== + dependencies: + es6-promisify "^5.0.0" + +agent-base@5: + version "5.1.1" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-5.1.1.tgz#e8fb3f242959db44d63be665db7a8e739537a32c" + integrity 
sha512-TMeqbNl2fMW0nMjTEPOwe3J/PRFP4vqeoNuQMG0HlMrtm5QxKqdvAkZ1pRBQ/ulIyDD5Yq0nJ7YbdD8ey0TO3g== + +agent-base@6: + version "6.0.2" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +agent-base@~4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.2.1.tgz#d89e5999f797875674c07d87f260fc41e83e8ca9" + integrity sha512-JVwXMr9nHYTUXsBFKUqhJwvlcYU/blreOEUkhNR2eXZIvwd+c+o5V4MgDPKWnMS/56awN3TRzIP+KoPn+roQtg== + dependencies: + es6-promisify "^5.0.0" + +agentkeepalive@^3.4.1: + version "3.5.2" + resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-3.5.2.tgz#a113924dd3fa24a0bc3b78108c450c2abee00f67" + integrity sha512-e0L/HNe6qkQ7H19kTlRRqUibEAwDK5AFk6y3PtMsuut2VAH6+Q4xZml1tNDJD7kSAyqmbG/K08K5WEJYtUrSlQ== + dependencies: + humanize-ms "^1.2.1" + +ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.6, ajv@^6.5.2, ajv@^6.5.4: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +alce@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/alce/-/alce-1.2.0.tgz#a8be2dacaac42494612f18dc09db691f3dea4aab" + integrity sha1-qL4trKrEJJRhLxjcCdtpHz3qSqs= + dependencies: + esprima "^1.2.0" + estraverse "^1.5.0" + +ansi-align@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-3.0.0.tgz#b536b371cf687caaef236c18d3e21fe3797467cb" + integrity sha512-ZpClVKqXN3RGBmKibdfWzqCY4lnjEuoNzU5T0oEFpfd/z5qJHVarukridD4juLO2FXMiwUQxr9WqQtaYa8XRYw== + dependencies: + string-width 
"^3.0.0" + +ansi-colors@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" + integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== + +ansi-escapes@^3.1.0, ansi-escapes@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" + integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== + +ansi-escapes@^4.2.1, ansi-escapes@^4.3.0: + version "4.3.1" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.1.tgz#a5c47cc43181f1f38ffd7076837700d395522a61" + integrity sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA== + dependencies: + type-fest "^0.11.0" + +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= + +ansi-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= + +ansi-regex@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" + integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== + +ansi-regex@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75" + integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg== + +ansi-styles@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + 
integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= + +ansi-styles@^3.0.0, ansi-styles@^3.2.0, ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0, ansi-styles@^4.2.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansicolors@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/ansicolors/-/ansicolors-0.3.2.tgz#665597de86a9ffe3aa9bfbe6cae5c6ea426b4979" + integrity sha1-ZlWX3oap/+Oqm/vmyuXG6kJrSXk= + +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha1-q8av7tzqUugJzcA3au0845Y10X8= + +any-shell-escape@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/any-shell-escape/-/any-shell-escape-0.1.1.tgz#d55ab972244c71a9a5e1ab0879f30bf110806959" + integrity sha1-1Vq5ciRMcaml4asIefML8RCAaVk= + +anymatch@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" + integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== + dependencies: + micromatch "^3.1.4" + normalize-path "^2.1.1" + +anymatch@^3.0.3, anymatch@~3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" + integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== + dependencies: + 
normalize-path "^3.0.0" + picomatch "^2.0.4" + +app-root-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/app-root-path/-/app-root-path-3.0.0.tgz#210b6f43873227e18a4b810a032283311555d5ad" + integrity sha512-qMcx+Gy2UZynHjOHOIXPNvpf+9cjvk3cWrBBK7zg4gH9+clobJRb9NGzcT7mQTcV/6Gm/1WelUtqxVXnNlrwcw== + +append-field@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/append-field/-/append-field-1.0.0.tgz#1e3440e915f0b1203d23748e78edd7b9b5b43e56" + integrity sha1-HjRA6RXwsSA9I3SOeO3XubW0PlY= + +aproba@^1.0.3, aproba@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" + integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== + +aproba@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.0.0.tgz#52520b8ae5b569215b354efc0caa3fe1e45a8adc" + integrity sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ== + +are-we-there-yet@~1.1.2: + version "1.1.5" + resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" + integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== + dependencies: + delegates "^1.0.0" + readable-stream "^2.0.6" + +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argv@0.0.2: + version "0.0.2" + resolved 
"https://registry.yarnpkg.com/argv/-/argv-0.0.2.tgz#ecbd16f8949b157183711b1bda334f37840185ab" + integrity sha1-7L0W+JSbFXGDcRsb2jNPN4QBhas= + +arr-diff@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= + +arr-flatten@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" + integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== + +arr-union@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= + +array-differ@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-1.0.0.tgz#eff52e3758249d33be402b8bb8e564bb2b5d4031" + integrity sha1-7/UuN1gknTO+QCuLuOVkuytdQDE= + +array-differ@^2.0.3: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-2.1.0.tgz#4b9c1c3f14b906757082925769e8ab904f4801b1" + integrity sha512-KbUpJgx909ZscOc/7CLATBFam7P1Z1QRQInvgT0UztM9Q72aGKCunKASAl7WNW0tnPmPyEMeMhdsfWhfmW037w== + +array-differ@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-3.0.0.tgz#3cbb3d0f316810eafcc47624734237d6aee4ae6b" + integrity sha512-THtfYS6KtME/yIAhKjZ2ul7XI96lQGHRputJQHO80LAWQnuGP4iCIN8vdMRboGbIEYBwU33q8Tch1os2+X0kMg== + +array-find-index@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" + integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= + 
+array-ify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/array-ify/-/array-ify-1.0.0.tgz#9e528762b4a9066ad163a6962a364418e9626ece" + integrity sha1-nlKHYrSpBmrRY6aWKjZEGOlibs4= + +array-includes@^3.1.1: + version "3.1.2" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.2.tgz#a8db03e0b88c8c6aeddc49cb132f9bcab4ebf9c8" + integrity sha512-w2GspexNQpx+PutG3QpT437/BenZBj0M/MZGn5mzv/MofYqo0xmRHzn4lFsoDlWJ+THYsGJmFlW68WlDFx7VRw== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.1" + get-intrinsic "^1.0.1" + is-string "^1.0.5" + +array-union@^1.0.1, array-union@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" + integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= + dependencies: + array-uniq "^1.0.1" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array-uniq@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" + integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= + +array-unique@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= + +array.prototype.flat@^1.2.3: + version "1.2.4" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz#6ef638b43312bd401b4c6199fdec7e2dc9e9a123" + integrity sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.1" + +array.prototype.flatmap@^1.2.3: + version 
"1.2.4" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.2.4.tgz#94cfd47cc1556ec0747d97f7c7738c58122004c9" + integrity sha512-r9Z0zYoxqHz60vvQbWEdXIEtCwHF0yxaWfno9qzXeNHvfyl3BZqygmGzb84dsubyaXLH4husF+NFgMSdpZhk2Q== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.1" + function-bind "^1.1.1" + +arrify@^1.0.0, arrify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + integrity sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0= + +arrify@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa" + integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== + +asap@^2.0.0: + version "2.0.6" + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= + +asn1@~0.2.3: + version "0.2.4" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" + integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== + dependencies: + safer-buffer "~2.1.0" + +assert-plus@1.0.0, assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= + +assertion-error@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b" + integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw== + +assign-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + integrity 
sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= + +astral-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" + integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg== + +async@0.9.x: + version "0.9.2" + resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d" + integrity sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0= + +async@^2.6.0, async@^2.6.2: + version "2.6.3" + resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" + integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg== + dependencies: + lodash "^4.17.14" + +async@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.0.tgz#b3a2685c5ebb641d3de02d161002c60fc9f85720" + integrity sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw== + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= + +at-least-node@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + +atob-lite@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/atob-lite/-/atob-lite-2.0.0.tgz#0fef5ad46f1bd7a8502c65727f0367d5ee43d696" + integrity sha1-D+9a1G8b16hQLGVyfwNn1e5D1pY= + +atob@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" + integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== + +aws-sign2@~0.7.0: + version "0.7.0" 
+ resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= + +aws4@^1.8.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" + integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== + +axios@0.21.0: + version "0.21.0" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.0.tgz#26df088803a2350dff2c27f96fef99fe49442aca" + integrity sha512-fmkJBknJKoZwem3/IKSSLpkdNXZeBu5Q7GA/aRsr2btgrptmSCxi2oFjZHqGdK9DoTil9PIHlPIZw2EcRJXRvw== + dependencies: + follow-redirects "^1.10.0" + +axios@0.21.1: + version "0.21.1" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.1.tgz#22563481962f4d6bde9a76d516ef0e5d3c09b2b8" + integrity sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA== + dependencies: + follow-redirects "^1.10.0" + +axios@^0.18.0: + version "0.18.1" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.18.1.tgz#ff3f0de2e7b5d180e757ad98000f1081b87bcea3" + integrity sha512-0BfJq4NSfQXd+SkFdrvFbG7addhYSBA2mQwISr46pD6E5iqkWg02RAs8vyTT/j0RTnoYmeXauBuSv1qKwR179g== + dependencies: + follow-redirects "1.5.10" + is-buffer "^2.0.2" + +babel-jest@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056" + integrity sha512-pl4Q+GAVOHwvjrck6jKjvmGhnO3jHX/xuB9d27f+EJZ/6k+6nMuPjorrYp7s++bKKdANwzElBWnLWaObvTnaZA== + dependencies: + "@jest/transform" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/babel__core" "^7.1.7" + babel-plugin-istanbul "^6.0.0" + babel-preset-jest "^26.6.2" + chalk "^4.0.0" + graceful-fs "^4.2.4" + slash "^3.0.0" + +babel-plugin-istanbul@^6.0.0: + version "6.0.0" + resolved 
"https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.0.0.tgz#e159ccdc9af95e0b570c75b4573b7c34d671d765" + integrity sha512-AF55rZXpe7trmEylbaE1Gv54wn6rwU03aptvRoVIGP8YykoSxqdVLV1TfwflBCE/QtHmqtP8SWlTENqbK8GCSQ== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^4.0.0" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-26.6.2.tgz#8185bd030348d254c6d7dd974355e6a28b21e62d" + integrity sha512-PO9t0697lNTmcEHH69mdtYiOIkkOlj9fySqfO3K1eCcdISevLAE0xY59VLLUj0SoiPiTX/JU2CYFpILydUa5Lw== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.0.0" + "@types/babel__traverse" "^7.0.6" + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.0.tgz#cf5feef29551253471cfa82fc8e0f5063df07a77" + integrity sha512-mGkvkpocWJes1CmMKtgGUwCeeq0pOhALyymozzDWYomHTbDLwueDYG6p4TK1YOeYHCzBzYPsWkgTto10JubI1Q== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-jest@^26.6.2: + version "26.6.2" + resolved 
"https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-26.6.2.tgz#747872b1171df032252426586881d62d31798fee" + integrity sha512-YvdtlVm9t3k777c5NPQIv6cxFFFapys25HiUmuSgHwIZhfifweR5c5Sf5nwE3MAbfu327CYSvps8Yx6ANLyleQ== + dependencies: + babel-plugin-jest-hoist "^26.6.2" + babel-preset-current-node-syntax "^1.0.0" + +balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= + +base64-js@^1.2.0, base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + +base@^0.11.1: + version "0.11.2" + resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== + dependencies: + cache-base "^1.0.1" + class-utils "^0.3.5" + component-emitter "^1.2.1" + define-property "^1.0.0" + isobject "^3.0.1" + mixin-deep "^1.2.0" + pascalcase "^0.1.1" + +bcrypt-pbkdf@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= + dependencies: + tweetnacl "^0.14.3" + +before-after-hook@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.1.0.tgz#b6c03487f44e24200dd30ca5e6a1979c5d2fb635" + integrity sha512-IWIbu7pMqyw3EAJHzzHbWa85b6oud/yfKYg5rqB5hNE8CeMi3nX+2C2sj0HswfblST86hpVEOAb9x34NZd6P7A== + +binary-extensions@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.1.0.tgz#30fa40c9e7fe07dbc895678cd287024dea241dd9" + integrity 
sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ== + +binaryextensions@^2.1.2: + version "2.3.0" + resolved "https://registry.yarnpkg.com/binaryextensions/-/binaryextensions-2.3.0.tgz#1d269cbf7e6243ea886aa41453c3651ccbe13c22" + integrity sha512-nAihlQsYGyc5Bwq6+EsubvANYGExeJKHDO3RjnvwU042fawQTQfM3Kxn7IHUXQOz4bzfwsGYYHGSvXyW4zOGLg== + +bindings@^1.2.1: + version "1.5.0" + resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" + integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== + dependencies: + file-uri-to-path "1.0.0" + +bl@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.0.3.tgz#12d6287adc29080e22a705e5764b2a9522cdc489" + integrity sha512-fs4G6/Hu4/EE+F75J8DuN/0IpQqNjAdC7aEQv7Qt8MHGUH7Ckv2MwTEEeN9QehD0pfIDkMI1bkHYkKy7xHyKIg== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + +blake3-wasm@2.1.5: + version "2.1.5" + resolved "https://registry.yarnpkg.com/blake3-wasm/-/blake3-wasm-2.1.5.tgz#b22dbb84bc9419ed0159caa76af4b1b132e6ba52" + integrity sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g== + +blessed@0.1.81: + version "0.1.81" + resolved "https://registry.yarnpkg.com/blessed/-/blessed-0.1.81.tgz#f962d687ec2c369570ae71af843256e6d0ca1129" + integrity sha1-+WLWh+wsNpVwrnGvhDJW5tDKESk= + +bluebird@^3.5.1, bluebird@^3.5.3, bluebird@^3.5.5: + version "3.7.2" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + +body-parser@1.19.0: + version "1.19.0" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" + integrity 
sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== + dependencies: + bytes "3.1.0" + content-type "~1.0.4" + debug "2.6.9" + depd "~1.1.2" + http-errors "1.7.2" + iconv-lite "0.4.24" + on-finished "~2.3.0" + qs "6.7.0" + raw-body "2.4.0" + type-is "~1.6.17" + +boxen@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/boxen/-/boxen-4.2.0.tgz#e411b62357d6d6d36587c8ac3d5d974daa070e64" + integrity sha512-eB4uT9RGzg2odpER62bBwSLvUeGC+WbRjjyyFhGsKnc8wp/m0+hQsMUvUe3H2V0D5vw0nBdO1hCJoZo5mKeuIQ== + dependencies: + ansi-align "^3.0.0" + camelcase "^5.3.1" + chalk "^3.0.0" + cli-boxes "^2.2.0" + string-width "^4.1.0" + term-size "^2.1.0" + type-fest "^0.8.1" + widest-line "^3.1.0" + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== + dependencies: + arr-flatten "^1.1.0" + array-unique "^0.3.2" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex "^3.0.1" + +braces@^3.0.1, braces@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browser-process-hrtime@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +bs-logger@0.x: + version "0.2.6" + resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" + integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== + dependencies: + fast-json-stable-stringify "2.x" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +btoa-lite@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/btoa-lite/-/btoa-lite-1.0.0.tgz#337766da15801210fdd956c22e9c6891ab9d0337" + integrity sha1-M3dm2hWAEhD92VbCLpxokaudAzc= + +buffer-alloc-unsafe@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" + integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== + +buffer-alloc@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" + integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== + dependencies: + buffer-alloc-unsafe "^1.1.0" + buffer-fill "^1.0.0" + +buffer-fill@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" + integrity sha1-+PeLdniYiO858gXNY39o5wISKyw= + +buffer-from@1.x, buffer-from@^1.0.0: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" + integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== + +buffer-json@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/buffer-json/-/buffer-json-2.0.0.tgz#f73e13b1e42f196fe2fd67d001c7d7107edd7c23" + integrity sha512-+jjPFVqyfF1esi9fvfUs3NqM0pH1ziZ36VP4hmA/y/Ssfo/5w5xHKfTw9BwQjoJ1w/oVtpLomqwUHKdefGyuHw== + +buffer-map@0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/buffer-map/-/buffer-map-0.0.7.tgz#5c2db65f7b3a723a2d9dff8e896fada3d2dc1c5d" + integrity sha512-95try3p/vMRkIAAnJDaGkFhGpT/65NoeW6XelEPjAomWYR58RQtW4khn0SwKj34kZoE7uxL7w2koZSwbnszvQQ== + +buffer-writer@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/buffer-writer/-/buffer-writer-2.0.0.tgz#ce7eb81a38f7829db09c873f2fbb792c0c98ec04" + integrity sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw== + +buffer@6.0.3, buffer@^6.0.2: + version "6.0.3" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" + integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.2.1" + +buffer@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.1.13" + +builtins@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/builtins/-/builtins-1.0.3.tgz#cb94faeb61c8696451db36534e1422f94f0aee88" + integrity sha1-y5T662HIaWRR2zZTThQi+U8K7og= + +busboy@^0.2.11: + version "0.2.14" + resolved "https://registry.yarnpkg.com/busboy/-/busboy-0.2.14.tgz#6c2a622efcf47c57bbbe1e2a9c37ad36c7925453" + 
integrity sha1-bCpiLvz0fFe7vh4qnDetNseSVFM= + dependencies: + dicer "0.2.5" + readable-stream "1.1.x" + +byline@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/byline/-/byline-5.0.0.tgz#741c5216468eadc457b03410118ad77de8c1ddb1" + integrity sha1-dBxSFkaOrcRXsDQQEYrXfejB3bE= + +byte-size@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/byte-size/-/byte-size-5.0.1.tgz#4b651039a5ecd96767e71a3d7ed380e48bed4191" + integrity sha512-/XuKeqWocKsYa/cBY1YbSJSWWqTi4cFgr9S6OyM7PBaPbr9zvNGwWP33vt0uqGhwDdN+y3yhbXVILEUpnwEWGw== + +bytes@3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" + integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== + +cacache@^12.0.0, cacache@^12.0.3: + version "12.0.4" + resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.4.tgz#668bcbd105aeb5f1d92fe25570ec9525c8faa40c" + integrity sha512-a0tMB40oefvuInr4Cwb3GerbL9xTj1D5yg0T5xrjGCGyfvbxseIXX7BAO/u/hIXdafzOI5JC3wDwHyf24buOAQ== + dependencies: + bluebird "^3.5.5" + chownr "^1.1.1" + figgy-pudding "^3.5.1" + glob "^7.1.4" + graceful-fs "^4.1.15" + infer-owner "^1.0.3" + lru-cache "^5.1.1" + mississippi "^3.0.0" + mkdirp "^0.5.1" + move-concurrently "^1.0.1" + promise-inflight "^1.0.1" + rimraf "^2.6.3" + ssri "^6.0.1" + unique-filename "^1.1.1" + y18n "^4.0.0" + +cache-base@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== + dependencies: + collection-visit "^1.0.0" + component-emitter "^1.2.1" + get-value "^2.0.6" + has-value "^1.0.0" + isobject "^3.0.1" + set-value "^2.0.0" + to-object-path "^0.3.0" + union-value "^1.0.0" + unset-value "^1.0.0" + +cacheable-request@^6.0.0: + version "6.1.0" + resolved 
"https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" + integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== + dependencies: + clone-response "^1.0.2" + get-stream "^5.1.0" + http-cache-semantics "^4.0.0" + keyv "^3.0.0" + lowercase-keys "^2.0.0" + normalize-url "^4.1.0" + responselike "^1.0.2" + +call-bind@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.0.tgz#24127054bb3f9bdcb4b1fb82418186072f77b8ce" + integrity sha512-AEXsYIyyDY3MCzbwdhzG3Jx1R0J2wetQyUynn6dYHAO+bg8l1k7jwZtRv4ryryFs7EP+NDlikJlVe59jr0cM2w== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.0" + +call-me-maybe@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b" + integrity sha1-JtII6onje1y95gJQoV8DHBak1ms= + +caller-callsite@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" + integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= + dependencies: + callsites "^2.0.0" + +caller-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" + integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= + dependencies: + caller-callsite "^2.0.0" + +callsites@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" + integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camelcase-keys@^2.0.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" + integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= + dependencies: + camelcase "^2.0.0" + map-obj "^1.0.0" + +camelcase-keys@^4.0.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-4.2.0.tgz#a2aa5fb1af688758259c32c141426d78923b9b77" + integrity sha1-oqpfsa9oh1glnDLBQUJteJI7m3c= + dependencies: + camelcase "^4.1.0" + map-obj "^2.0.0" + quick-lru "^1.0.0" + +camelcase-keys@^6.2.2: + version "6.2.2" + resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-6.2.2.tgz#5e755d6ba51aa223ec7d3d52f25778210f9dc3c0" + integrity sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg== + dependencies: + camelcase "^5.3.1" + map-obj "^4.0.0" + quick-lru "^4.0.1" + +camelcase@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" + integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= + +camelcase@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" + integrity sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0= + +camelcase@^5.0.0, camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.0.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.2.0.tgz#924af881c9d525ac9d87f40d964e5cea982a1809" + integrity sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg== + +capture-exit@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/capture-exit/-/capture-exit-2.0.0.tgz#fb953bfaebeb781f62898239dabb426d08a509a4" + integrity 
sha512-PiT/hQmTonHhl/HFGN+Lx3JJUznrVYJ3+AQsnthneZbvW7x+f08Tk7yLJTLEOUvBTbduLeeBkxEaYXUOUrRq6g== + dependencies: + rsvp "^4.8.4" + +capture-stack-trace@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz#a6c0bbe1f38f3aa0b92238ecb6ff42c344d4135d" + integrity sha512-mYQLZnx5Qt1JgB1WEiMCf2647plpGeQ2NMR/5L0HNZzGQo4fuSPnK+wjfPnKZV0aiJDgzmWqqkV/g7JD+DW0qw== + +cardinal@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/cardinal/-/cardinal-2.1.1.tgz#7cc1055d822d212954d07b085dea251cc7bc5505" + integrity sha1-fMEFXYItISlU0HsIXeolHMe8VQU= + dependencies: + ansicolors "~0.3.2" + redeyed "~2.1.0" + +caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= + +chai@4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/chai/-/chai-4.2.0.tgz#760aa72cf20e3795e84b12877ce0e83737aa29e5" + integrity sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw== + dependencies: + assertion-error "^1.1.0" + check-error "^1.0.2" + deep-eql "^3.0.1" + get-func-name "^2.0.0" + pathval "^1.1.0" + type-detect "^4.0.5" + +chalk@4.1.0, chalk@^4.0.0, chalk@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a" + integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^1.0.0, chalk@^1.1.1: + version "1.1.3" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= + dependencies: + ansi-styles "^2.2.1" + escape-string-regexp "^1.0.2" + has-ansi "^2.0.0" + strip-ansi "^3.0.0" + supports-color "^2.0.0" + +chalk@^2.0.0, chalk@^2.0.1, 
chalk@^2.3.0, chalk@^2.3.1, chalk@^2.4.1, chalk@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +char-regex@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +chardet@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" + integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== + +check-error@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82" + integrity sha1-V00xLt2Iu13YkS6Sht1sCu1KrII= + +chokidar@^3.2.2: + version "3.4.3" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.4.3.tgz#c1df38231448e45ca4ac588e6c79573ba6a57d5b" + integrity sha512-DtM3g7juCXQxFVSNPNByEC2+NImtBuxQQvWlHunpJIS5Ocr0lG306cC7FCi7cEA0fzmybPUIl4txBIobk1gGOQ== + dependencies: + anymatch "~3.1.1" + braces "~3.0.2" + glob-parent "~5.1.0" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.5.0" + optionalDependencies: + fsevents "~2.1.2" + +chokidar@^3.4.0: + version "3.5.1" + resolved 
"https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.1.tgz#ee9ce7bbebd2b79f49f304799d5468e31e14e68a" + integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw== + dependencies: + anymatch "~3.1.1" + braces "~3.0.2" + glob-parent "~5.1.0" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.5.0" + optionalDependencies: + fsevents "~2.3.1" + +chownr@^1.1.1, chownr@^1.1.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" + integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== + +ci-info@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" + integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== + +cjs-module-lexer@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-0.6.0.tgz#4186fcca0eae175970aee870b9fe2d6cf8d5655f" + integrity sha512-uc2Vix1frTfnuzxxu1Hp4ktSvM3QaI4oXl4ZUqL1wjTu/BGki9TrCWoqLTg/drR1KwAEarXuRFCG2Svr1GxPFw== + +class-utils@^0.3.5: + version "0.3.6" + resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== + dependencies: + arr-union "^3.1.0" + define-property "^0.2.5" + isobject "^3.0.0" + static-extend "^0.1.1" + +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== + +clean-stack@^3.0.0: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/clean-stack/-/clean-stack-3.0.1.tgz#155bf0b2221bf5f4fba89528d24c5953f17fe3a8" + integrity sha512-lR9wNiMRcVQjSB3a7xXGLuz4cr4wJuuXlaAEbRutGowQTmlp7R72/DOgN21e8jdwblMWl9UOJMJXarX94pzKdg== + dependencies: + escape-string-regexp "4.0.0" + +cli-boxes@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-1.0.0.tgz#4fa917c3e59c94a004cd61f8ee509da651687143" + integrity sha1-T6kXw+WclKAEzWH47lCdplFocUM= + +cli-boxes@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f" + integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw== + +cli-cursor@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= + dependencies: + restore-cursor "^2.0.0" + +cli-cursor@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" + integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== + dependencies: + restore-cursor "^3.1.0" + +cli-highlight@^2.1.4: + version "2.1.10" + resolved "https://registry.yarnpkg.com/cli-highlight/-/cli-highlight-2.1.10.tgz#26a087da9209dce4fcb8cf5427dc97cd96ac173a" + integrity sha512-CcPFD3JwdQ2oSzy+AMG6j3LRTkNjM82kzcSKzoVw6cLanDCJNlsLjeqVTOTfOfucnWv5F0rmBemVf1m9JiIasw== + dependencies: + chalk "^4.0.0" + highlight.js "^10.0.0" + mz "^2.4.0" + parse5 "^5.1.1" + parse5-htmlparser2-tree-adapter "^6.0.0" + yargs "^16.0.0" + +cli-progress@^3.4.0: + version "3.8.2" + resolved "https://registry.yarnpkg.com/cli-progress/-/cli-progress-3.8.2.tgz#abaf1fc6d6401351f16f068117a410554a0eb8c7" + integrity sha512-qRwBxLldMSfxB+YGFgNRaj5vyyHe1yMpVeDL79c+7puGujdKJHQHydgqXDcrkvQgJ5U/d3lpf6vffSoVVUftVQ== + dependencies: + colors 
"^1.1.2" + string-width "^4.2.0" + +cli-table@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/cli-table/-/cli-table-0.3.1.tgz#f53b05266a8b1a0b934b3d0821e6e2dc5914ae23" + integrity sha1-9TsFJmqLGguTSz0IIebi3FkUriM= + dependencies: + colors "1.0.3" + +cli-ux@^4.9.0: + version "4.9.3" + resolved "https://registry.yarnpkg.com/cli-ux/-/cli-ux-4.9.3.tgz#4c3e070c1ea23eef010bbdb041192e0661be84ce" + integrity sha512-/1owvF0SZ5Gn54cgrikJ0QskgTzeg30HGjkmjFoaHDJzAqFpuX1DBpFR8aLvsE1J5s9MgeYRENQK4BFwOag5VA== + dependencies: + "@oclif/errors" "^1.2.2" + "@oclif/linewrap" "^1.0.0" + "@oclif/screen" "^1.0.3" + ansi-escapes "^3.1.0" + ansi-styles "^3.2.1" + cardinal "^2.1.1" + chalk "^2.4.1" + clean-stack "^2.0.0" + extract-stack "^1.0.0" + fs-extra "^7.0.0" + hyperlinker "^1.0.0" + indent-string "^3.2.0" + is-wsl "^1.1.0" + lodash "^4.17.11" + password-prompt "^1.0.7" + semver "^5.6.0" + strip-ansi "^5.0.0" + supports-color "^5.5.0" + supports-hyperlinks "^1.0.1" + treeify "^1.1.0" + tslib "^1.9.3" + +cli-ux@^5.2.1, cli-ux@^5.5.0: + version "5.5.1" + resolved "https://registry.yarnpkg.com/cli-ux/-/cli-ux-5.5.1.tgz#99d28dae0c3ef7845fa2ea56e066a1d5fcceca9e" + integrity sha512-t3DT1U1C3rArLGYLpKa3m9dr/8uKZRI8HRm/rXKL7UTjm4c+Yd9zHNWg1tP8uaJkUbhmvx5SQHwb3VWpPUVdHQ== + dependencies: + "@oclif/command" "^1.6.0" + "@oclif/errors" "^1.2.1" + "@oclif/linewrap" "^1.0.0" + "@oclif/screen" "^1.0.3" + ansi-escapes "^4.3.0" + ansi-styles "^4.2.0" + cardinal "^2.1.1" + chalk "^4.1.0" + clean-stack "^3.0.0" + cli-progress "^3.4.0" + extract-stack "^2.0.0" + fs-extra "^8.1" + hyperlinker "^1.0.0" + indent-string "^4.0.0" + is-wsl "^2.2.0" + js-yaml "^3.13.1" + lodash "^4.17.11" + natural-orderby "^2.0.1" + object-treeify "^1.1.4" + password-prompt "^1.1.2" + semver "^7.3.2" + string-width "^4.2.0" + strip-ansi "^6.0.0" + supports-color "^7.1.0" + supports-hyperlinks "^2.1.0" + tslib "^2.0.0" + +cli-width@^2.0.0: + version "2.2.1" + resolved 
"https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.1.tgz#b0433d0b4e9c847ef18868a4ef16fd5fc8271c48" + integrity sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw== + +cli-width@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" + integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== + +cliui@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" + integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== + dependencies: + string-width "^3.1.0" + strip-ansi "^5.2.0" + wrap-ansi "^5.1.0" + +cliui@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" + integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^6.2.0" + +cliui@^7.0.2: + version "7.0.4" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +clone-buffer@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/clone-buffer/-/clone-buffer-1.0.0.tgz#e3e25b207ac4e701af721e2cb5a16792cac3dc58" + integrity sha1-4+JbIHrE5wGvch4staFnksrD3Fg= + +clone-deep@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" + integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of 
"^6.0.2" + shallow-clone "^3.0.0" + +clone-response@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" + integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= + dependencies: + mimic-response "^1.0.0" + +clone-stats@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/clone-stats/-/clone-stats-1.0.0.tgz#b3782dff8bb5474e18b9b6bf0fdfe782f8777680" + integrity sha1-s3gt/4u1R04Yuba/D9/ngvh3doA= + +clone@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4= + +clone@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" + integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= + +cloneable-readable@^1.0.0: + version "1.1.3" + resolved "https://registry.yarnpkg.com/cloneable-readable/-/cloneable-readable-1.1.3.tgz#120a00cb053bfb63a222e709f9683ea2e11d8cec" + integrity sha512-2EF8zTQOxYq70Y4XKtorQupqF0m49MBz2/yf5Bj+MHjvpG3Hy7sImifnqD6UA+TKYxeSV+u6qqQPawN5UvnpKQ== + dependencies: + inherits "^2.0.1" + process-nextick-args "^2.0.0" + readable-stream "^2.3.5" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= + +code-point-at@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= + +codecov@^3.8.1: + version "3.8.1" + resolved "https://registry.yarnpkg.com/codecov/-/codecov-3.8.1.tgz#06fe026b75525ed1ce864d4a34f1010c52c51546" + integrity sha512-Qm7ltx1pzLPsliZY81jyaQ80dcNR4/JpcX0IHCIWrHBXgseySqbdbYfkdiXd7o/xmzQpGRVCKGYeTrHUpn6Dcw== + dependencies: + argv "0.0.2" + ignore-walk "3.0.3" + js-yaml "3.14.0" + teeny-request "6.0.1" 
+ urlgrey "0.4.4" + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +collection-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= + dependencies: + map-visit "^1.0.0" + object-visit "^1.0.0" + +color-convert@^1.9.0, color-convert@^1.9.1: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + +color-name@^1.0.0, color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +color-string@^1.5.2: + version "1.5.4" + resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.4.tgz#dd51cd25cfee953d138fe4002372cc3d0e504cb6" + integrity sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw== + 
dependencies: + color-name "^1.0.0" + simple-swizzle "^0.2.2" + +color@3.0.x: + version "3.0.0" + resolved "https://registry.yarnpkg.com/color/-/color-3.0.0.tgz#d920b4328d534a3ac8295d68f7bd4ba6c427be9a" + integrity sha512-jCpd5+s0s0t7p3pHQKpnJ0TpQKKdleP71LWcA0aqiljpiuAkOSUFN/dyH8ZwF0hRmFlrIuRhufds1QyEP9EB+w== + dependencies: + color-convert "^1.9.1" + color-string "^1.5.2" + +colors@*, colors@1.4.0, colors@^1.1.2, colors@^1.2.1: + version "1.4.0" + resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" + integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== + +colors@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/colors/-/colors-1.0.3.tgz#0433f44d809680fdeb60ed260f1b0c262e82a40b" + integrity sha1-BDP0TYCWgP3rYO0mDxsMJi6CpAs= + +colorspace@1.1.x: + version "1.1.2" + resolved "https://registry.yarnpkg.com/colorspace/-/colorspace-1.1.2.tgz#e0128950d082b86a2168580796a0aa5d6c68d8c5" + integrity sha512-vt+OoIP2d76xLhjwbBaucYlNSpPsrJWPlBTtwCpQKIu6/CSMutyzX93O/Do0qzpH3YoHEes8YEFXyZ797rEhzQ== + dependencies: + color "3.0.x" + text-hex "1.0.x" + +columnify@^1.5.4: + version "1.5.4" + resolved "https://registry.yarnpkg.com/columnify/-/columnify-1.5.4.tgz#4737ddf1c7b69a8a7c340570782e947eec8e78bb" + integrity sha1-Rzfd8ce2mop8NAVweC6UfuyOeLs= + dependencies: + strip-ansi "^3.0.0" + wcwidth "^1.0.0" + +combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +commander@2.6.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.6.0.tgz#9df7e52fb2a0cb0fb89058ee80c3104225f37e1d" + integrity sha1-nfflL7Kgyw+4kFjugMMQQiXzfh0= 
+ +commander@6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" + integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA== + +commander@^6.0.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.0.tgz#b990bfb8ac030aedc6d11bc04d1488ffef56db75" + integrity sha512-zP4jEKbe8SHzKJYQmq8Y9gYjtO/POJLgIdKgV7B9qNmABVFVc+ctqSX6iXh4mCpJfRBOabiZ2YKPg8ciDw6C+Q== + +common-tags@^1.4.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.0.tgz#8e3153e542d4a39e9b10554434afaaf98956a937" + integrity sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw== + +commondir@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= + +compare-func@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/compare-func/-/compare-func-2.0.0.tgz#fb65e75edbddfd2e568554e8b5b05fff7a51fcb3" + integrity sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA== + dependencies: + array-ify "^1.0.0" + dot-prop "^5.1.0" + +compare-versions@3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/compare-versions/-/compare-versions-3.6.0.tgz#1a5689913685e5a87637b8d3ffca75514ec41d62" + integrity sha512-W6Af2Iw1z4CB7q4uU4hv646dW9GQuBM+YpC0UvUCWSD8w90SJjp+ujJuXaEMtAXBtSqGfMPuFOVn4/+FlaqfBA== + +component-emitter@^1.2.1, component-emitter@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" + integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== + +concat-map@0.0.1: + version "0.0.1" + resolved 
"https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +concat-stream@^1.5.0, concat-stream@^1.5.2: + version "1.6.2" + resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" + integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== + dependencies: + buffer-from "^1.0.0" + inherits "^2.0.3" + readable-stream "^2.2.2" + typedarray "^0.0.6" + +concat-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-2.0.0.tgz#414cf5af790a48c60ab9be4527d56d5e41133cb1" + integrity sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A== + dependencies: + buffer-from "^1.0.0" + inherits "^2.0.3" + readable-stream "^3.0.2" + typedarray "^0.0.6" + +concurrently@5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/concurrently/-/concurrently-5.3.0.tgz#7500de6410d043c912b2da27de3202cb489b1e7b" + integrity sha512-8MhqOB6PWlBfA2vJ8a0bSFKATOdWlHiQlk11IfmQBPaHVP8oP2gsh2MObE6UR3hqDHqvaIvLTyceNW6obVuFHQ== + dependencies: + chalk "^2.4.2" + date-fns "^2.0.1" + lodash "^4.17.15" + read-pkg "^4.0.1" + rxjs "^6.5.2" + spawn-command "^0.0.2-1" + supports-color "^6.1.0" + tree-kill "^1.2.2" + yargs "^13.3.0" + +concurrently@^3.4.0: + version "3.6.1" + resolved "https://registry.yarnpkg.com/concurrently/-/concurrently-3.6.1.tgz#2f95baec5c4051294dfbb55b57a3b98a3e2b45ec" + integrity sha512-/+ugz+gwFSEfTGUxn0KHkY+19XPRTXR8+7oUK/HxgiN1n7FjeJmkrbSiXAJfyQ0zORgJYPaenmymwon51YXH9Q== + dependencies: + chalk "^2.4.1" + commander "2.6.0" + date-fns "^1.23.0" + lodash "^4.5.1" + read-pkg "^3.0.0" + rx "2.3.24" + spawn-command "^0.0.2-1" + supports-color "^3.2.3" + tree-kill "^1.1.0" + +config-chain@^1.1.11: + version "1.1.12" + resolved 
"https://registry.yarnpkg.com/config-chain/-/config-chain-1.1.12.tgz#0fde8d091200eb5e808caf25fe618c02f48e4efa" + integrity sha512-a1eOIcu8+7lUInge4Rpf/n4Krkf3Dd9lqhljRzII1/Zno/kRtUWnznPO3jOKBmTEktkt3fkxisUcivoj0ebzoA== + dependencies: + ini "^1.3.4" + proto-list "~1.2.1" + +configstore@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96" + integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA== + dependencies: + dot-prop "^5.2.0" + graceful-fs "^4.1.2" + make-dir "^3.0.0" + unique-string "^2.0.0" + write-file-atomic "^3.0.0" + xdg-basedir "^4.0.0" + +connect@^3.7.0: + version "3.7.0" + resolved "https://registry.yarnpkg.com/connect/-/connect-3.7.0.tgz#5d49348910caa5e07a01800b030d0c35f20484f8" + integrity sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ== + dependencies: + debug "2.6.9" + finalhandler "1.1.2" + parseurl "~1.3.3" + utils-merge "1.0.1" + +consola@2.15.0, consola@^2.15.0: + version "2.15.0" + resolved "https://registry.yarnpkg.com/consola/-/consola-2.15.0.tgz#40fc4eefa4d2f8ef2e2806147f056ea207fcc0e9" + integrity sha512-vlcSGgdYS26mPf7qNi+dCisbhiyDnrN1zaRbw3CSuc2wGOMEGGPsp46PdRG5gqXwgtJfjxDkxRNAgRPr1B77vQ== + +console-control-strings@^1.0.0, console-control-strings@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" + integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= + +console.table@0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/console.table/-/console.table-0.10.0.tgz#0917025588875befd70cf2eff4bef2c6e2d75d04" + integrity sha1-CRcCVYiHW+/XDPLv9L7yxuLXXQQ= + dependencies: + easy-table "1.1.0" + +contains-path@^0.1.0: + version "0.1.0" + resolved 
"https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a" + integrity sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo= + +content-disposition@0.5.3: + version "0.5.3" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" + integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== + dependencies: + safe-buffer "5.1.2" + +content-type@^1.0.4, content-type@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +conventional-changelog-angular@^5.0.3: + version "5.0.12" + resolved "https://registry.yarnpkg.com/conventional-changelog-angular/-/conventional-changelog-angular-5.0.12.tgz#c979b8b921cbfe26402eb3da5bbfda02d865a2b9" + integrity sha512-5GLsbnkR/7A89RyHLvvoExbiGbd9xKdKqDTrArnPbOqBqG/2wIosu0fHwpeIRI8Tl94MhVNBXcLJZl92ZQ5USw== + dependencies: + compare-func "^2.0.0" + q "^1.5.1" + +conventional-changelog-core@^3.1.6: + version "3.2.3" + resolved "https://registry.yarnpkg.com/conventional-changelog-core/-/conventional-changelog-core-3.2.3.tgz#b31410856f431c847086a7dcb4d2ca184a7d88fb" + integrity sha512-LMMX1JlxPIq/Ez5aYAYS5CpuwbOk6QFp8O4HLAcZxe3vxoCtABkhfjetk8IYdRB9CDQGwJFLR3Dr55Za6XKgUQ== + dependencies: + conventional-changelog-writer "^4.0.6" + conventional-commits-parser "^3.0.3" + dateformat "^3.0.0" + get-pkg-repo "^1.0.0" + git-raw-commits "2.0.0" + git-remote-origin-url "^2.0.0" + git-semver-tags "^2.0.3" + lodash "^4.2.1" + normalize-package-data "^2.3.5" + q "^1.5.1" + read-pkg "^3.0.0" + read-pkg-up "^3.0.0" + through2 "^3.0.0" + +conventional-changelog-preset-loader@^2.1.1: + version "2.3.4" + resolved 
"https://registry.yarnpkg.com/conventional-changelog-preset-loader/-/conventional-changelog-preset-loader-2.3.4.tgz#14a855abbffd59027fd602581f1f34d9862ea44c" + integrity sha512-GEKRWkrSAZeTq5+YjUZOYxdHq+ci4dNwHvpaBC3+ENalzFWuCWa9EZXSuZBpkr72sMdKB+1fyDV4takK1Lf58g== + +conventional-changelog-writer@^4.0.6: + version "4.0.18" + resolved "https://registry.yarnpkg.com/conventional-changelog-writer/-/conventional-changelog-writer-4.0.18.tgz#10b73baa59c7befc69b360562f8b9cd19e63daf8" + integrity sha512-mAQDCKyB9HsE8Ko5cCM1Jn1AWxXPYV0v8dFPabZRkvsiWUul2YyAqbIaoMKF88Zf2ffnOPSvKhboLf3fnjo5/A== + dependencies: + compare-func "^2.0.0" + conventional-commits-filter "^2.0.7" + dateformat "^3.0.0" + handlebars "^4.7.6" + json-stringify-safe "^5.0.1" + lodash "^4.17.15" + meow "^8.0.0" + semver "^6.0.0" + split "^1.0.0" + through2 "^4.0.0" + +conventional-commits-filter@^2.0.2, conventional-commits-filter@^2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/conventional-commits-filter/-/conventional-commits-filter-2.0.7.tgz#f8d9b4f182fce00c9af7139da49365b136c8a0b3" + integrity sha512-ASS9SamOP4TbCClsRHxIHXRfcGCnIoQqkvAzCSbZzTFLfcTqJVugB0agRgsEELsqaeWgsXv513eS116wnlSSPA== + dependencies: + lodash.ismatch "^4.4.0" + modify-values "^1.0.0" + +conventional-commits-parser@^3.0.3: + version "3.2.0" + resolved "https://registry.yarnpkg.com/conventional-commits-parser/-/conventional-commits-parser-3.2.0.tgz#9e261b139ca4b7b29bcebbc54460da36894004ca" + integrity sha512-XmJiXPxsF0JhAKyfA2Nn+rZwYKJ60nanlbSWwwkGwLQFbugsc0gv1rzc7VbbUWAzJfR1qR87/pNgv9NgmxtBMQ== + dependencies: + JSONStream "^1.0.4" + is-text-path "^1.0.1" + lodash "^4.17.15" + meow "^8.0.0" + split2 "^2.0.0" + through2 "^4.0.0" + trim-off-newlines "^1.0.0" + +conventional-recommended-bump@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/conventional-recommended-bump/-/conventional-recommended-bump-5.0.1.tgz#5af63903947b6e089e77767601cb592cabb106ba" + integrity 
sha512-RVdt0elRcCxL90IrNP0fYCpq1uGt2MALko0eyeQ+zQuDVWtMGAy9ng6yYn3kax42lCj9+XBxQ8ZN6S9bdKxDhQ== + dependencies: + concat-stream "^2.0.0" + conventional-changelog-preset-loader "^2.1.1" + conventional-commits-filter "^2.0.2" + conventional-commits-parser "^3.0.3" + git-raw-commits "2.0.0" + git-semver-tags "^2.0.3" + meow "^4.0.0" + q "^1.5.1" + +convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" + integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== + dependencies: + safe-buffer "~5.1.1" + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= + +cookie@0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" + integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== + +cookiejar@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.2.tgz#dd8a235530752f988f9a0844f3fc589e3111125c" + integrity sha512-Mw+adcfzPxcPeI+0WlvRrr/3lGVO0bD75SxX6811cxSh1Wbxx7xZBGK1eVtDf6si8rg2lhnUjsVLMFMfbRIuwA== + +copy-concurrently@^1.0.0: + version "1.0.5" + resolved "https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" + integrity sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A== + dependencies: + aproba "^1.1.1" + fs-write-stream-atomic "^1.0.8" + iferr "^0.1.5" + mkdirp "^0.5.1" + rimraf "^2.5.4" + run-queue "^1.0.0" + +copy-descriptor@^0.1.0: + version "0.1.1" + resolved 
"https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= + +core-util-is@1.0.2, core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= + +cors@2.8.5: + version "2.8.5" + resolved "https://registry.yarnpkg.com/cors/-/cors-2.8.5.tgz#eac11da51592dd86b9f06f6e7ac293b3df875d29" + integrity sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g== + dependencies: + object-assign "^4" + vary "^1" + +cosmiconfig@^5.1.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" + integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== + dependencies: + import-fresh "^2.0.0" + is-directory "^0.3.1" + js-yaml "^3.13.1" + parse-json "^4.0.0" + +cosmiconfig@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.0.tgz#ef9b44d773959cae63ddecd122de23853b60f8d3" + integrity sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +cp-file@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/cp-file/-/cp-file-3.2.0.tgz#6f83616254624f0ad58aa4aa8d076f026be7e188" + integrity sha1-b4NhYlRiTwrViqSqjQdvAmvn4Yg= + dependencies: + graceful-fs "^4.1.2" + mkdirp "^0.5.0" + nested-error-stacks "^1.0.1" + object-assign "^4.0.1" + pify "^2.3.0" + pinkie-promise "^2.0.0" + readable-stream "^2.1.4" + +cpy-cli@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cpy-cli/-/cpy-cli-1.0.1.tgz#67fb5a4a2dec28ca8abff375de4b9e71f6a7561c" + integrity 
sha1-Z/taSi3sKMqKv/N13kuecfanVhw= + dependencies: + cpy "^4.0.0" + meow "^3.6.0" + +cpy@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/cpy/-/cpy-4.0.1.tgz#b67267eba2f3960ba06a5a61ac94033422833424" + integrity sha1-tnJn66LzlgugalphrJQDNCKDNCQ= + dependencies: + cp-file "^3.1.0" + globby "^4.0.0" + meow "^3.6.0" + nested-error-stacks "^1.0.0" + object-assign "^4.0.1" + pinkie-promise "^2.0.0" + +create-error-class@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/create-error-class/-/create-error-class-3.0.2.tgz#06be7abef947a3f14a30fd610671d401bca8b7b6" + integrity sha1-Br56vvlHo/FKMP1hBnHUAbyot7Y= + dependencies: + capture-stack-trace "^1.0.0" + +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + +cross-env@^3.1.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-3.2.4.tgz#9e0585f277864ed421ce756f81a980ff0d698aba" + integrity sha1-ngWF8neGTtQhznVvgamA/w1piro= + dependencies: + cross-spawn "^5.1.0" + is-windows "^1.0.0" + +cross-fetch@^3.0.5: + version "3.0.6" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.0.6.tgz#3a4040bc8941e653e0e9cf17f29ebcd177d3365c" + integrity sha512-KBPUbqgFjzWlVcURG+Svp9TlhA5uliYtiNx/0r8nv0pdypeQCRJ9IaSIc3q/x3q8t3F75cHuwxVql1HFGHCNJQ== + dependencies: + node-fetch "2.6.1" + +cross-spawn@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" + integrity sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk= + dependencies: + lru-cache "^4.0.1" + shebang-command "^1.2.0" + which "^1.2.9" + +cross-spawn@^6.0.0, cross-spawn@^6.0.5: + version "6.0.5" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" + 
integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== + dependencies: + nice-try "^1.0.4" + path-key "^2.0.1" + semver "^5.5.0" + shebang-command "^1.2.0" + which "^1.2.9" + +cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +crypto-random-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== + +cssom@^0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + +cssom@~0.3.6: + version "0.3.8" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + dependencies: + cssom "~0.3.6" + +currently-unhandled@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" + integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= + dependencies: + array-find-index "^1.0.1" + +cyclist@^1.0.1: + 
version "1.0.1" + resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" + integrity sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk= + +dargs@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/dargs/-/dargs-4.1.0.tgz#03a9dbb4b5c2f139bf14ae53f0b8a2a6a86f4e17" + integrity sha1-A6nbtLXC8Tm/FK5T8LiipqhvThc= + dependencies: + number-is-nan "^1.0.0" + +dargs@^6.0.0, dargs@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/dargs/-/dargs-6.1.0.tgz#1f3b9b56393ecf8caa7cbfd6c31496ffcfb9b272" + integrity sha512-5dVBvpBLBnPwSsYXqfybFyehMmC/EenKEcf23AhCTgTf48JFBbmJKqoZBsERDnjL0FyiVTYWdFsRfTLHxLyKdQ== + +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= + dependencies: + assert-plus "^1.0.0" + +data-urls@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== + dependencies: + abab "^2.0.3" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + +date-fns@2.16.1, date-fns@^2.0.1: + version "2.16.1" + resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.16.1.tgz#05775792c3f3331da812af253e1a935851d3834b" + integrity sha512-sAJVKx/FqrLYHAQeN7VpJrPhagZc9R4ImZIWYRFZaaohR3KzmuK88touwsSwSVT8Qcbd4zoDsnGfX4GFB4imyQ== + +date-fns@^1.23.0: + version "1.30.1" + resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-1.30.1.tgz#2e71bf0b119153dbb4cc4e88d9ea5acfb50dc05c" + integrity sha512-hBSVCvSmWC+QypYObzwGOd9wqdDpOt+0wl0KbU+R+uuZBS1jN8VsD1ss3irQDknRj5NvxiTF6oj/nDRnN/UQNw== + +dateformat@^3.0.0, dateformat@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-3.0.3.tgz#a6e37499a4d9a9cf85ef5872044d62901c9889ae" + integrity 
sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q== + +debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@3.1.0, debug@=3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" + integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== + dependencies: + ms "2.0.0" + +debug@4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.2.0: + version "4.3.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" + integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== + dependencies: + ms "2.1.2" + +debug@^3.1.0, debug@^3.2.6: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +debuglog@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492" + integrity sha1-qiT/uaw9+aI1GDfPstJ5NgzXhJI= + +decamelize-keys@^1.0.0, decamelize-keys@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/decamelize-keys/-/decamelize-keys-1.1.0.tgz#d171a87933252807eb3cb61dc1c1445d078df2d9" + integrity sha1-0XGoeTMlKAfrPLYdwcFEXQeN8tk= + dependencies: + decamelize "^1.1.0" + map-obj "^1.0.0" + +decamelize@^1.1.0, decamelize@^1.1.2, decamelize@^1.2.0: + version "1.2.0" + resolved 
"https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= + +decimal.js@^10.2.0: + version "10.2.1" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.2.1.tgz#238ae7b0f0c793d3e3cea410108b35a2c01426a3" + integrity sha512-KaL7+6Fw6i5A2XSnsbhm/6B+NuEA7TZ4vqxnd5tXz9sbKtrN9Srj8ab4vKVdK8YAqZO9P1kg45Y6YLoduPf+kw== + +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= + +decompress-response@^3.2.0, decompress-response@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" + integrity sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M= + dependencies: + mimic-response "^1.0.0" + +dedent@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= + +deep-eql@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df" + integrity sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw== + dependencies: + type-detect "^4.0.0" + +deep-extend@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" + integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== + +deep-is@^0.1.3, deep-is@~0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" + integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= + +deepmerge@^4.2.2: + version "4.2.2" + resolved 
"https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +defaults@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" + integrity sha1-xlYFHpgX2f8I7YgUd/P+QBnz730= + dependencies: + clone "^1.0.2" + +defer-to-connect@^1.0.1: + version "1.1.3" + resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" + integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== + +deferred-leveldown@~5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz#27a997ad95408b61161aa69bd489b86c71b78058" + integrity sha512-a59VOT+oDy7vtAbLRCZwWgxu2BaCfd5Hk7wxJd48ei7I+nsg8Orlb9CLG0PMZienk9BSUKgeAqkO2+Lw+1+Ukw== + dependencies: + abstract-leveldown "~6.2.1" + inherits "^2.0.3" + +define-properties@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + +define-property@^0.2.5: + version "0.2.5" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= + dependencies: + is-descriptor "^0.1.0" + +define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= + dependencies: + is-descriptor "^1.0.0" + +define-property@^2.0.2: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" + integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== + dependencies: + is-descriptor "^1.0.2" + isobject "^3.0.1" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + +delegates@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" + integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= + +depd@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= + +deprecation@^2.0.0, deprecation@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919" + integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ== + +destroy@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" + integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= + +detect-conflict@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/detect-conflict/-/detect-conflict-1.0.1.tgz#088657a66a961c05019db7c4230883b1c6b4176e" + integrity sha1-CIZXpmqWHAUBnbfEIwiDsca0F24= + +detect-indent@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-5.0.0.tgz#3871cc0a6a002e8c3e5b3cf7f336264675f06b9d" + integrity sha1-OHHMCmoALow+Wzz38zYmRnXwa50= + +detect-indent@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-6.0.0.tgz#0abd0f549f69fc6659a254fe96786186b6f528fd" + integrity 
sha512-oSyFlqaTHCItVRGK5RmrmjB+CmaMOW7IaNA/kdxqhoa6d17j/5ce9O9eWXmV/KEdRwqpQA+Vqe8a8Bsybu4YnA== + +detect-libc@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" + integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= + +detect-newline@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +dezalgo@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/dezalgo/-/dezalgo-1.0.3.tgz#7f742de066fc748bc8db820569dddce49bf0d456" + integrity sha1-f3Qt4Gb8dIvI24IFad3c5Jvw1FY= + dependencies: + asap "^2.0.0" + wrappy "1" + +dicer@0.2.5: + version "0.2.5" + resolved "https://registry.yarnpkg.com/dicer/-/dicer-0.2.5.tgz#5996c086bb33218c812c090bddc09cd12facb70f" + integrity sha1-WZbAhrszIYyBLAkL3cCc0S+stw8= + dependencies: + readable-stream "1.1.x" + streamsearch "0.1.2" + +diff-sequences@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-26.6.2.tgz#48ba99157de1923412eed41db6b6d4aa9ca7c0b1" + integrity sha512-Mv/TDa3nZ9sbc5soK+OoA74BsS3mL37yixCvUAQkiuA4Wz6YtwP/K47n2rv2ovzHZvoiQeA5FTQOschKkEwB0Q== + +diff@^3.5.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12" + integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA== + +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + +difunc@0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/difunc/-/difunc-0.0.4.tgz#09322073e67f82effd2f22881985e7d3e441b3ac" + integrity 
sha512-zBiL4ALDmviHdoLC0g0G6wVme5bwAow9WfhcZLLopXCAWgg3AEf7RYTs2xugszIGulRHzEVDF/SHl9oyQU07Pw== + dependencies: + esprima "^4.0.0" + +dir-glob@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.0.0.tgz#0b205d2b6aef98238ca286598a8204d29d0a0034" + integrity sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag== + dependencies: + arrify "^1.0.1" + path-type "^3.0.0" + +dir-glob@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4" + integrity sha512-f9LBi5QWzIW3I6e//uxZoLBlUt9kcp66qo0sSCxL6YZKc75R1c4MFCoe/LaZiBGmgujvQdxc5Bn3QhfyvK5Hsw== + dependencies: + path-type "^3.0.0" + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +doctrine@1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-1.5.0.tgz#379dce730f6166f76cefa4e6707a159b02c5a6fa" + integrity sha1-N53Ocw9hZvds76TmcHoVmwLFpvo= + dependencies: + esutils "^2.0.2" + isarray "^1.0.0" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +domexception@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/domexception/-/domexception-1.0.1.tgz#937442644ca6a31261ef36e3ec677fe805582c90" + integrity sha512-raigMkn7CJNNo6Ihro1fzG7wr3fHuYVytzquZKX5n0yizGsTcYgzdIUwj1X9pK0VvjeihV+XiclP+DjwbsSKug== + dependencies: + webidl-conversions "^4.0.2" + +domexception@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + dependencies: + webidl-conversions "^5.0.0" + +dot-prop@^4.2.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-4.2.1.tgz#45884194a71fc2cda71cbb4bceb3a4dd2f433ba4" + integrity sha512-l0p4+mIuJIua0mhxGoh4a+iNL9bmeK5DvnSVQa6T0OhrVmaEa1XScX5Etc673FePCJOArq/4Pa2cLGODUWTPOQ== + dependencies: + is-obj "^1.0.0" + +dot-prop@^5.1.0, dot-prop@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88" + integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== + dependencies: + is-obj "^2.0.0" + +dotenv@8.2.0, dotenv@^8.2.0: + version "8.2.0" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.2.0.tgz#97e619259ada750eea3e4ea3e26bceea5424b16a" + integrity sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw== + +download-stats@^0.3.4: + version "0.3.4" + resolved "https://registry.yarnpkg.com/download-stats/-/download-stats-0.3.4.tgz#67ea0c32f14acd9f639da704eef509684ba2dae7" + integrity sha512-ic2BigbyUWx7/CBbsfGjf71zUNZB4edBGC3oRliSzsoNmvyVx3Ycfp1w3vp2Y78Ee0eIIkjIEO5KzW0zThDGaA== + dependencies: + JSONStream "^1.2.1" + lazy-cache "^2.0.1" + moment "^2.15.1" + +dtsgenerator@^3.3.1: + version "3.3.1" + resolved "https://registry.yarnpkg.com/dtsgenerator/-/dtsgenerator-3.3.1.tgz#00ab61551e646569043794137f709cc068f3347c" + integrity 
sha512-agBZATxyr+iUs/THNqJUxVzSrc4VK6c+U4kI5/MhSUL+vcvUi1eiZWpOHQz5QVz4GgQshlRf2HqBn/FzX3sShA== + dependencies: + commander "^6.0.0" + cross-fetch "^3.0.5" + debug "^4.1.1" + glob "^7.1.6" + https-proxy-agent "^5.0.0" + js-yaml "^3.14.0" + mkdirp "^1.0.4" + tslib "^2.0.0" + typescript "^3.9.7" + +duplexer3@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" + integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= + +duplexer@^0.1.1, duplexer@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + +duplexify@^3.4.2, duplexify@^3.6.0: + version "3.7.1" + resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" + integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== + dependencies: + end-of-stream "^1.0.0" + inherits "^2.0.1" + readable-stream "^2.0.0" + stream-shift "^1.0.0" + +easy-stack@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/easy-stack/-/easy-stack-1.0.1.tgz#8afe4264626988cabb11f3c704ccd0c835411066" + integrity sha512-wK2sCs4feiiJeFXn3zvY0p41mdU5VUgbgs1rNsc/y5ngFUijdWd+iIN8eoyuZHKB8xN6BL4PdWmzqFmxNg6V2w== + +easy-table@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/easy-table/-/easy-table-1.1.0.tgz#86f9ab4c102f0371b7297b92a651d5824bc8cb73" + integrity sha1-hvmrTBAvA3G3KXuSplHVgkvIy3M= + optionalDependencies: + wcwidth ">=1.0.1" + +ecc-jsbn@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" + integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= + dependencies: + jsbn "~0.1.0" + safer-buffer "^2.1.0" + +editions@^2.2.0: + version "2.3.1" + resolved 
"https://registry.yarnpkg.com/editions/-/editions-2.3.1.tgz#3bc9962f1978e801312fbd0aebfed63b49bfe698" + integrity sha512-ptGvkwTvGdGfC0hfhKg0MT+TRLRKGtUiWGBInxOm5pz7ssADezahjCUaYuZ8Dr+C05FW0AECIIPt4WBxVINEhA== + dependencies: + errlop "^2.0.0" + semver "^6.3.0" + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= + +ejs@^2.5.9, ejs@^2.6.1: + version "2.7.4" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.7.4.tgz#48661287573dcc53e366c7a1ae52c3a120eec9ba" + integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA== + +ejs@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.5.tgz#aed723844dc20acb4b170cd9ab1017e476a0d93b" + integrity sha512-dldq3ZfFtgVTJMLjOe+/3sROTzALlL9E34V4/sDtUd/KlBSS0s6U1/+WPE1B4sj9CXHJpL1M6rhNJnc9Wbal9w== + dependencies: + jake "^10.6.1" + +emittery@^0.7.1: + version "0.7.2" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.7.2.tgz#25595908e13af0f5674ab419396e2fb394cdfa82" + integrity sha512-A8OG5SR/ij3SsJdWDJdkkSYUjQdCUx6APQXem0SaEePBSRg4eymGYwBkKo1Y6DU+af/Jn2dBQqDBvjnr9Vi8nQ== + +"emoji-regex@>=6.0.0 <=6.1.1": + version "6.1.1" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-6.1.1.tgz#c6cd0ec1b0642e2a3c67a1137efc5e796da4f88e" + integrity sha1-xs0OwbBkLio8Z6ETfvxeeW2k+I4= + +emoji-regex@^7.0.1: + version "7.0.3" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" + integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity 
sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +enabled@2.0.x: + version "2.0.0" + resolved "https://registry.yarnpkg.com/enabled/-/enabled-2.0.0.tgz#f9dd92ec2d6f4bbc0d5d1e64e21d61cd4665e7c2" + integrity sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= + +encoding@^0.1.11: + version "0.1.13" + resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.13.tgz#56574afdd791f54a8e9b2785c0582a2d26210fa9" + integrity sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A== + dependencies: + iconv-lite "^0.6.2" + +end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1: + version "1.4.4" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + +enquirer@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" + integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== + dependencies: + ansi-colors "^4.1.1" + +env-paths@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.0.tgz#cdca557dc009152917d6166e2febe1f039685e43" + integrity sha512-6u0VYSCo/OW6IoD5WCLLy9JUGARbamfSavcNXry/eu8aHVFei6CD3Sw+VGX5alea1i9pgPHW0mbu6Xj0uBh7gA== + +envinfo@^7.3.1: + version "7.7.3" + resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.7.3.tgz#4b2d8622e3e7366afb8091b23ed95569ea0208cc" + integrity 
sha512-46+j5QxbPWza0PB1i15nZx0xQ4I/EfQxg9J8Had3b408SV63nEtor2e+oiY63amTo9KTuh2a3XLObNwduxYwwA== + +err-code@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/err-code/-/err-code-1.1.2.tgz#06e0116d3028f6aef4806849eb0ea6a748ae6960" + integrity sha1-BuARbTAo9q70gGhJ6w6mp0iuaWA= + +err-code@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/err-code/-/err-code-2.0.3.tgz#23c2f3b756ffdfc608d30e27c9a941024807e7f9" + integrity sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA== + +errlop@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/errlop/-/errlop-2.2.0.tgz#1ff383f8f917ae328bebb802d6ca69666a42d21b" + integrity sha512-e64Qj9+4aZzjzzFpZC7p5kmm/ccCrbLhAJplhsDXQFs87XTsXwOpH4s1Io2s90Tau/8r2j9f4l/thhDevRjzxw== + +errno@~0.1.1: + version "0.1.7" + resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" + integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== + dependencies: + prr "~1.0.1" + +error-ex@^1.2.0, error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +error@^7.0.2: + version "7.2.1" + resolved "https://registry.yarnpkg.com/error/-/error-7.2.1.tgz#eab21a4689b5f684fc83da84a0e390de82d94894" + integrity sha512-fo9HBvWnx3NGUKMvMwB/CBCMMrfEJgbDTVDEkPygA3Bdd3lM1OyCd+rbQ8BwnpF6GdVeOLDNmyL4N5Bg80ZvdA== + dependencies: + string-template "~0.2.1" + +es-abstract@^1.17.0-next.1: + version "1.17.7" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.7.tgz#a4de61b2f66989fc7421676c1cb9787573ace54c" + integrity sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g== + dependencies: + 
es-to-primitive "^1.2.1" + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + is-callable "^1.2.2" + is-regex "^1.1.1" + object-inspect "^1.8.0" + object-keys "^1.1.1" + object.assign "^4.1.1" + string.prototype.trimend "^1.0.1" + string.prototype.trimstart "^1.0.1" + +es-abstract@^1.18.0-next.0, es-abstract@^1.18.0-next.1: + version "1.18.0-next.1" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.0-next.1.tgz#6e3a0a4bda717e5023ab3b8e90bec36108d22c68" + integrity sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA== + dependencies: + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + is-callable "^1.2.2" + is-negative-zero "^2.0.0" + is-regex "^1.1.1" + object-inspect "^1.8.0" + object-keys "^1.1.1" + object.assign "^4.1.1" + string.prototype.trimend "^1.0.1" + string.prototype.trimstart "^1.0.1" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +es6-promise@^4.0.3: + version "4.2.8" + resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a" + integrity sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w== + +es6-promisify@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-5.0.0.tgz#5109d62f3e56ea967c4b63505aef08291c8a5203" + integrity sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM= + dependencies: + es6-promise "^4.0.3" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity 
sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-goat@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675" + integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q== + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= + +escape-string-regexp@4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escodegen@^1.14.1: + version "1.14.3" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.14.3.tgz#4e7b81fba61581dc97582ed78cab7f0e8d63f503" + integrity sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw== + dependencies: + esprima "^4.0.1" + estraverse "^4.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +eslint-config-prettier@6.11.0: + version "6.11.0" + resolved 
"https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-6.11.0.tgz#f6d2238c1290d01c859a8b5c1f7d352a0b0da8b1" + integrity sha512-oB8cpLWSAjOVFEJhhyMZh6NOEOtBVziaqdDQ86+qhDHFbZXoRTM7pNSvFRfW/W/L/LrQ38C99J5CGuRBBzBsdA== + dependencies: + get-stdin "^6.0.0" + +eslint-config-prettier@7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-7.0.0.tgz#c1ae4106f74e6c0357f44adb076771d032ac0e97" + integrity sha512-8Y8lGLVPPZdaNA7JXqnvETVC7IiVRgAP6afQu9gOQRn90YY3otMNh+x7Vr2vMePQntF+5erdSUBqSzCmU/AxaQ== + +eslint-import-resolver-node@^0.3.4: + version "0.3.4" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz#85ffa81942c25012d8231096ddf679c03042c717" + integrity sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA== + dependencies: + debug "^2.6.9" + resolve "^1.13.1" + +eslint-module-utils@^2.6.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.6.0.tgz#579ebd094f56af7797d19c9866c9c9486629bfa6" + integrity sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA== + dependencies: + debug "^2.6.9" + pkg-dir "^2.0.0" + +eslint-plugin-header@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-header/-/eslint-plugin-header-3.1.0.tgz#5e6819489a7722ae0c5c237387f78350d755c1d5" + integrity sha512-jKKcwMsB0/ftBv3UVmuQir1f8AmXzTS9rdzPkileW8/Nz9ivdea8vOU1ZrMbX+WH6CpwnHEo3403baSHk40Mag== + +eslint-plugin-import@^2.22.0: + version "2.22.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.22.1.tgz#0896c7e6a0cf44109a2d97b95903c2bb689d7702" + integrity sha512-8K7JjINHOpH64ozkAhpT3sd+FswIZTfMZTjdx052pnWrgRCVfp8op9tbjpAk3DdUeI/Ba4C8OjdC0r90erHEOw== + dependencies: + array-includes "^3.1.1" + array.prototype.flat "^1.2.3" + contains-path "^0.1.0" + debug "^2.6.9" + 
doctrine "1.5.0" + eslint-import-resolver-node "^0.3.4" + eslint-module-utils "^2.6.0" + has "^1.0.3" + minimatch "^3.0.4" + object.values "^1.1.1" + read-pkg-up "^2.0.0" + resolve "^1.17.0" + tsconfig-paths "^3.9.0" + +eslint-plugin-jest@24.0.2: + version "24.0.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-24.0.2.tgz#4bf0fcdc86289d702a7dacb430b4363482af773b" + integrity sha512-DSBLNpkKDOpUJQkTGSs5sVJWsu0nDyQ2rYxkr0Eh7nrkc5bMUr/dlDbtTj3l8y6UaCVsem6rryF1OZrKnz1S5g== + dependencies: + "@typescript-eslint/experimental-utils" "^4.0.1" + +eslint-plugin-jest@24.1.3: + version "24.1.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-24.1.3.tgz#fa3db864f06c5623ff43485ca6c0e8fc5fe8ba0c" + integrity sha512-dNGGjzuEzCE3d5EPZQ/QGtmlMotqnYWD/QpCZ1UuZlrMAdhG5rldh0N0haCvhGnUkSeuORS5VNROwF9Hrgn3Lg== + dependencies: + "@typescript-eslint/experimental-utils" "^4.0.1" + +eslint-plugin-prettier@3.1.4: + version "3.1.4" + resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.4.tgz#168ab43154e2ea57db992a2cd097c828171f75c2" + integrity sha512-jZDa8z76klRqo+TdGDTFJSavwbnWK2ZpqGKNZ+VvweMW516pDUMmQ2koXvxEE4JhzNvTv+radye/bWGBmA6jmg== + dependencies: + prettier-linter-helpers "^1.0.0" + +eslint-plugin-prettier@3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-3.2.0.tgz#af391b2226fa0e15c96f36c733f6e9035dbd952c" + integrity sha512-kOUSJnFjAUFKwVxuzy6sA5yyMx6+o9ino4gCdShzBNx4eyFRudWRYKCFolKjoM40PEiuU6Cn7wBLfq3WsGg7qg== + dependencies: + prettier-linter-helpers "^1.0.0" + +eslint-plugin-react-hooks@4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.1.2.tgz#2eb53731d11c95826ef7a7272303eabb5c9a271e" + integrity sha512-ykUeqkGyUGgwTtk78C0o8UG2fzwmgJ0qxBGPp2WqRKsTwcLuVf01kTDRAtOsd4u6whX2XOC8749n2vPydP82fg== + +eslint-plugin-react-hooks@4.2.0: + version "4.2.0" + 
resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.2.0.tgz#8c229c268d468956334c943bb45fc860280f5556" + integrity sha512-623WEiZJqxR7VdxFCKLI6d6LLpwJkGPYKODnkH3D7WpOG5KM8yWueBd8TLsNAetEJNF5iJmolaAKO3F8yzyVBQ== + +eslint-plugin-react@^7.20.3: + version "7.21.5" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.21.5.tgz#50b21a412b9574bfe05b21db176e8b7b3b15bff3" + integrity sha512-8MaEggC2et0wSF6bUeywF7qQ46ER81irOdWS4QWxnnlAEsnzeBevk1sWh7fhpCghPpXb+8Ks7hvaft6L/xsR6g== + dependencies: + array-includes "^3.1.1" + array.prototype.flatmap "^1.2.3" + doctrine "^2.1.0" + has "^1.0.3" + jsx-ast-utils "^2.4.1 || ^3.0.0" + object.entries "^1.1.2" + object.fromentries "^2.0.2" + object.values "^1.1.1" + prop-types "^15.7.2" + resolve "^1.18.1" + string.prototype.matchall "^4.0.2" + +eslint-scope@^5.0.0, eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-utils@^2.0.0, eslint-utils@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" + integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== + dependencies: + eslint-visitor-keys "^1.1.0" + +eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" + integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== + +eslint-visitor-keys@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz#21fdc8fbcd9c795cc0321f0563702095751511a8" + integrity sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ== + +eslint@7.12.0: + version "7.12.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.12.0.tgz#7b6a85f87a9adc239e979bb721cde5ce0dc27da6" + integrity sha512-n5pEU27DRxCSlOhJ2rO57GDLcNsxO0LPpAbpFdh7xmcDmjmlGUfoyrsB3I7yYdQXO5N3gkSTiDrPSPNFiiirXA== + dependencies: + "@babel/code-frame" "^7.0.0" + "@eslint/eslintrc" "^0.2.0" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.0.1" + doctrine "^3.0.0" + enquirer "^2.3.5" + eslint-scope "^5.1.1" + eslint-utils "^2.1.0" + eslint-visitor-keys "^2.0.0" + espree "^7.3.0" + esquery "^1.2.0" + esutils "^2.0.2" + file-entry-cache "^5.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^5.0.0" + globals "^12.1.0" + ignore "^4.0.6" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^3.13.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash "^4.17.19" + minimatch "^3.0.4" + natural-compare "^1.4.0" + optionator "^0.9.1" + progress "^2.0.0" + regexpp "^3.1.0" + semver "^7.2.1" + strip-ansi "^6.0.0" + strip-json-comments "^3.1.0" + table "^5.2.3" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +eslint@7.15.0: + version "7.15.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.15.0.tgz#eb155fb8ed0865fcf5d903f76be2e5b6cd7e0bc7" + integrity sha512-Vr64xFDT8w30wFll643e7cGrIkPEU50yIiI36OdSIDoSGguIeaLzBo0vpGvzo9RECUqq7htURfwEtKqwytkqzA== + dependencies: + "@babel/code-frame" "^7.0.0" + "@eslint/eslintrc" "^0.2.2" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.0.1" + doctrine "^3.0.0" + enquirer "^2.3.5" + eslint-scope "^5.1.1" + eslint-utils "^2.1.0" + eslint-visitor-keys "^2.0.0" + espree "^7.3.1" + esquery "^1.2.0" + esutils "^2.0.2" + file-entry-cache "^6.0.0" + functional-red-black-tree 
"^1.0.1" + glob-parent "^5.0.0" + globals "^12.1.0" + ignore "^4.0.6" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^3.13.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash "^4.17.19" + minimatch "^3.0.4" + natural-compare "^1.4.0" + optionator "^0.9.1" + progress "^2.0.0" + regexpp "^3.1.0" + semver "^7.2.1" + strip-ansi "^6.0.0" + strip-json-comments "^3.1.0" + table "^5.2.3" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^7.3.0: + version "7.3.0" + resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.0.tgz#dc30437cf67947cf576121ebd780f15eeac72348" + integrity sha512-dksIWsvKCixn1yrEXO8UosNSxaDoSYpq9reEjZSbHLpT5hpaCAKTLBwq0RHtLrIr+c0ByiYzWT8KTMRzoRCNlw== + dependencies: + acorn "^7.4.0" + acorn-jsx "^5.2.0" + eslint-visitor-keys "^1.3.0" + +espree@^7.3.1: + version "7.3.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" + integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== + dependencies: + acorn "^7.4.0" + acorn-jsx "^5.3.1" + eslint-visitor-keys "^1.3.0" + +esprima@^1.2.0: + version "1.2.5" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-1.2.5.tgz#0993502feaf668138325756f30f9a51feeec11e9" + integrity sha1-CZNQL+r2aBODJXVvMPmlH+7sEek= + +esprima@^4.0.0, esprima@^4.0.1, esprima@~4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.3.1.tgz#b78b5828aa8e214e29fb74c4d5b752e1c033da57" + integrity sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved 
"https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^1.5.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-1.9.3.tgz#af67f2dc922582415950926091a4005d29c9bb44" + integrity sha1-r2fy3JIlgkFZUJJgkaQAXSnJu0Q= + +estraverse@^4.1.1, estraverse@^4.2.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880" + integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= + +event-pubsub@4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/event-pubsub/-/event-pubsub-4.3.0.tgz#f68d816bc29f1ec02c539dc58c8dd40ce72cb36e" + integrity sha512-z7IyloorXvKbFx9Bpie2+vMJKKx1fH1EN5yiTfp8CiLOTptSYy1g8H4yDpGlEdshL1PBiFtBHepF2cNsqeEeFQ== + +event-stream@=3.3.4: + version "3.3.4" + resolved "https://registry.yarnpkg.com/event-stream/-/event-stream-3.3.4.tgz#4ab4c9a0f5a54db9338b4c34d86bfce8f4b35571" + integrity sha1-SrTJoPWlTbkzi0w02Gv86PSzVXE= + dependencies: + duplexer "~0.1.1" + from "~0" + 
map-stream "~0.1.0" + pause-stream "0.0.11" + split "0.3" + stream-combiner "~0.0.4" + through "~2.3.1" + +eventemitter-asyncresource@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/eventemitter-asyncresource/-/eventemitter-asyncresource-1.0.0.tgz#734ff2e44bf448e627f7748f905d6bdd57bdb65b" + integrity sha512-39F7TBIV0G7gTelxwbEqnwhp90eqCPON1k0NwNfwhgKn4Co4ybUbj2pECcXT0B3ztRKZ7Pw1JujUUgmQJHcVAQ== + +eventemitter3@^3.1.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7" + integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q== + +exec-sh@^0.3.2: + version "0.3.4" + resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.3.4.tgz#3a018ceb526cc6f6df2bb504b2bfe8e3a4934ec5" + integrity sha512-sEFIkc61v75sWeOe72qyrqg2Qg0OuLESziUDk/O/z2qgS15y2gWVFrI6f2Qn/qw/0/NCfCEsmNA4zOjkwEZT1A== + +execa@^0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-0.10.0.tgz#ff456a8f53f90f8eccc71a96d11bdfc7f082cb50" + integrity sha512-7XOMnz8Ynx1gGo/3hyV9loYNPWM94jG3+3T3Y8tsfSstFmETmENCMU/A/zj8Lyaj1lkgEepKepvd6240tBRvlw== + dependencies: + cross-spawn "^6.0.0" + get-stream "^3.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + +execa@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" + integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== + dependencies: + cross-spawn "^6.0.0" + get-stream "^4.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + +execa@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-4.1.0.tgz#4e5491ad1572f2f17a77d388c6c857135b22847a" + integrity 
sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA== + dependencies: + cross-spawn "^7.0.0" + get-stream "^5.0.0" + human-signals "^1.1.1" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.0" + onetime "^5.1.0" + signal-exit "^3.0.2" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= + +expand-brackets@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= + dependencies: + debug "^2.3.3" + define-property "^0.2.5" + extend-shallow "^2.0.1" + posix-character-classes "^0.1.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +expect@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/expect/-/expect-26.6.2.tgz#c6b996bf26bf3fe18b67b2d0f51fc981ba934417" + integrity sha512-9/hlOBkQl2l/PLHJx6JjoDF6xPKcJEsUlWKb23rKE7KzeDqUZKXKNMW27KIue5JMdBV9HgmoJPcc8HtO85t9IA== + dependencies: + "@jest/types" "^26.6.2" + ansi-styles "^4.0.0" + jest-get-type "^26.3.0" + jest-matcher-utils "^26.6.2" + jest-message-util "^26.6.2" + jest-regex-util "^26.0.0" + +express-healthcheck@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/express-healthcheck/-/express-healthcheck-0.1.0.tgz#cabec78129c4cb90cd7fb894dfae21b82e27cb07" + integrity sha1-yr7HgSnEy5DNf7iU364huC4nywc= + +express-normalize-query-params-middleware@^0.5.0: + version "0.5.1" + resolved "https://registry.yarnpkg.com/express-normalize-query-params-middleware/-/express-normalize-query-params-middleware-0.5.1.tgz#dbe1e8139aecb234fb6adb5c0059c75db9733d2a" + integrity sha1-2+HoE5rssjT7attcAFnHXblzPSo= + +express-openapi-validator@^4.9.0: + version "4.9.0" + resolved 
"https://registry.yarnpkg.com/express-openapi-validator/-/express-openapi-validator-4.9.0.tgz#1317559382ddd2313c8a8e80194c18232690c40f" + integrity sha512-z+n21Uk/egt/UnHQ+ZiA7XiJ/oWnn6IYILWaGpwiOAC+6d0n5j+Sc/GkFhr0JQxqOttGx3iyAthqDELrx474fg== + dependencies: + ajv "^6.12.6" + content-type "^1.0.4" + js-yaml "^3.14.0" + json-schema-ref-parser "^9.0.6" + lodash.clonedeep "^4.5.0" + lodash.merge "^4.6.2" + lodash.uniq "^4.5.0" + lodash.zipobject "^4.1.3" + media-typer "^1.1.0" + multer "^1.4.2" + ono "^7.1.3" + path-to-regexp "^6.2.0" + +express-openapi@*: + version "7.2.0" + resolved "https://registry.yarnpkg.com/express-openapi/-/express-openapi-7.2.0.tgz#3ca9aead76491099f6dbaa74d72414c2b9f59bb5" + integrity sha512-Zlz1qA5DA5vRj8UrWFNH5cA1js5g0VB5yRIR1cuxPGAclsU8q7Y6Dvx/BhQ10/OGYd8qwFk4mFvDYahSEQe7gw== + dependencies: + express-normalize-query-params-middleware "^0.5.0" + openapi-framework "^7.2.0" + openapi-types "^7.0.1" + +express@^4.17.1: + version "4.17.1" + resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" + integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== + dependencies: + accepts "~1.3.7" + array-flatten "1.1.1" + body-parser "1.19.0" + content-disposition "0.5.3" + content-type "~1.0.4" + cookie "0.4.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "~1.1.2" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "~1.1.2" + fresh "0.5.2" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "~2.3.0" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.5" + qs "6.7.0" + range-parser "~1.2.1" + safe-buffer "5.1.2" + send "0.17.1" + serve-static "1.14.1" + setprototypeof "1.1.1" + statuses "~1.5.0" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +extend-object@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/extend-object/-/extend-object-1.0.0.tgz#42514f84015d1356caf5187969dfb2bc1bda0823" + integrity sha1-QlFPhAFdE1bK9Rh5ad+yvBvaCCM= + +extend-shallow@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= + dependencies: + is-extendable "^0.1.0" + +extend-shallow@^3.0.0, extend-shallow@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= + dependencies: + assign-symbols "^1.0.0" + is-extendable "^1.0.1" + +extend@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +external-editor@^3.0.3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" + integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== + dependencies: + chardet "^0.7.0" + iconv-lite "^0.4.24" + tmp "^0.0.33" + +extglob@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== + dependencies: + array-unique "^0.3.2" + define-property "^1.0.0" + expand-brackets "^2.1.4" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +extract-stack@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/extract-stack/-/extract-stack-1.0.0.tgz#b97acaf9441eea2332529624b732fc5a1c8165fa" + integrity 
sha1-uXrK+UQe6iMyUpYktzL8WhyBZfo= + +extract-stack@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/extract-stack/-/extract-stack-2.0.0.tgz#11367bc865bfcd9bc0db3123e5edb57786f11f9b" + integrity sha512-AEo4zm+TenK7zQorGK1f9mJ8L14hnTDi2ZQPR+Mub1NX8zimka1mXpV5LpH8x9HoUmFSHZCfLHqWvp0Y4FxxzQ== + +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= + +extsprintf@^1.2.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" + integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= + +fancy-test@^1.4.3: + version "1.4.10" + resolved "https://registry.yarnpkg.com/fancy-test/-/fancy-test-1.4.10.tgz#310be93d4aa45d788bce56a573ae4d1b92b2e1a0" + integrity sha512-AaUX6wKS7D5OP2YK2q5G7c8PGx2lgoyLUD7Bbg8z323sb9aebBqzb9UN6phzI73UgO/ViihmNfOxF3kdfZLhew== + dependencies: + "@types/chai" "*" + "@types/lodash" "*" + "@types/node" "*" + "@types/sinon" "*" + lodash "^4.17.13" + mock-stdin "^1.0.0" + nock "^13.0.0" + stdout-stderr "^0.1.9" + +fast-deep-equal@^3.1.1: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-diff@^1.1.2: + version "1.2.0" + resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.2.0.tgz#73ee11982d86caaf7959828d519cfe927fac5f03" + integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== + +fast-glob@^2.0.2, fast-glob@^2.2.6: + version "2.2.7" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-2.2.7.tgz#6953857c3afa475fff92ee6015d52da70a4cd39d" + integrity sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw== + 
dependencies: + "@mrmlnc/readdir-enhanced" "^2.2.1" + "@nodelib/fs.stat" "^1.1.2" + glob-parent "^3.1.0" + is-glob "^4.0.0" + merge2 "^1.2.3" + micromatch "^3.1.10" + +fast-glob@^3.0.3, fast-glob@^3.1.1: + version "3.2.4" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.4.tgz#d20aefbf99579383e7f3cc66529158c9b98554d3" + integrity sha512-kr/Oo6PX51265qeuCYsyGypiO5uJFgBS0jksyG7FUeCyQzNwYnzrNIMR1NXfkZXsMYXYLRAHgISHBz8gQcxKHQ== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.0" + merge2 "^1.3.0" + micromatch "^4.0.2" + picomatch "^2.2.1" + +fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + +fast-safe-stringify@2.0.7, fast-safe-stringify@^2.0.4, fast-safe-stringify@^2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.0.7.tgz#124aa885899261f68aedb42a7c080de9da608743" + integrity sha512-Utm6CdzT+6xsDk2m8S6uL8VHxNwI6Jub+e9NYTcAms28T84pTa25GJQV9j0CY0N1rM8hK4x6grpF2BQf+2qwVA== + +fastq@^1.6.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.9.0.tgz#e16a72f338eaca48e91b5c23593bcc2ef66b7947" + integrity sha512-i7FVWL8HhVY+CTkwFxkN2mk3h+787ixS5S63eb78diVRc1MCssarHq3W5cj0av7YDSwmaV928RNag+U1etRQ7w== + dependencies: + reusify "^1.0.4" + +fb-watchman@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85" + integrity 
sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg== + dependencies: + bser "2.1.1" + +fecha@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/fecha/-/fecha-4.2.0.tgz#3ffb6395453e3f3efff850404f0a59b6747f5f41" + integrity sha512-aN3pcx/DSmtyoovUudctc8+6Hl4T+hI9GBBHLjA76jdZl7+b1sgh5g4k+u/GL3dTy1/pnYzKp69FpJ0OicE3Wg== + +figgy-pudding@^3.4.1, figgy-pudding@^3.5.1: + version "3.5.2" + resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" + integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== + +figlet@^1.1.1: + version "1.5.0" + resolved "https://registry.yarnpkg.com/figlet/-/figlet-1.5.0.tgz#2db4d00a584e5155a96080632db919213c3e003c" + integrity sha512-ZQJM4aifMpz6H19AW1VqvZ7l4pOE9p7i/3LyxgO2kp+PO/VcDYNqIHEMtkccqIhTXMKci4kjueJr/iCQEaT/Ww== + +figures@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" + integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI= + dependencies: + escape-string-regexp "^1.0.5" + +figures@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" + integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== + dependencies: + escape-string-regexp "^1.0.5" + +file-entry-cache@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-5.0.1.tgz#ca0f6efa6dd3d561333fb14515065c2fafdf439c" + integrity sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g== + dependencies: + flat-cache "^2.0.1" + +file-entry-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.0.tgz#7921a89c391c6d93efec2169ac6bf300c527ea0a" + integrity 
sha512-fqoO76jZ3ZnYrXLDRxBR1YvOvc0k844kcOg40bgsPrE25LAb/PDqTY+ho64Xh2c8ZXgIKldchCFHczG2UVRcWA== + dependencies: + flat-cache "^3.0.4" + +file-type@^3.6.0: + version "3.9.0" + resolved "https://registry.yarnpkg.com/file-type/-/file-type-3.9.0.tgz#257a078384d1db8087bc449d107d52a52672b9e9" + integrity sha1-JXoHg4TR24CHvESdEH1SpSZyuek= + +file-uri-to-path@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" + integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== + +filelist@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.1.tgz#f10d1a3ae86c1694808e8f20906f43d4c9132dbb" + integrity sha512-8zSK6Nu0DQIC08mUC46sWGXi+q3GGpKydAG36k+JDba6VRpkevvOWUW5a/PhShij4+vHT9M+ghgG7eM+a9JDUQ== + dependencies: + minimatch "^3.0.4" + +fill-range@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= + dependencies: + extend-shallow "^2.0.1" + is-number "^3.0.0" + repeat-string "^1.6.1" + to-regex-range "^2.1.0" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +finalhandler@1.1.2, finalhandler@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" + integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== + dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "~2.3.0" + parseurl "~1.3.3" + statuses "~1.5.0" + unpipe "~1.0.0" + 
+find-up@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" + integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= + dependencies: + path-exists "^2.0.0" + pinkie-promise "^2.0.0" + +find-up@^2.0.0, find-up@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= + dependencies: + locate-path "^2.0.0" + +find-up@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +find-yarn-workspace-root@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/find-yarn-workspace-root/-/find-yarn-workspace-root-2.0.0.tgz#f47fb8d239c900eb78179aa81b66673eac88f7bd" + integrity sha512-1IMnbjt4KzsQfnhnzNd8wUEgXZ44IzZaZmnLYx7D5FZlaHt2gW20Cri8Q+E/t5tIj4+epTBub+2Zxu/vNILzqQ== + dependencies: + micromatch "^4.0.2" + +first-chunk-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/first-chunk-stream/-/first-chunk-stream-2.0.0.tgz#1bdecdb8e083c0664b91945581577a43a9f31d70" + integrity sha1-G97NuOCDwGZLkZRVgVd6Q6nzHXA= + dependencies: + readable-stream "^2.0.2" + +flat-cache@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-2.0.1.tgz#5d296d6f04bda44a4630a301413bdbc2ec085ec0" + integrity 
sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA== + dependencies: + flatted "^2.0.0" + rimraf "2.6.3" + write "1.0.3" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.2.tgz#4575b21e2bcee7434aa9be662f4b7b5f9c2b5138" + integrity sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA== + +flatted@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.1.0.tgz#a5d06b4a8b01e3a63771daa5cb7a1903e2e57067" + integrity sha512-tW+UkmtNg/jv9CSofAKvgVcO7c2URjhTdW1ZTkcAritblu8tajiYy7YisnIflEwtKssCtOxpnBRoCB7iap0/TA== + +flush-write-stream@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" + integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== + dependencies: + inherits "^2.0.3" + readable-stream "^2.3.6" + +fn-name@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/fn-name/-/fn-name-3.0.0.tgz#0596707f635929634d791f452309ab41558e3c5c" + integrity sha512-eNMNr5exLoavuAMhIUVsOKF79SWd/zG104ef6sxBTSw+cZc6BXdQXDvYcGvp0VbxVVSp1XDUNoz7mg1xMtSznA== + +fn.name@1.x.x: + version "1.1.0" + resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" + integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== + +follow-redirects@1.5.10: + version "1.5.10" + resolved 
"https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.5.10.tgz#7b7a9f9aea2fdff36786a94ff643ed07f4ff5e2a" + integrity sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ== + dependencies: + debug "=3.1.0" + +follow-redirects@^1.10.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.0.tgz#b42e8d93a2a7eea5ed88633676d6597bc8e384db" + integrity sha512-aq6gF1BEKje4a9i9+5jimNFIpq4Q1WiwBToeRK5NvZBd/TRsmW8BsJfOEGkr76TbOyPVD3OVDN910EcUNtRYEA== + +for-in@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" + integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= + +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= + +form-data@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.0.tgz#31b7e39c85f1355b7139ee0c647cf0de7f83c682" + integrity sha512-CKMFDglpbMi6PyN+brwB9Q/GOw0eAnsrEZDgcsH5Krhz5Od/haKHAX0NmQfha2zPPz0JpWzA7GJHGSnvCRLWsg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +form-data@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" + +formidable@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.2.2.tgz#bf69aea2972982675f00865342b982986f6b8dd9" + integrity sha512-V8gLm+41I/8kguQ4/o1D3RIHRmhYFG4pnNyonvua+40rqcEmT4+V71yaZ3B457xbbgCsCfjSPi65u/W6vK1U5Q== + +forwarded@~0.1.2: + version "0.1.2" + resolved 
"https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" + integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= + +fragment-cache@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= + dependencies: + map-cache "^0.2.2" + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= + +from2@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" + integrity sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8= + dependencies: + inherits "^2.0.1" + readable-stream "^2.0.0" + +from@~0: + version "0.1.7" + resolved "https://registry.yarnpkg.com/from/-/from-0.1.7.tgz#83c60afc58b9c56997007ed1a768b3ab303a44fe" + integrity sha1-g8YK/Fi5xWmXAH7Rp2izqzA6RP4= + +fs-constants@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" + integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== + +fs-extra@9.0.1, fs-extra@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.0.1.tgz#910da0062437ba4c39fedd863f1675ccfefcb9fc" + integrity sha512-h2iAoN838FqAFJY2/qVpzFXy+EBxfVE220PalAqQLDVsFOHLJrZvut5puAbCdNv6WJk+B8ihI+k0c7JK5erwqQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^1.0.0" + +fs-extra@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-6.0.1.tgz#8abc128f7946e310135ddc93b98bddb410e7a34b" + integrity sha512-GnyIkKhhzXZUWFCaJzvyDLEEgDkPfb4/TPvJCJVuS8MWZgoSsErf++QpiAlDnKFcqhRlm+tIOcencCjyJE6ZCA== + dependencies: + graceful-fs "^4.1.2" + jsonfile "^4.0.0" + 
universalify "^0.1.0" + +fs-extra@^7.0.0: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" + integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== + dependencies: + graceful-fs "^4.1.2" + jsonfile "^4.0.0" + universalify "^0.1.0" + +fs-extra@^8.1, fs-extra@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" + integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^4.0.0" + universalify "^0.1.0" + +fs-minipass@^1.2.5: + version "1.2.7" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" + integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== + dependencies: + minipass "^2.6.0" + +fs-routes@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fs-routes/-/fs-routes-7.0.1.tgz#41fcfea23f23ecd2d740a7397813e9b169cdfb54" + integrity sha512-kSAfx/P8oLSi5+tblecTETcJJ/Q+qL+xzGx4hns/+gHXMkTOZEzG73/2dBDW1FFy5+ZW080XoMaBAN2kCN55aQ== + +fs-write-stream-atomic@^1.0.8: + version "1.0.10" + resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" + integrity sha1-tH31NJPvkR33VzHnCp3tAYnbQMk= + dependencies: + graceful-fs "^4.1.2" + iferr "^0.1.5" + imurmurhash "^0.1.4" + readable-stream "1 || 2" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +fsevents@^2.1.2: + version "2.2.1" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.2.1.tgz#1fb02ded2036a8ac288d507a65962bd87b97628d" + 
integrity sha512-bTLYHSeC0UH/EFXS9KqWnXuOl/wHK5Z/d+ghd5AsFMYN7wIGkUCOJyzy88+wJKkZPGON8u4Z9f6U4FdgURE9qA== + +fsevents@~2.1.2: + version "2.1.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.3.tgz#fb738703ae8d2f9fe900c33836ddebee8b97f23e" + integrity sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ== + +fsevents@~2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= + +gauge@~2.7.3: + version "2.7.4" + resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" + integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= + dependencies: + aproba "^1.0.3" + console-control-strings "^1.0.0" + has-unicode "^2.0.0" + object-assign "^4.1.0" + signal-exit "^3.0.0" + string-width "^1.0.1" + strip-ansi "^3.0.1" + wide-align "^1.1.0" + +genfun@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/genfun/-/genfun-5.0.0.tgz#9dd9710a06900a5c4a5bf57aca5da4e52fe76537" + integrity sha512-KGDOARWVga7+rnB3z9Sd2Letx515owfk0hSxHGuqjANb1M+x2bGZGqHLiozPsYMdM2OubeMni/Hpwmjq6qIUhA== + +gensync@^1.0.0-beta.1: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity 
sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-browser-rtc@^1.0.2: + version "1.1.0" + resolved "https://registry.yarnpkg.com/get-browser-rtc/-/get-browser-rtc-1.1.0.tgz#d1494e299b00f33fc8e9d6d3343ba4ba99711a2c" + integrity sha512-MghbMJ61EJrRsDe7w1Bvqt3ZsBuqhce5nrn/XAwgwOXhcsz53/ltdxOse1h/8eKXj5slzxdsz56g5rzOFSGwfQ== + +get-caller-file@^2.0.1, get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-func-name@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" + integrity sha1-6td0q+5y4gQJQzoGY2YCPdaIekE= + +get-intrinsic@^1.0.0, get-intrinsic@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.0.1.tgz#94a9768fcbdd0595a1c9273aacf4c89d075631be" + integrity sha512-ZnWP+AmS1VUaLgTRy47+zKtjTxz+0xMpx3I52i+aalBK1QP19ggLF3Db89KJX7kjfOfP2eoa01qc++GwPgufPg== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-pkg-repo@^1.0.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/get-pkg-repo/-/get-pkg-repo-1.4.0.tgz#c73b489c06d80cc5536c2c853f9e05232056972d" + integrity sha1-xztInAbYDMVTbCyFP54FIyBWly0= + dependencies: + hosted-git-info "^2.1.4" + meow "^3.3.0" + normalize-package-data "^2.3.0" + parse-github-repo-url "^1.3.0" + through2 "^2.0.0" + +get-port@^4.2.0: + version "4.2.0" + resolved 
"https://registry.yarnpkg.com/get-port/-/get-port-4.2.0.tgz#e37368b1e863b7629c43c5a323625f95cf24b119" + integrity sha512-/b3jarXkH8KJoOMQc3uVGHASwGLPq3gSFJ7tgJm2diza+bydJPTGOibin2steecKeOylE8oY2JERlVWkAJO6yw== + +get-stdin@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" + integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= + +get-stdin@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-5.0.1.tgz#122e161591e21ff4c52530305693f20e6393a398" + integrity sha1-Ei4WFZHiH/TFJTAwVpPyDmOTo5g= + +get-stdin@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-6.0.0.tgz#9e09bf712b360ab9225e812048f71fde9c89657b" + integrity sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g== + +get-stream@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" + integrity sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ= + +get-stream@^4.0.0, get-stream@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" + integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== + dependencies: + pump "^3.0.0" + +get-stream@^5.0.0, get-stream@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" + integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== + dependencies: + pump "^3.0.0" + +get-value@^2.0.3, get-value@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= + +getpass@^0.1.1: + version "0.1.7" + resolved 
"https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= + dependencies: + assert-plus "^1.0.0" + +gh-got@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/gh-got/-/gh-got-5.0.0.tgz#ee95be37106fd8748a96f8d1db4baea89e1bfa8a" + integrity sha1-7pW+NxBv2HSKlvjR20uuqJ4b+oo= + dependencies: + got "^6.2.0" + is-plain-obj "^1.1.0" + +gh-got@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/gh-got/-/gh-got-6.0.0.tgz#d74353004c6ec466647520a10bd46f7299d268d0" + integrity sha512-F/mS+fsWQMo1zfgG9MD8KWvTWPPzzhuVwY++fhQ5Ggd+0P+CAMHtzMZhNxG+TqGfHDChJKsbh6otfMGqO2AKBw== + dependencies: + got "^7.0.0" + is-plain-obj "^1.1.0" + +git-raw-commits@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/git-raw-commits/-/git-raw-commits-2.0.0.tgz#d92addf74440c14bcc5c83ecce3fb7f8a79118b5" + integrity sha512-w4jFEJFgKXMQJ0H0ikBk2S+4KP2VEjhCvLCNqbNRQC8BgGWgLKNCO7a9K9LI+TVT7Gfoloje502sEnctibffgg== + dependencies: + dargs "^4.0.1" + lodash.template "^4.0.2" + meow "^4.0.0" + split2 "^2.0.0" + through2 "^2.0.0" + +git-remote-origin-url@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/git-remote-origin-url/-/git-remote-origin-url-2.0.0.tgz#5282659dae2107145a11126112ad3216ec5fa65f" + integrity sha1-UoJlna4hBxRaERJhEq0yFuxfpl8= + dependencies: + gitconfiglocal "^1.0.0" + pify "^2.3.0" + +git-semver-tags@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/git-semver-tags/-/git-semver-tags-2.0.3.tgz#48988a718acf593800f99622a952a77c405bfa34" + integrity sha512-tj4FD4ww2RX2ae//jSrXZzrocla9db5h0V7ikPl1P/WwoZar9epdUhwR7XHXSgc+ZkNq72BEEerqQuicoEQfzA== + dependencies: + meow "^4.0.0" + semver "^6.0.0" + +git-up@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/git-up/-/git-up-4.0.2.tgz#10c3d731051b366dc19d3df454bfca3f77913a7c" + integrity 
sha512-kbuvus1dWQB2sSW4cbfTeGpCMd8ge9jx9RKnhXhuJ7tnvT+NIrTVfYZxjtflZddQYcmdOTlkAcjmx7bor+15AQ== + dependencies: + is-ssh "^1.3.0" + parse-url "^5.0.0" + +git-url-parse@^11.1.2: + version "11.4.0" + resolved "https://registry.yarnpkg.com/git-url-parse/-/git-url-parse-11.4.0.tgz#f2bb1f2b00f05552540e95a62e31399a639a6aa6" + integrity sha512-KlIa5jvMYLjXMQXkqpFzobsyD/V2K5DRHl5OAf+6oDFPlPLxrGDVQlIdI63c4/Kt6kai4kALENSALlzTGST3GQ== + dependencies: + git-up "^4.0.0" + +gitconfiglocal@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/gitconfiglocal/-/gitconfiglocal-1.0.0.tgz#41d045f3851a5ea88f03f24ca1c6178114464b9b" + integrity sha1-QdBF84UaXqiPA/JMocYXgRRGS5s= + dependencies: + ini "^1.3.2" + +github-slugger@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-1.3.0.tgz#9bd0a95c5efdfc46005e82a906ef8e2a059124c9" + integrity sha512-gwJScWVNhFYSRDvURk/8yhcFBee6aFjye2a7Lhb2bUyRulpIoek9p0I9Kt7PT67d/nUlZbFu8L9RLiA0woQN8Q== + dependencies: + emoji-regex ">=6.0.0 <=6.1.1" + +github-username@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/github-username/-/github-username-3.0.0.tgz#0a772219b3130743429f2456d0bdd3db55dce7b1" + integrity sha1-CnciGbMTB0NCnyRW0L3T21Xc57E= + dependencies: + gh-got "^5.0.0" + +github-username@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/github-username/-/github-username-4.1.0.tgz#cbe280041883206da4212ae9e4b5f169c30bf417" + integrity sha1-y+KABBiDIG2kISrp5LXxacML9Bc= + dependencies: + gh-got "^6.0.0" + +glob-parent@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" + integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= + dependencies: + is-glob "^3.1.0" + path-dirname "^1.0.0" + +glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@~5.1.0: + version "5.1.1" + resolved 
"https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229" + integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ== + dependencies: + is-glob "^4.0.1" + +glob-to-regexp@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.3.0.tgz#8c5a1494d2066c570cc3bfe4496175acc4d502ab" + integrity sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs= + +glob@*, glob@7.1.6, glob@^7.0.0, glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: + version "7.1.6" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^6.0.1: + version "6.0.4" + resolved "https://registry.yarnpkg.com/glob/-/glob-6.0.4.tgz#0f08860f6a155127b2fadd4f9ce24b1aab6e4d22" + integrity sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI= + dependencies: + inflight "^1.0.4" + inherits "2" + minimatch "2 || 3" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global-dirs@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-2.0.1.tgz#acdf3bb6685bcd55cb35e8a052266569e9469201" + integrity sha512-5HqUqdhkEovj2Of/ms3IeS/EekcO54ytHRLV4PEY2rhRwrHXLQjeVEES0Lhka0xwNDtGYn58wyC4s5+MHsOO6A== + dependencies: + ini "^1.3.5" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^12.1.0: + version "12.4.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8" + integrity 
sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg== + dependencies: + type-fest "^0.8.1" + +globby@^10.0.1: + version "10.0.2" + resolved "https://registry.yarnpkg.com/globby/-/globby-10.0.2.tgz#277593e745acaa4646c3ab411289ec47a0392543" + integrity sha512-7dUi7RvCoT/xast/o/dLN53oqND4yk0nsHkhRgn9w65C4PofCLOoJ39iSOg+qVDdWQPIEj+eszMHQ+aLVwwQSg== + dependencies: + "@types/glob" "^7.1.1" + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.0.3" + glob "^7.1.3" + ignore "^5.1.1" + merge2 "^1.2.3" + slash "^3.0.0" + +globby@^11.0.1: + version "11.0.1" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.1.tgz#9a2bf107a068f3ffeabc49ad702c79ede8cfd357" + integrity sha512-iH9RmgwCmUJHi2z5o2l3eTtGBtXek1OYlHrbcxOYugyHLmAsZrPj43OtHThd62Buh/Vv6VyCBD2bdyWcGNQqoQ== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.1.1" + ignore "^5.1.4" + merge2 "^1.3.0" + slash "^3.0.0" + +globby@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-4.1.0.tgz#080f54549ec1b82a6c60e631fc82e1211dbe95f8" + integrity sha1-CA9UVJ7BuCpsYOYx/ILhIR2+lfg= + dependencies: + array-union "^1.0.1" + arrify "^1.0.0" + glob "^6.0.1" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +globby@^8.0.1: + version "8.0.2" + resolved "https://registry.yarnpkg.com/globby/-/globby-8.0.2.tgz#5697619ccd95c5275dbb2d6faa42087c1a941d8d" + integrity sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w== + dependencies: + array-union "^1.0.1" + dir-glob "2.0.0" + fast-glob "^2.0.2" + glob "^7.1.2" + ignore "^3.3.5" + pify "^3.0.0" + slash "^1.0.0" + +globby@^9.2.0: + version "9.2.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-9.2.0.tgz#fd029a706c703d29bdd170f4b6db3a3f7a7cb63d" + integrity sha512-ollPHROa5mcxDEkwg6bPt3QbEf4pDQSNtd6JPL1YvOvAo/7/0VAm9TccUeoTmarjPw4pfUthSCqcyfNB1I3ZSg== + dependencies: + "@types/glob" "^7.1.1" + array-union "^1.0.2" 
+ dir-glob "^2.2.2" + fast-glob "^2.2.6" + glob "^7.1.3" + ignore "^4.0.3" + pify "^4.0.1" + slash "^2.0.0" + +got@^6.2.0: + version "6.7.1" + resolved "https://registry.yarnpkg.com/got/-/got-6.7.1.tgz#240cd05785a9a18e561dc1b44b41c763ef1e8db0" + integrity sha1-JAzQV4WpoY5WHcG0S0HHY+8ejbA= + dependencies: + create-error-class "^3.0.0" + duplexer3 "^0.1.4" + get-stream "^3.0.0" + is-redirect "^1.0.0" + is-retry-allowed "^1.0.0" + is-stream "^1.0.0" + lowercase-keys "^1.0.0" + safe-buffer "^5.0.1" + timed-out "^4.0.0" + unzip-response "^2.0.1" + url-parse-lax "^1.0.0" + +got@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/got/-/got-7.1.0.tgz#05450fd84094e6bbea56f451a43a9c289166385a" + integrity sha512-Y5WMo7xKKq1muPsxD+KmrR8DH5auG7fBdDVueZwETwV6VytKyU9OX/ddpq2/1hp1vIPvVb4T81dKQz3BivkNLw== + dependencies: + decompress-response "^3.2.0" + duplexer3 "^0.1.4" + get-stream "^3.0.0" + is-plain-obj "^1.1.0" + is-retry-allowed "^1.0.0" + is-stream "^1.0.0" + isurl "^1.0.0-alpha5" + lowercase-keys "^1.0.0" + p-cancelable "^0.3.0" + p-timeout "^1.1.1" + safe-buffer "^5.0.1" + timed-out "^4.0.0" + url-parse-lax "^1.0.0" + url-to-options "^1.0.1" + +got@^9.6.0: + version "9.6.0" + resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" + integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== + dependencies: + "@sindresorhus/is" "^0.14.0" + "@szmarczak/http-timer" "^1.1.2" + cacheable-request "^6.0.0" + decompress-response "^3.3.0" + duplexer3 "^0.1.4" + get-stream "^4.1.0" + lowercase-keys "^1.0.1" + mimic-response "^1.0.1" + p-cancelable "^1.0.0" + to-readable-stream "^1.0.0" + url-parse-lax "^3.0.0" + +graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.2, graceful-fs@^4.2.4: + version "4.2.4" + resolved 
"https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" + integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== + +graphile-worker@0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/graphile-worker/-/graphile-worker-0.9.0.tgz#aa1c794dea80acf7abeb68c5c8ce6a2c41675b1a" + integrity sha512-tb3zT0k3XCsVWUqReA1NlepkD4Pezmd1Ot9+9ItKxLLElK2T17eYpkMEsk48vH3yR7tMQSnwJHrEuirtn9z6fg== + dependencies: + "@types/debug" "^4.1.2" + "@types/pg" "^7.14.3" + chokidar "^3.4.0" + cosmiconfig "^7.0.0" + json5 "^2.1.3" + pg ">=6.5 <9" + tslib "^2.1.0" + yargs "^16.2.0" + +grouped-queue@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/grouped-queue/-/grouped-queue-1.1.0.tgz#63e3f9ca90af952269d1d40879e41221eacc74cb" + integrity sha512-rZOFKfCqLhsu5VqjBjEWiwrYqJR07KxIkH4mLZlNlGDfntbb4FbMyGFP14TlvRPrU9S3Hnn/sgxbC5ZeN0no3Q== + dependencies: + lodash "^4.17.15" + +growly@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" + integrity sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE= + +handlebars@^4.7.6: + version "4.7.6" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.6.tgz#d4c05c1baf90e9945f77aa68a7a219aa4a7df74e" + integrity sha512-1f2BACcBfiwAfStCKZNrUCgqNZkGsAT7UM3kkYtXuLo0KnaVfjKOyf7PRzB6++aK9STyT1Pd2ZCPe3EGOXleXA== + dependencies: + minimist "^1.2.5" + neo-async "^2.6.0" + source-map "^0.6.1" + wordwrap "^1.0.0" + optionalDependencies: + uglify-js "^3.1.4" + +har-schema@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= + +har-validator@~5.1.3: + version "5.1.5" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" + integrity 
sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== + dependencies: + ajv "^6.12.3" + har-schema "^2.0.0" + +hard-rejection@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883" + integrity sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA== + +has-ansi@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= + dependencies: + ansi-regex "^2.0.0" + +has-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" + integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= + +has-flag@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" + integrity sha1-6CB68cx7MNRGzHC3NLXovhj4jVE= + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-symbol-support-x@^1.4.1: + version "1.4.2" + resolved "https://registry.yarnpkg.com/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz#1409f98bc00247da45da67cee0a36f282ff26455" + integrity sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw== + +has-symbols@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" + integrity 
sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== + +has-to-string-tag-x@^1.2.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz#a045ab383d7b4b2012a00148ab0aa5f290044d4d" + integrity sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw== + dependencies: + has-symbol-support-x "^1.4.1" + +has-unicode@^2.0.0, has-unicode@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" + integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= + +has-value@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= + dependencies: + get-value "^2.0.3" + has-values "^0.1.4" + isobject "^2.0.0" + +has-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= + dependencies: + get-value "^2.0.6" + has-values "^1.0.0" + isobject "^3.0.0" + +has-values@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= + +has-values@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + +has-yarn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77" + integrity sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw== + +has@^1.0.3: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hdr-histogram-js@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/hdr-histogram-js/-/hdr-histogram-js-2.0.1.tgz#ecb1ff2bcb6181c3e93ff4af9472c28c7e97284e" + integrity sha512-uPZxl1dAFnjUFHWLZmt93vUUvtHeaBay9nVNHu38SdOjMSF/4KqJUqa1Seuj08ptU1rEb6AHvB41X8n/zFZ74Q== + dependencies: + "@assemblyscript/loader" "^0.10.1" + base64-js "^1.2.0" + pako "^1.0.3" + +hdr-histogram-percentiles-obj@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/hdr-histogram-percentiles-obj/-/hdr-histogram-percentiles-obj-3.0.0.tgz#9409f4de0c2dda78e61de2d9d78b1e9f3cba283c" + integrity sha512-7kIufnBqdsBGcSZLPJwqHT3yhk1QTsSlFsVD3kx5ixH/AlgBs9yM1q6DPhXZ8f8gtdqgh7N7/5btRLpQsS2gHw== + +hex-array@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/hex-array/-/hex-array-1.0.0.tgz#e5c24eea0383f2d9e8491f2e815bda0ce02233ae" + integrity sha1-5cJO6gOD8tnoSR8ugVvaDOAiM64= + +highlight.js@10.4.1, highlight.js@^10.0.0, highlight.js@^10.2.0: + version "10.4.1" + resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-10.4.1.tgz#d48fbcf4a9971c4361b3f95f302747afe19dbad0" + integrity sha512-yR5lWvNz7c85OhVAEAeFhVCc/GV4C30Fjzc/rCP0aCWzc1UUOPUk55dK/qdwTZHBvMZo+eZ2jpk62ndX/xMFlg== + +hosted-git-info@^2.1.4, hosted-git-info@^2.7.1: + version "2.8.8" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.8.tgz#7539bd4bc1e0e0a895815a2e0262420b12858488" + integrity sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg== + +hosted-git-info@^3.0.6: + version "3.0.7" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-3.0.7.tgz#a30727385ea85acfcee94e0aad9e368c792e036c" + integrity 
sha512-fWqc0IcuXs+BmE9orLDyVykAG9GJtGLGuZAAqgcckPgv5xad4AcXGIv8galtQvlwutxSlaMcdw7BUtq2EIvqCQ== + dependencies: + lru-cache "^6.0.0" + +html-encoding-sniffer@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + dependencies: + whatwg-encoding "^1.0.5" + +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +http-cache-semantics@^3.8.1: + version "3.8.1" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2" + integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w== + +http-cache-semantics@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390" + integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ== + +http-call@^5.1.2, http-call@^5.2.2: + version "5.3.0" + resolved "https://registry.yarnpkg.com/http-call/-/http-call-5.3.0.tgz#4ded815b13f423de176eb0942d69c43b25b148db" + integrity sha512-ahwimsC23ICE4kPl9xTBjKB4inbRaeLyZeRunC/1Jy/Z6X8tv22MEAjK+KBOMSVLaqXPTTmd8638waVIKLGx2w== + dependencies: + content-type "^1.0.4" + debug "^4.1.1" + is-retry-allowed "^1.1.0" + is-stream "^2.0.0" + parse-json "^4.0.0" + tunnel-agent "^0.6.0" + +http-errors@1.7.2: + version "1.7.2" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" + integrity 
sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.1" + statuses ">= 1.5.0 < 2" + toidentifier "1.0.0" + +http-errors@~1.7.2: + version "1.7.3" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" + integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== + dependencies: + depd "~1.1.2" + inherits "2.0.4" + setprototypeof "1.1.1" + statuses ">= 1.5.0 < 2" + toidentifier "1.0.0" + +http-proxy-agent@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-2.1.0.tgz#e4821beef5b2142a2026bd73926fe537631c5405" + integrity sha512-qwHbBLV7WviBl0rQsOzH6o5lwyOIvwp/BdFnvVxXORldu5TmjFfjzBcWUWS5kWAZhmv+JtiDhSuQCp4sBfbIgg== + dependencies: + agent-base "4" + debug "3.1.0" + +http-proxy-agent@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + +http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +https-proxy-agent@^2.2.3: + version "2.2.4" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz#4ee7a737abd92678a293d9b34a1af4d0d08c787b" + integrity sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg== + dependencies: + agent-base "^4.3.0" + debug "^3.1.0" + +https-proxy-agent@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-4.0.0.tgz#702b71fb5520a132a66de1f67541d9e62154d82b" + integrity sha512-zoDhWrkR3of1l9QAL8/scJZyLu8j/gBkcwcaQOZh7Gyh/+uJQzGVETdgT30akuwkpL8HTRfssqI3BZuV18teDg== + dependencies: + agent-base "5" + debug "4" + +https-proxy-agent@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" + integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== + dependencies: + agent-base "6" + debug "4" + +human-signals@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3" + integrity sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw== + +humanize-ms@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" + integrity sha1-xG4xWaKT9riW2ikxbYtv6Lt5u+0= + dependencies: + ms "^2.0.0" + +hyperlinker@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/hyperlinker/-/hyperlinker-1.0.0.tgz#23dc9e38a206b208ee49bc2d6c8ef47027df0c0e" + integrity sha512-Ty8UblRWFEcfSuIaajM34LdPXIhbs1ajEX/BBPv24J+enSVaEVY63xQ6lTO9VRYS5LAoghIG0IDJ+p+IPzKUQQ== + +iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@^0.4.4: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +iconv-lite@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.2.tgz#ce13d1875b0c3a674bd6a04b7f76b01b1b6ded01" + integrity sha512-2y91h5OpQlolefMPmUlivelittSWy0rP+oYVpn6A7GwVHNE8AWzoYOBNmlwks3LobaJxgHCYZAnyNo2GgpNRNQ== 
+ dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + +ieee754@^1.1.13, ieee754@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + +iferr@^0.1.5: + version "0.1.5" + resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" + integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= + +ignore-by-default@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/ignore-by-default/-/ignore-by-default-1.0.1.tgz#48ca6d72f6c6a3af00a9ad4ae6876be3889e2b09" + integrity sha1-SMptcvbGo68Aqa1K5odr44ieKwk= + +ignore-walk@3.0.3, ignore-walk@^3.0.1: + version "3.0.3" + resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" + integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== + dependencies: + minimatch "^3.0.4" + +ignore@^3.3.5: + version "3.3.10" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043" + integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug== + +ignore@^4.0.3, ignore@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" + integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== + +ignore@^5.1.1, ignore@^5.1.4: + version "5.1.8" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57" + integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== + +immediate@^3.2.3: + version "3.3.0" + resolved 
"https://registry.yarnpkg.com/immediate/-/immediate-3.3.0.tgz#1aef225517836bcdf7f2a2de2600c79ff0269266" + integrity sha512-HR7EVodfFUdQCTIeySw+WDRFJlPcLOJbXfwwZ7Oom6tjsvZ3bOkCDJHehQC3nxJrv7+f9XecwazynjU8e4Vw3Q== + +import-fresh@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" + integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= + dependencies: + caller-path "^2.0.0" + resolve-from "^3.0.0" + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.2.2" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.2.2.tgz#fc129c160c5d68235507f4331a6baad186bdbc3e" + integrity sha512-cTPNrlvJT6twpYy+YmKUKrTSjWFs3bjYjAhCwm+z4EOCubZxAuO+hHpRN64TqjEaYSHs7tJAE0w1CKMGmsG/lw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-lazy@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43" + integrity sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM= + +import-local@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" + integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== + dependencies: + pkg-dir "^3.0.0" + resolve-cwd "^2.0.0" + +import-local@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.0.2.tgz#a8cfd0431d1de4a2199703d003e3e62364fa6db6" + integrity sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@0.1.4, imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= + +indent-string@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" + integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= + dependencies: + repeating "^2.0.0" + +indent-string@^3.0.0, indent-string@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289" + integrity sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok= + +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +infer-owner@^1.0.3, infer-owner@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" + integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= + +ini@^1.3.2, ini@^1.3.4, ini@~1.3.0: + version "1.3.5" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" + integrity 
sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== + +ini@^1.3.5: + version "1.3.6" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.6.tgz#f1c46a2a93a253e7b3905115e74d527cd23061a1" + integrity sha512-IZUoxEjNjubzrmvzZU4lKP7OnYmX72XRl3sqkfJhBKweKi5rnGi5+IUdlj/H1M+Ip5JQ1WzaDMOBRY90Ajc5jg== + +init-package-json@^1.10.3: + version "1.10.3" + resolved "https://registry.yarnpkg.com/init-package-json/-/init-package-json-1.10.3.tgz#45ffe2f610a8ca134f2bd1db5637b235070f6cbe" + integrity sha512-zKSiXKhQveNteyhcj1CoOP8tqp1QuxPIPBl8Bid99DGLFqA1p87M6lNgfjJHSBoWJJlidGOv5rWjyYKEB3g2Jw== + dependencies: + glob "^7.1.1" + npm-package-arg "^4.0.0 || ^5.0.0 || ^6.0.0" + promzard "^0.3.0" + read "~1.0.1" + read-package-json "1 || 2" + semver "2.x || 3.x || 4 || 5" + validate-npm-package-license "^3.0.1" + validate-npm-package-name "^3.0.0" + +inquirer@7.3.3, inquirer@^7.1.0: + version "7.3.3" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.3.tgz#04d176b2af04afc157a83fd7c100e98ee0aad003" + integrity sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA== + dependencies: + ansi-escapes "^4.2.1" + chalk "^4.1.0" + cli-cursor "^3.1.0" + cli-width "^3.0.0" + external-editor "^3.0.3" + figures "^3.0.0" + lodash "^4.17.19" + mute-stream "0.0.8" + run-async "^2.4.0" + rxjs "^6.6.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + through "^2.3.6" + +inquirer@^6.2.0: + version "6.5.2" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-6.5.2.tgz#ad50942375d036d327ff528c08bd5fab089928ca" + integrity sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ== + dependencies: + ansi-escapes "^3.2.0" + chalk "^2.4.2" + cli-cursor "^2.1.0" + cli-width "^2.0.0" + external-editor "^3.0.3" + figures "^2.0.0" + lodash "^4.17.12" + mute-stream "0.0.7" + run-async "^2.2.0" + rxjs "^6.4.0" + string-width "^2.1.0" + strip-ansi "^5.1.0" + through "^2.3.6" + 
+internal-slot@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.2.tgz#9c2e9fb3cd8e5e4256c6f45fe310067fcfa378a3" + integrity sha512-2cQNfwhAfJIkU4KZPkDI+Gj5yNNnbqi40W9Gge6dfnk4TocEVm00B3bdiL+JINrbGJil2TeHvM4rETGzk/f/0g== + dependencies: + es-abstract "^1.17.0-next.1" + has "^1.0.3" + side-channel "^1.0.2" + +interpret@^1.0.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" + integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== + +ip-regex@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" + integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= + +ip@1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" + integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +is-accessor-descriptor@^0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= + dependencies: + kind-of "^3.0.2" + +is-accessor-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== + dependencies: + kind-of "^6.0.0" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved 
"https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= + +is-arrayish@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03" + integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== + +is-buffer@^2.0.2: + version "2.0.5" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" + integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== + +is-callable@^1.1.4, is-callable@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.2.tgz#c7c6715cd22d4ddb48d3e19970223aceabb080d9" + integrity sha512-dnMqspv5nU3LoewK2N/y7KLtxtakvTuaCsU9FU50/QDmdbHNy/4/JuRtMHqRU22o3q+W89YQndQEeCVwK+3qrA== + +is-ci@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" + integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== + dependencies: + ci-info "^2.0.0" + +is-core-module@^2.1.0: + version "2.2.0" + resolved 
"https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" + integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== + dependencies: + has "^1.0.3" + +is-data-descriptor@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= + dependencies: + kind-of "^3.0.2" + +is-data-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== + dependencies: + kind-of "^6.0.0" + +is-date-object@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" + integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g== + +is-descriptor@^0.1.0: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== + dependencies: + is-accessor-descriptor "^0.1.6" + is-data-descriptor "^0.1.4" + kind-of "^5.0.0" + +is-descriptor@^1.0.0, is-descriptor@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== + dependencies: + is-accessor-descriptor "^1.0.0" + is-data-descriptor "^1.0.0" + kind-of "^6.0.2" + +is-dir@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-dir/-/is-dir-1.0.0.tgz#41d37f495fccacc05a4778d66e83024c292ba3ff" + integrity sha1-QdN/SV/MrMBaR3jWboMCTCkro/8= + +is-directory@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" + integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= + +is-docker@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.1.1.tgz#4125a88e44e450d384e09047ede71adc2d144156" + integrity sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw== + +is-extendable@^0.1.0, is-extendable@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= + +is-extendable@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== + dependencies: + is-plain-object "^2.0.4" + +is-extglob@^2.1.0, is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-finite@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.1.0.tgz#904135c77fb42c0641d6aa1bcdbc4daa8da082f3" + integrity sha512-cdyMtqX/BOqqNBBiKlIVkytNHm49MtMlYyn1zxzvJKWmFMlGzm+ry5BBfYyeY9YmNKbRSo/o7OX9w9ale0wg3w== + +is-fullwidth-code-point@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" + integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= + dependencies: + is-extglob "^2.1.0" + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" + integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== + dependencies: + is-extglob "^2.1.1" + +is-installed-globally@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.3.2.tgz#fd3efa79ee670d1187233182d5b0a1dd00313141" + integrity sha512-wZ8x1js7Ia0kecP/CHM/3ABkAmujX7WPvQk6uu3Fly/Mk44pySulQpnHG46OMjHGXApINnV4QhY3SWnECO2z5g== + dependencies: + global-dirs "^2.0.1" + is-path-inside "^3.0.1" + +is-negative-zero@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.0.tgz#9553b121b0fac28869da9ed459e20c7543788461" + integrity sha1-lVOxIbD6wohp2p7UWeIMdUN4hGE= + +is-npm@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/is-npm/-/is-npm-4.0.0.tgz#c90dd8380696df87a7a6d823c20d0b12bbe3c84d" + integrity sha512-96ECIfh9xtDDlPylNPXhzjsykHsMJZ18ASpaWzQyBr4YRTcVjUvzaHayDAES2oU/3KpljhHUjtSRNiDwi0F0ig== + +is-number@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" + integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= + dependencies: + kind-of "^3.0.2" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-obj@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= + +is-obj@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982" + integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== + +is-object@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.2.tgz#a56552e1c665c9e950b4a025461da87e72f86fcf" + integrity sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA== + +is-path-inside@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.2.tgz#f5220fc82a3e233757291dddc9c5877f2a1f3017" + integrity sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg== + +is-plain-obj@^1.0.0, is-plain-obj@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" + integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= + +is-plain-obj@^2.0.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" + integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== + +is-plain-object@^2.0.3, is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-plain-object@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" + integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== + +is-potential-custom-element-name@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.0.tgz#0c52e54bcca391bb2c494b21e8626d7336c6e397" + integrity sha1-DFLlS8yjkbssSUsh6GJtczbG45c= + +is-redirect@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24" + integrity sha1-HQPd7VO9jbDzDCbk+V02/HyH3CQ= + +is-regex@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.1.tgz#c6f98aacc546f6cec5468a07b7b153ab564a57b9" + integrity sha512-1+QkEcxiLlB7VEyFtyBg94e08OAsvq7FUBgApTq/w2ymCLyKJgDPsybBENVtA7XCQEgEXxKPonG+mvYRxh/LIg== + dependencies: + has-symbols "^1.0.1" + +is-retry-allowed@^1.0.0, is-retry-allowed@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz#d778488bd0a4666a3be8a1482b9f2baafedea8b4" + integrity sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg== + +is-scoped@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-scoped/-/is-scoped-1.0.0.tgz#449ca98299e713038256289ecb2b540dc437cb30" + integrity sha1-RJypgpnnEwOCViieyytUDcQ3yzA= + dependencies: + scoped-regex "^1.0.0" + +is-ssh@^1.3.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/is-ssh/-/is-ssh-1.3.2.tgz#a4b82ab63d73976fd8263cceee27f99a88bdae2b" + integrity sha512-elEw0/0c2UscLrNG+OAorbP539E3rhliKPg+hDMWN9VwrDXfYK+4PBEykDPfxlYYtQvl84TascnQyobfQLHEhQ== + dependencies: + protocols "^1.1.0" + +is-stream@^1.0.0, is-stream@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= + +is-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3" + integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw== + +is-string@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.5.tgz#40493ed198ef3ff477b8c7f92f644ec82a5cd3a6" + integrity sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ== + +is-symbol@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" + integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== + dependencies: + has-symbols "^1.0.1" + +is-text-path@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-text-path/-/is-text-path-1.0.1.tgz#4e1aa0fb51bfbcb3e92688001397202c1775b66e" + integrity sha1-Thqg+1G/vLPpJogAE5cgLBd1tm4= + dependencies: + text-extensions "^1.0.0" + +is-typedarray@^1.0.0, is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity 
sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= + +is-utf8@^0.2.0, is-utf8@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" + integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI= + +is-windows@^1.0.0, is-windows@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== + +is-wsl@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" + integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= + +is-wsl@^2.1.1, is-wsl@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +is-yarn-global@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/is-yarn-global/-/is-yarn-global-0.3.0.tgz#d502d3382590ea3004893746754c89139973e232" + integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw== + +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= + +isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +isbinaryfile@^3.0.2: + version "3.0.3" + resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.3.tgz#5d6def3edebf6e8ca8cae9c30183a804b5f8be80" + integrity sha512-8cJBL5tTd2OS0dM4jz07wQd5g0dCCqIhUxPIGtZfa5L6hWlvV5MHTITy/DBAsF+Oe2LS1X3krBUhNwaGUWpWxw== + 
dependencies: + buffer-alloc "^1.2.0" + +isbinaryfile@^4.0.0: + version "4.0.6" + resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-4.0.6.tgz#edcb62b224e2b4710830b67498c8e4e5a4d2610b" + integrity sha512-ORrEy+SNVqUhrCaal4hA4fBzhggQQ+BaLntyPOdoEiwlKZW9BZiJXjg3RMiruE4tPEI3pyVPpySHQF/dKWperg== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +isobject@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= + dependencies: + isarray "1.0.0" + +isobject@^3.0.0, isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= + +isstream@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= + +istanbul-lib-coverage@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec" + integrity sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg== + +istanbul-lib-instrument@^4.0.0, istanbul-lib-instrument@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d" + integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ== + dependencies: + "@babel/core" "^7.7.5" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.0.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz#75743ce6d96bb86dc7ee4352cf6366a23f0b1ad9" + integrity sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.2.tgz#d593210e5000683750cb09fc0644e4b6e27fd53b" + integrity sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +istextorbinary@^2.2.1, istextorbinary@^2.5.1: + version "2.6.0" + resolved "https://registry.yarnpkg.com/istextorbinary/-/istextorbinary-2.6.0.tgz#60776315fb0fa3999add276c02c69557b9ca28ab" + integrity sha512-+XRlFseT8B3L9KyjxxLjfXSLMuErKDsd8DBNrsaxoViABMEZlOSCstwmw0qpoFX3+U6yWU1yhLudAe6/lETGGA== + dependencies: + binaryextensions "^2.1.2" + editions "^2.2.0" + textextensions "^2.5.0" + +isurl@^1.0.0-alpha5: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isurl/-/isurl-1.0.0.tgz#b27f4f49f3cdaa3ea44a0a5b7f3462e6edc39d67" + integrity sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w== + dependencies: + has-to-string-tag-x "^1.2.0" + is-object "^1.0.1" + +iterare@1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/iterare/-/iterare-1.2.1.tgz#139c400ff7363690e33abffa33cbba8920f00042" + integrity 
sha512-RKYVTCjAnRthyJes037NX/IiqeidgN1xc3j1RjFfECFp28A1GVwK9nA+i0rJPaHqSZwygLzRnFlzUuHFoWWy+Q== + +jake@^10.6.1: + version "10.8.2" + resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.2.tgz#ebc9de8558160a66d82d0eadc6a2e58fbc500a7b" + integrity sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A== + dependencies: + async "0.9.x" + chalk "^2.4.2" + filelist "^1.0.1" + minimatch "^3.0.4" + +jest-changed-files@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-26.6.2.tgz#f6198479e1cc66f22f9ae1e22acaa0b429c042d0" + integrity sha512-fDS7szLcY9sCtIip8Fjry9oGf3I2ht/QT21bAHm5Dmf0mD4X3ReNUf17y+bO6fR8WgbIZTlbyG1ak/53cbRzKQ== + dependencies: + "@jest/types" "^26.6.2" + execa "^4.0.0" + throat "^5.0.0" + +jest-cli@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-26.6.3.tgz#43117cfef24bc4cd691a174a8796a532e135e92a" + integrity sha512-GF9noBSa9t08pSyl3CY4frMrqp+aQXFGFkf5hEPbh/pIUFYWMK6ZLTfbmadxJVcJrdRoChlWQsA2VkJcDFK8hg== + dependencies: + "@jest/core" "^26.6.3" + "@jest/test-result" "^26.6.2" + "@jest/types" "^26.6.2" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.4" + import-local "^3.0.2" + is-ci "^2.0.0" + jest-config "^26.6.3" + jest-util "^26.6.2" + jest-validate "^26.6.2" + prompts "^2.0.1" + yargs "^15.4.1" + +jest-config@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-26.6.3.tgz#64f41444eef9eb03dc51d5c53b75c8c71f645349" + integrity sha512-t5qdIj/bCj2j7NFVHb2nFB4aUdfucDn3JRKgrZnplb8nieAirAzRSHP8uDEd+qV6ygzg9Pz4YG7UTJf94LPSyg== + dependencies: + "@babel/core" "^7.1.0" + "@jest/test-sequencer" "^26.6.3" + "@jest/types" "^26.6.2" + babel-jest "^26.6.3" + chalk "^4.0.0" + deepmerge "^4.2.2" + glob "^7.1.1" + graceful-fs "^4.2.4" + jest-environment-jsdom "^26.6.2" + jest-environment-node "^26.6.2" + jest-get-type "^26.3.0" + jest-jasmine2 "^26.6.3" + jest-regex-util "^26.0.0" + 
jest-resolve "^26.6.2" + jest-util "^26.6.2" + jest-validate "^26.6.2" + micromatch "^4.0.2" + pretty-format "^26.6.2" + +jest-diff@^26.0.0, jest-diff@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-26.6.2.tgz#1aa7468b52c3a68d7d5c5fdcdfcd5e49bd164394" + integrity sha512-6m+9Z3Gv9wN0WFVasqjCL/06+EFCMTqDEUl/b87HYK2rAPTyfz4ZIuSlPhY51PIQRWx5TaxeF1qmXKe9gfN3sA== + dependencies: + chalk "^4.0.0" + diff-sequences "^26.6.2" + jest-get-type "^26.3.0" + pretty-format "^26.6.2" + +jest-docblock@^26.0.0: + version "26.0.0" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-26.0.0.tgz#3e2fa20899fc928cb13bd0ff68bd3711a36889b5" + integrity sha512-RDZ4Iz3QbtRWycd8bUEPxQsTlYazfYn/h5R65Fc6gOfwozFhoImx+affzky/FFBuqISPTqjXomoIGJVKBWoo0w== + dependencies: + detect-newline "^3.0.0" + +jest-each@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-26.6.2.tgz#02526438a77a67401c8a6382dfe5999952c167cb" + integrity sha512-Mer/f0KaATbjl8MCJ+0GEpNdqmnVmDYqCTJYTvoo7rqmRiDllmp2AYN+06F93nXcY3ur9ShIjS+CO/uD+BbH4A== + dependencies: + "@jest/types" "^26.6.2" + chalk "^4.0.0" + jest-get-type "^26.3.0" + jest-util "^26.6.2" + pretty-format "^26.6.2" + +jest-environment-jsdom@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-26.6.2.tgz#78d09fe9cf019a357009b9b7e1f101d23bd1da3e" + integrity sha512-jgPqCruTlt3Kwqg5/WVFyHIOJHsiAvhcp2qiR2QQstuG9yWox5+iHpU3ZrcBxW14T4fe5Z68jAfLRh7joCSP2Q== + dependencies: + "@jest/environment" "^26.6.2" + "@jest/fake-timers" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + jest-mock "^26.6.2" + jest-util "^26.6.2" + jsdom "^16.4.0" + +jest-environment-node@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-26.6.2.tgz#824e4c7fb4944646356f11ac75b229b0035f2b0c" + integrity 
sha512-zhtMio3Exty18dy8ee8eJ9kjnRyZC1N4C1Nt/VShN1apyXc8rWGtJ9lI7vqiWcyyXS4BVSEn9lxAM2D+07/Tag== + dependencies: + "@jest/environment" "^26.6.2" + "@jest/fake-timers" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + jest-mock "^26.6.2" + jest-util "^26.6.2" + +jest-get-type@^26.3.0: + version "26.3.0" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-26.3.0.tgz#e97dc3c3f53c2b406ca7afaed4493b1d099199e0" + integrity sha512-TpfaviN1R2pQWkIihlfEanwOXK0zcxrKEE4MlU6Tn7keoXdN6/3gK/xl0yEh8DOunn5pOVGKf8hB4R9gVh04ig== + +jest-haste-map@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-26.6.2.tgz#dd7e60fe7dc0e9f911a23d79c5ff7fb5c2cafeaa" + integrity sha512-easWIJXIw71B2RdR8kgqpjQrbMRWQBgiBwXYEhtGUTaX+doCjBheluShdDMeR8IMfJiTqH4+zfhtg29apJf/8w== + dependencies: + "@jest/types" "^26.6.2" + "@types/graceful-fs" "^4.1.2" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.4" + jest-regex-util "^26.0.0" + jest-serializer "^26.6.2" + jest-util "^26.6.2" + jest-worker "^26.6.2" + micromatch "^4.0.2" + sane "^4.0.3" + walker "^1.0.7" + optionalDependencies: + fsevents "^2.1.2" + +jest-jasmine2@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-26.6.3.tgz#adc3cf915deacb5212c93b9f3547cd12958f2edd" + integrity sha512-kPKUrQtc8aYwBV7CqBg5pu+tmYXlvFlSFYn18ev4gPFtrRzB15N2gW/Roew3187q2w2eHuu0MU9TJz6w0/nPEg== + dependencies: + "@babel/traverse" "^7.1.0" + "@jest/environment" "^26.6.2" + "@jest/source-map" "^26.6.2" + "@jest/test-result" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + expect "^26.6.2" + is-generator-fn "^2.0.0" + jest-each "^26.6.2" + jest-matcher-utils "^26.6.2" + jest-message-util "^26.6.2" + jest-runtime "^26.6.3" + jest-snapshot "^26.6.2" + jest-util "^26.6.2" + pretty-format "^26.6.2" + throat "^5.0.0" + +jest-leak-detector@^26.6.2: + version "26.6.2" + resolved 
"https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-26.6.2.tgz#7717cf118b92238f2eba65054c8a0c9c653a91af" + integrity sha512-i4xlXpsVSMeKvg2cEKdfhh0H39qlJlP5Ex1yQxwF9ubahboQYMgTtz5oML35AVA3B4Eu+YsmwaiKVev9KCvLxg== + dependencies: + jest-get-type "^26.3.0" + pretty-format "^26.6.2" + +jest-matcher-utils@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-26.6.2.tgz#8e6fd6e863c8b2d31ac6472eeb237bc595e53e7a" + integrity sha512-llnc8vQgYcNqDrqRDXWwMr9i7rS5XFiCwvh6DTP7Jqa2mqpcCBBlpCbn+trkG0KNhPu/h8rzyBkriOtBstvWhw== + dependencies: + chalk "^4.0.0" + jest-diff "^26.6.2" + jest-get-type "^26.3.0" + pretty-format "^26.6.2" + +jest-message-util@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-26.6.2.tgz#58173744ad6fc0506b5d21150b9be56ef001ca07" + integrity sha512-rGiLePzQ3AzwUshu2+Rn+UMFk0pHN58sOG+IaJbk5Jxuqo3NYO1U2/MIR4S1sKgsoYSXSzdtSa0TgrmtUwEbmA== + dependencies: + "@babel/code-frame" "^7.0.0" + "@jest/types" "^26.6.2" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.4" + micromatch "^4.0.2" + pretty-format "^26.6.2" + slash "^3.0.0" + stack-utils "^2.0.2" + +jest-mock@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-26.6.2.tgz#d6cb712b041ed47fe0d9b6fc3474bc6543feb302" + integrity sha512-YyFjePHHp1LzpzYcmgqkJ0nm0gg/lJx2aZFzFy1S6eUqNjXsOqTK10zNRff2dNfssgokjkG65OlWNcIlgd3zew== + dependencies: + "@jest/types" "^26.6.2" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^26.0.0: + version "26.0.0" + resolved 
"https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-26.0.0.tgz#d25e7184b36e39fd466c3bc41be0971e821fee28" + integrity sha512-Gv3ZIs/nA48/Zvjrl34bf+oD76JHiGDUxNOVgUjh3j890sblXryjY4rss71fPtD/njchl6PSE2hIhvyWa1eT0A== + +jest-resolve-dependencies@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-26.6.3.tgz#6680859ee5d22ee5dcd961fe4871f59f4c784fb6" + integrity sha512-pVwUjJkxbhe4RY8QEWzN3vns2kqyuldKpxlxJlzEYfKSvY6/bMvxoFrYYzUO1Gx28yKWN37qyV7rIoIp2h8fTg== + dependencies: + "@jest/types" "^26.6.2" + jest-regex-util "^26.0.0" + jest-snapshot "^26.6.2" + +jest-resolve@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-26.6.2.tgz#a3ab1517217f469b504f1b56603c5bb541fbb507" + integrity sha512-sOxsZOq25mT1wRsfHcbtkInS+Ek7Q8jCHUB0ZUTP0tc/c41QHriU/NunqMfCUWsL4H3MHpvQD4QR9kSYhS7UvQ== + dependencies: + "@jest/types" "^26.6.2" + chalk "^4.0.0" + graceful-fs "^4.2.4" + jest-pnp-resolver "^1.2.2" + jest-util "^26.6.2" + read-pkg-up "^7.0.1" + resolve "^1.18.1" + slash "^3.0.0" + +jest-runner@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-26.6.3.tgz#2d1fed3d46e10f233fd1dbd3bfaa3fe8924be159" + integrity sha512-atgKpRHnaA2OvByG/HpGA4g6CSPS/1LK0jK3gATJAoptC1ojltpmVlYC3TYgdmGp+GLuhzpH30Gvs36szSL2JQ== + dependencies: + "@jest/console" "^26.6.2" + "@jest/environment" "^26.6.2" + "@jest/test-result" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.7.1" + exit "^0.1.2" + graceful-fs "^4.2.4" + jest-config "^26.6.3" + jest-docblock "^26.0.0" + jest-haste-map "^26.6.2" + jest-leak-detector "^26.6.2" + jest-message-util "^26.6.2" + jest-resolve "^26.6.2" + jest-runtime "^26.6.3" + jest-util "^26.6.2" + jest-worker "^26.6.2" + source-map-support "^0.5.6" + throat "^5.0.0" + +jest-runtime@^26.6.3: + version "26.6.3" + resolved 
"https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-26.6.3.tgz#4f64efbcfac398331b74b4b3c82d27d401b8fa2b" + integrity sha512-lrzyR3N8sacTAMeonbqpnSka1dHNux2uk0qqDXVkMv2c/A3wYnvQ4EXuI013Y6+gSKSCxdaczvf4HF0mVXHRdw== + dependencies: + "@jest/console" "^26.6.2" + "@jest/environment" "^26.6.2" + "@jest/fake-timers" "^26.6.2" + "@jest/globals" "^26.6.2" + "@jest/source-map" "^26.6.2" + "@jest/test-result" "^26.6.2" + "@jest/transform" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/yargs" "^15.0.0" + chalk "^4.0.0" + cjs-module-lexer "^0.6.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.3" + graceful-fs "^4.2.4" + jest-config "^26.6.3" + jest-haste-map "^26.6.2" + jest-message-util "^26.6.2" + jest-mock "^26.6.2" + jest-regex-util "^26.0.0" + jest-resolve "^26.6.2" + jest-snapshot "^26.6.2" + jest-util "^26.6.2" + jest-validate "^26.6.2" + slash "^3.0.0" + strip-bom "^4.0.0" + yargs "^15.4.1" + +jest-serializer@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-26.6.2.tgz#d139aafd46957d3a448f3a6cdabe2919ba0742d1" + integrity sha512-S5wqyz0DXnNJPd/xfIzZ5Xnp1HrJWBczg8mMfMpN78OJ5eDxXyf+Ygld9wX1DnUWbIbhM1YDY95NjR4CBXkb2g== + dependencies: + "@types/node" "*" + graceful-fs "^4.2.4" + +jest-snapshot@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-26.6.2.tgz#f3b0af1acb223316850bd14e1beea9837fb39c84" + integrity sha512-OLhxz05EzUtsAmOMzuupt1lHYXCNib0ECyuZ/PZOx9TrZcC8vL0x+DUG3TL+GLX3yHG45e6YGjIm0XwDc3q3og== + dependencies: + "@babel/types" "^7.0.0" + "@jest/types" "^26.6.2" + "@types/babel__traverse" "^7.0.4" + "@types/prettier" "^2.0.0" + chalk "^4.0.0" + expect "^26.6.2" + graceful-fs "^4.2.4" + jest-diff "^26.6.2" + jest-get-type "^26.3.0" + jest-haste-map "^26.6.2" + jest-matcher-utils "^26.6.2" + jest-message-util "^26.6.2" + jest-resolve "^26.6.2" + natural-compare "^1.4.0" + pretty-format "^26.6.2" + semver "^7.3.2" + +jest-util@^26.1.0, 
jest-util@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-26.6.2.tgz#907535dbe4d5a6cb4c47ac9b926f6af29576cbc1" + integrity sha512-MDW0fKfsn0OI7MS7Euz6h8HNDXVQ0gaM9uW6RjfDmd1DAFcaxX9OqIakHIqhbnmF08Cf2DLDG+ulq8YQQ0Lp0Q== + dependencies: + "@jest/types" "^26.6.2" + "@types/node" "*" + chalk "^4.0.0" + graceful-fs "^4.2.4" + is-ci "^2.0.0" + micromatch "^4.0.2" + +jest-validate@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-26.6.2.tgz#23d380971587150467342911c3d7b4ac57ab20ec" + integrity sha512-NEYZ9Aeyj0i5rQqbq+tpIOom0YS1u2MVu6+euBsvpgIme+FOfRmoC4R5p0JiAUpaFvFy24xgrpMknarR/93XjQ== + dependencies: + "@jest/types" "^26.6.2" + camelcase "^6.0.0" + chalk "^4.0.0" + jest-get-type "^26.3.0" + leven "^3.1.0" + pretty-format "^26.6.2" + +jest-watcher@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-26.6.2.tgz#a5b683b8f9d68dbcb1d7dae32172d2cca0592975" + integrity sha512-WKJob0P/Em2csiVthsI68p6aGKTIcsfjH9Gsx1f0A3Italz43e3ho0geSAVsmj09RWOELP1AZ/DXyJgOgDKxXQ== + dependencies: + "@jest/test-result" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + jest-util "^26.6.2" + string-length "^4.0.1" + +jest-worker@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" + integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^7.0.0" + +jest@26.6.3, jest@^26.4.2, jest@~26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest/-/jest-26.6.3.tgz#40e8fdbe48f00dfa1f0ce8121ca74b88ac9148ef" + integrity sha512-lGS5PXGAzR4RF7V5+XObhqz2KZIDUA1yD0DG6pBVmy10eh0ZIXQImRuzocsI/N2XZ1GrLFwTS27In2i2jlpq1Q== + dependencies: + "@jest/core" "^26.6.3" + import-local "^3.0.2" + jest-cli 
"^26.6.3" + +js-message@1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/js-message/-/js-message-1.0.7.tgz#fbddd053c7a47021871bb8b2c95397cc17c20e47" + integrity sha512-efJLHhLjIyKRewNS9EGZ4UpI8NguuL6fKkhRxVuMmrGV2xN/0APGdQYwLFky5w9naebSZ0OwAGp0G6/2Cg90rA== + +js-queue@2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/js-queue/-/js-queue-2.0.2.tgz#0be590338f903b36c73d33c31883a821412cd482" + integrity sha512-pbKLsbCfi7kriM3s1J4DDCo7jQkI58zPLHi0heXPzPlj0hjUsm+FesPUbE0DSbIVIK503A36aUBoCN7eMFedkA== + dependencies: + easy-stack "^1.0.1" + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@3.14.0, js-yaml@^3.13.1: + version "3.14.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482" + integrity sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^3.10.0, js-yaml@^3.14.0: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= + +jsdom@^16.4.0: + version "16.4.0" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.4.0.tgz#36005bde2d136f73eee1a830c6d45e55408edddb" + integrity sha512-lYMm3wYdgPhrl7pDcRmvzPhhrGVBeVhPIqeHjzeiHN3DFmD1RBpbExbi8vU7BJdH8VAZYovR8DMt0PNNDM7k8w== + dependencies: + abab "^2.0.3" + 
acorn "^7.1.1" + acorn-globals "^6.0.0" + cssom "^0.4.4" + cssstyle "^2.2.0" + data-urls "^2.0.0" + decimal.js "^10.2.0" + domexception "^2.0.1" + escodegen "^1.14.1" + html-encoding-sniffer "^2.0.1" + is-potential-custom-element-name "^1.0.0" + nwsapi "^2.2.0" + parse5 "5.1.1" + request "^2.88.2" + request-promise-native "^1.0.8" + saxes "^5.0.0" + symbol-tree "^3.2.4" + tough-cookie "^3.0.1" + w3c-hr-time "^1.0.2" + w3c-xmlserializer "^2.0.0" + webidl-conversions "^6.1.0" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + ws "^7.2.3" + xml-name-validator "^3.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +json-buffer@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" + integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg= + +json-colorizer@2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/json-colorizer/-/json-colorizer-2.2.2.tgz#07c2ac8cef36558075948e1566c6cfb4ac1668e6" + integrity sha512-56oZtwV1piXrQnRNTtJeqRv+B9Y/dXAYLqBBaYl/COcUdoZxgLBLAO88+CnkbT6MxNs0c5E9mPBIb2sFcNz3vw== + dependencies: + chalk "^2.4.1" + lodash.get "^4.4.2" + +json-parse-better-errors@^1.0.0, json-parse-better-errors@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" + integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== + +json-parse-even-better-errors@^2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity 
sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-ref-parser@^9.0.6: + version "9.0.6" + resolved "https://registry.yarnpkg.com/json-schema-ref-parser/-/json-schema-ref-parser-9.0.6.tgz#fc89a5e6b853f2abe8c0af30d3874196526adb60" + integrity sha512-z0JGv7rRD3CnJbZY/qCpscyArdtLJhr/wRBmFUdoZ8xMjsFyNdILSprG2degqRLjBjyhZHAEBpGOxniO9rKTxA== + dependencies: + "@apidevtools/json-schema-ref-parser" "9.0.6" + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= + +json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= + +json5@2.x, json5@^2.1.2: + version "2.1.3" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.3.tgz#c9b0f7fa9233bfe5807fe66fcf3a5617ed597d43" + integrity sha512-KXPvOm8K9IJKFM0bmdn8QXh7udDh1g/giieX0NLCaMnb4hEiVFqnop2ImTXCc5e0/oHz3LTqmHGtExn5hfMkOA== + dependencies: + minimist "^1.2.5" + +json5@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" + integrity 
sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + dependencies: + minimist "^1.2.0" + +json5@^2.1.3: + version "2.2.0" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.0.tgz#2dfefe720c6ba525d9ebd909950f0515316c89a3" + integrity sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA== + dependencies: + minimist "^1.2.5" + +jsonfile@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" + integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= + optionalDependencies: + graceful-fs "^4.1.6" + +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonparse@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" + integrity sha1-P02uSpH6wxX3EGL4UhzCOfE2YoA= + +jsprim@^1.2.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" + integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.2.3" + verror "1.10.0" + +"jsx-ast-utils@^2.4.1 || ^3.0.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.1.0.tgz#642f1d7b88aa6d7eb9d8f2210e166478444fa891" + integrity sha512-d4/UOjg+mxAWxCiF0c5UTSwyqbchkbqCvK87aBovhnh8GtysTjWmgC63tY0cJx/HzGgm9qnA147jVBdpOiQ2RA== + dependencies: + array-includes "^3.1.1" + object.assign "^4.1.1" + +keyv@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" + 
integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== + dependencies: + json-buffer "3.0.0" + +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= + dependencies: + is-buffer "^1.1.5" + +kind-of@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" + integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= + dependencies: + is-buffer "^1.1.5" + +kind-of@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== + +kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +kuler@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3" + integrity sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A== + +latest-version@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face" + integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA== + dependencies: + package-json "^6.3.0" 
+ +lazy-cache@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-2.0.2.tgz#b9190a4f913354694840859f8a8f7084d8822264" + integrity sha1-uRkKT5EzVGlIQIWfio9whNiCImQ= + dependencies: + set-getter "^0.1.0" + +lerna@^3.22.1: + version "3.22.1" + resolved "https://registry.yarnpkg.com/lerna/-/lerna-3.22.1.tgz#82027ac3da9c627fd8bf02ccfeff806a98e65b62" + integrity sha512-vk1lfVRFm+UuEFA7wkLKeSF7Iz13W+N/vFd48aW2yuS7Kv0RbNm2/qcDPV863056LMfkRlsEe+QYOw3palj5Lg== + dependencies: + "@lerna/add" "3.21.0" + "@lerna/bootstrap" "3.21.0" + "@lerna/changed" "3.21.0" + "@lerna/clean" "3.21.0" + "@lerna/cli" "3.18.5" + "@lerna/create" "3.22.0" + "@lerna/diff" "3.21.0" + "@lerna/exec" "3.21.0" + "@lerna/import" "3.22.0" + "@lerna/info" "3.21.0" + "@lerna/init" "3.21.0" + "@lerna/link" "3.21.0" + "@lerna/list" "3.21.0" + "@lerna/publish" "3.22.1" + "@lerna/run" "3.21.0" + "@lerna/version" "3.22.1" + import-local "^2.0.0" + npmlog "^4.1.2" + +level-concat-iterator@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz#1d1009cf108340252cb38c51f9727311193e6263" + integrity sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw== + +level-errors@2.0.1, level-errors@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/level-errors/-/level-errors-2.0.1.tgz#2132a677bf4e679ce029f517c2f17432800c05c8" + integrity sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw== + dependencies: + errno "~0.1.1" + +level-iterator-stream@~4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/level-iterator-stream/-/level-iterator-stream-4.0.2.tgz#7ceba69b713b0d7e22fcc0d1f128ccdc8a24f79c" + integrity sha512-ZSthfEqzGSOMWoUGhTXdX9jv26d32XJuHz/5YnuHZzH6wldfWMOVwI9TBtKcya4BKTyTt3XVA0A3cF3q5CY30Q== + dependencies: + inherits "^2.0.4" + readable-stream "^3.4.0" + xtend "^4.0.2" + +level-supports@~1.0.0: + 
version "1.0.1" + resolved "https://registry.yarnpkg.com/level-supports/-/level-supports-1.0.1.tgz#2f530a596834c7301622521988e2c36bb77d122d" + integrity sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg== + dependencies: + xtend "^4.0.2" + +leveldown@5.6.0: + version "5.6.0" + resolved "https://registry.yarnpkg.com/leveldown/-/leveldown-5.6.0.tgz#16ba937bb2991c6094e13ac5a6898ee66d3eee98" + integrity sha512-iB8O/7Db9lPaITU1aA2txU/cBEXAt4vWwKQRrrWuS6XDgbP4QZGj9BL2aNbwb002atoQ/lIotJkfyzz+ygQnUQ== + dependencies: + abstract-leveldown "~6.2.1" + napi-macros "~2.0.0" + node-gyp-build "~4.1.0" + +levelup@4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/levelup/-/levelup-4.4.0.tgz#f89da3a228c38deb49c48f88a70fb71f01cafed6" + integrity sha512-94++VFO3qN95cM/d6eBXvd894oJE0w3cInq9USsyQzzoJxmiYzPAocNcuGCPGGjoXqDVJcr3C1jzt1TSjyaiLQ== + dependencies: + deferred-leveldown "~5.3.0" + level-errors "~2.0.0" + level-iterator-stream "~4.0.0" + level-supports "~1.0.0" + xtend "~4.0.0" + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +lines-and-columns@^1.1.6: + version "1.1.6" + resolved 
"https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" + integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= + +load-json-file@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" + integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + pinkie-promise "^2.0.0" + strip-bom "^2.0.0" + +load-json-file@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" + integrity sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg= + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + strip-bom "^3.0.0" + +load-json-file@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b" + integrity sha1-L19Fq5HjMhYjT9U62rZo607AmTs= + dependencies: + graceful-fs "^4.1.2" + parse-json "^4.0.0" + pify "^3.0.0" + strip-bom "^3.0.0" + +load-json-file@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-5.3.0.tgz#4d3c1e01fa1c03ea78a60ac7af932c9ce53403f3" + integrity sha512-cJGP40Jc/VXUsp8/OrnyKyTZ1y6v/dphm3bioS+RrKXjK2BB6wHUd6JptZEFDGgGahMT+InnZO5i1Ei9mpC8Bw== + dependencies: + graceful-fs "^4.1.15" + parse-json "^4.0.0" + pify "^4.0.1" + strip-bom "^3.0.0" + type-fest "^0.3.0" + +load-json-file@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-6.2.0.tgz#5c7770b42cafa97074ca2848707c61662f4251a1" + integrity sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ== + dependencies: + graceful-fs "^4.1.15" + parse-json "^5.0.0" + strip-bom "^4.0.0" + type-fest "^0.6.0" + +locate-path@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +locate-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +lodash-es@^4.17.11: + version "4.17.15" + resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.15.tgz#21bd96839354412f23d7a10340e5eac6ee455d78" + integrity sha512-rlrc3yU3+JNOpZ9zj5pQtxnx2THmvRykwL4Xlxoa8I9lHBlVbbyPhgyPMioxVZ4NqyxaVVtaJnzsyOidQIhyyQ== + +lodash._reinterpolate@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d" + integrity sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0= + +lodash.clonedeep@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" + integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8= + +lodash.get@^4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" + integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk= + +lodash.ismatch@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz#756cb5150ca3ba6f11085a78849645f188f85f37" + integrity sha1-dWy1FQyjum8RCFp4hJZF8Yj4Xzc= + 
+lodash.memoize@4.x: + version "4.1.2" + resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= + +lodash.merge@^4.6.1, lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.set@^4.3.2: + version "4.3.2" + resolved "https://registry.yarnpkg.com/lodash.set/-/lodash.set-4.3.2.tgz#d8757b1da807dde24816b0d6a84bea1a76230b23" + integrity sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM= + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= + +lodash.template@^4.0.2, lodash.template@^4.4.0, lodash.template@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-4.5.0.tgz#f976195cf3f347d0d5f52483569fe8031ccce8ab" + integrity sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A== + dependencies: + lodash._reinterpolate "^3.0.0" + lodash.templatesettings "^4.0.0" + +lodash.templatesettings@^4.0.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz#e481310f049d3cf6d47e912ad09313b154f0fb33" + integrity sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ== + dependencies: + lodash._reinterpolate "^3.0.0" + +lodash.uniq@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" + integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= + +lodash.zipobject@^4.1.3: + version "4.1.3" + resolved 
"https://registry.yarnpkg.com/lodash.zipobject/-/lodash.zipobject-4.1.3.tgz#b399f5aba8ff62a746f6979bf20b214f964dbef8" + integrity sha1-s5n1q6j/YqdG9peb8gshT5ZNvvg= + +lodash@4.17.20, lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.12, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.2.1, lodash@^4.5.1: + version "4.17.20" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.20.tgz#b44a9b6297bcb698f1c51a3545a2b3b368d59c52" + integrity sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA== + +log-symbols@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" + integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== + dependencies: + chalk "^2.0.1" + +logform@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/logform/-/logform-2.2.0.tgz#40f036d19161fc76b68ab50fdc7fe495544492f2" + integrity sha512-N0qPlqfypFx7UHNn4B3lzS/b0uLqt2hmuoa+PpuXNYgozdJYAyauF5Ky0BWVjrxDlMWiT3qN4zPq3vVAfZy7Yg== + dependencies: + colors "^1.2.1" + fast-safe-stringify "^2.0.4" + fecha "^4.2.0" + ms "^2.1.1" + triple-beam "^1.3.0" + +loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +loud-rejection@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" + integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= + dependencies: + currently-unhandled "^0.4.1" + signal-exit "^3.0.0" + +lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" + integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== + +lowercase-keys@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" + integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== + +lru-cache@6.0.0, lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lru-cache@^4.0.1: + version "4.1.5" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" + integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== + dependencies: + pseudomap "^1.0.2" + yallist "^2.1.2" + +lru-cache@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== + dependencies: + yallist "^3.0.2" + +lunr@^2.3.9: + version "2.3.9" + resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" + integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== + +macos-release@^2.2.0: + version "2.4.1" + resolved "https://registry.yarnpkg.com/macos-release/-/macos-release-2.4.1.tgz#64033d0ec6a5e6375155a74b1a1eba8e509820ac" + integrity sha512-H/QHeBIN1fIGJX517pvK8IEK53yQOW7YcEI55oYtgjDdoCQQz7eJS94qt5kNrscReEyuD/JcdFCm2XBEcGOITg== + +make-dir@^1.0.0, make-dir@^1.1.0: + version 
"1.3.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" + integrity sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ== + dependencies: + pify "^3.0.0" + +make-dir@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" + integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== + dependencies: + pify "^4.0.1" + semver "^5.6.0" + +make-dir@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +make-error@1.x, make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + +make-fetch-happen@^5.0.0: + version "5.0.2" + resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-5.0.2.tgz#aa8387104f2687edca01c8687ee45013d02d19bd" + integrity sha512-07JHC0r1ykIoruKO8ifMXu+xEU8qOXDFETylktdug6vJDACnP+HKevOu3PXyNPzFyTSlz8vrBYlBO1JZRe8Cag== + dependencies: + agentkeepalive "^3.4.1" + cacache "^12.0.0" + http-cache-semantics "^3.8.1" + http-proxy-agent "^2.1.0" + https-proxy-agent "^2.2.3" + lru-cache "^5.1.1" + mississippi "^3.0.0" + node-fetch-npm "^2.0.2" + promise-retry "^1.1.1" + socks-proxy-agent "^4.0.0" + ssri "^6.0.0" + +makeerror@1.0.x: + version "1.0.11" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.11.tgz#e01a5c9109f2af79660e4e8b9587790184f5a96c" + integrity sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw= + dependencies: + tmpl "1.0.x" + +map-cache@^0.2.2: + version "0.2.2" + 
resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= + +map-obj@^1.0.0, map-obj@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" + integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= + +map-obj@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-2.0.0.tgz#a65cd29087a92598b8791257a523e021222ac1f9" + integrity sha1-plzSkIepJZi4eRJXpSPgISIqwfk= + +map-obj@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.1.0.tgz#b91221b542734b9f14256c0132c897c5d7256fd5" + integrity sha512-glc9y00wgtwcDmp7GaE/0b0OnxpNJsVf3ael/An6Fe2Q51LLwN1er6sdomLRzz5h0+yMpiYLhWYF5R7HeqVd4g== + +map-stream@~0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/map-stream/-/map-stream-0.1.0.tgz#e56aa94c4c8055a16404a0674b78f215f7c8e194" + integrity sha1-5WqpTEyAVaFkBKBnS3jyFffI4ZQ= + +map-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= + dependencies: + object-visit "^1.0.0" + +marked@^1.1.1: + version "1.2.5" + resolved "https://registry.yarnpkg.com/marked/-/marked-1.2.5.tgz#a44b31f2a0b8b5bfd610f00d55d1952d1ac1dfdb" + integrity sha512-2AlqgYnVPOc9WDyWu7S5DJaEZsfk6dNh/neatQ3IHUW4QLutM/VPSH9lG7bif+XjFWc9K9XR3QvR+fXuECmfdA== + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= + +media-typer@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-1.1.0.tgz#6ab74b8f2d3320f2064b2a87a38e7931ff3a5561" + integrity sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw== + 
+mem-fs-editor@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/mem-fs-editor/-/mem-fs-editor-5.1.0.tgz#51972241640be8567680a04f7adaffe5fc603667" + integrity sha512-2Yt2GCYEbcotYbIJagmow4gEtHDqzpq5XN94+yAx/NT5+bGqIjkXnm3KCUQfE6kRfScGp9IZknScoGRKu8L78w== + dependencies: + commondir "^1.0.1" + deep-extend "^0.6.0" + ejs "^2.5.9" + glob "^7.0.3" + globby "^8.0.1" + isbinaryfile "^3.0.2" + mkdirp "^0.5.0" + multimatch "^2.0.0" + rimraf "^2.2.8" + through2 "^2.0.0" + vinyl "^2.0.1" + +mem-fs-editor@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/mem-fs-editor/-/mem-fs-editor-6.0.0.tgz#d63607cf0a52fe6963fc376c6a7aa52db3edabab" + integrity sha512-e0WfJAMm8Gv1mP5fEq/Blzy6Lt1VbLg7gNnZmZak7nhrBTibs+c6nQ4SKs/ZyJYHS1mFgDJeopsLAv7Ow0FMFg== + dependencies: + commondir "^1.0.1" + deep-extend "^0.6.0" + ejs "^2.6.1" + glob "^7.1.4" + globby "^9.2.0" + isbinaryfile "^4.0.0" + mkdirp "^0.5.0" + multimatch "^4.0.0" + rimraf "^2.6.3" + through2 "^3.0.1" + vinyl "^2.2.0" + +mem-fs-editor@^7.0.1: + version "7.1.0" + resolved "https://registry.yarnpkg.com/mem-fs-editor/-/mem-fs-editor-7.1.0.tgz#2a16f143228df87bf918874556723a7ee73bfe88" + integrity sha512-BH6QEqCXSqGeX48V7zu+e3cMwHU7x640NB8Zk8VNvVZniz+p4FK60pMx/3yfkzo6miI6G3a8pH6z7FeuIzqrzA== + dependencies: + commondir "^1.0.1" + deep-extend "^0.6.0" + ejs "^3.1.5" + glob "^7.1.4" + globby "^9.2.0" + isbinaryfile "^4.0.0" + mkdirp "^1.0.0" + multimatch "^4.0.0" + rimraf "^3.0.0" + through2 "^3.0.2" + vinyl "^2.2.1" + +mem-fs@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mem-fs/-/mem-fs-1.2.0.tgz#5f29b2d02a5875cd14cd836c388385892d556cde" + integrity sha512-b8g0jWKdl8pM0LqAPdK9i8ERL7nYrzmJfRhxMiWH2uYdfYnb7uXnmwVb0ZGe7xyEl4lj+nLIU3yf4zPUT+XsVQ== + dependencies: + through2 "^3.0.0" + vinyl "^2.0.1" + vinyl-file "^3.0.0" + +meow@^3.3.0, meow@^3.6.0, meow@^3.7.0: + version "3.7.0" + resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" + 
integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= + dependencies: + camelcase-keys "^2.0.0" + decamelize "^1.1.2" + loud-rejection "^1.0.0" + map-obj "^1.0.1" + minimist "^1.1.3" + normalize-package-data "^2.3.4" + object-assign "^4.0.1" + read-pkg-up "^1.0.1" + redent "^1.0.0" + trim-newlines "^1.0.0" + +meow@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/meow/-/meow-4.0.1.tgz#d48598f6f4b1472f35bf6317a95945ace347f975" + integrity sha512-xcSBHD5Z86zaOc+781KrupuHAzeGXSLtiAOmBsiLDiPSaYSB6hdew2ng9EBAnZ62jagG9MHAOdxpDi/lWBFJ/A== + dependencies: + camelcase-keys "^4.0.0" + decamelize-keys "^1.0.0" + loud-rejection "^1.0.0" + minimist "^1.1.3" + minimist-options "^3.0.1" + normalize-package-data "^2.3.4" + read-pkg-up "^3.0.0" + redent "^2.0.0" + trim-newlines "^2.0.0" + +meow@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/meow/-/meow-8.0.0.tgz#1aa10ee61046719e334ffdc038bb5069250ec99a" + integrity sha512-nbsTRz2fwniJBFgUkcdISq8y/q9n9VbiHYbfwklFh5V4V2uAcxtKQkDc0yCLPM/kP0d+inZBewn3zJqewHE7kg== + dependencies: + "@types/minimist" "^1.2.0" + camelcase-keys "^6.2.2" + decamelize-keys "^1.1.0" + hard-rejection "^2.1.0" + minimist-options "4.1.0" + normalize-package-data "^3.0.0" + read-pkg-up "^7.0.1" + redent "^3.0.0" + trim-newlines "^3.0.0" + type-fest "^0.18.0" + yargs-parser "^20.2.3" + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.2.3, merge2@^1.3.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity 
sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +methods@1.1.2, methods@^1.1.2, methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= + +micromatch@^3.1.10, micromatch@^3.1.4: + version "3.1.10" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" + integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + braces "^2.3.1" + define-property "^2.0.2" + extend-shallow "^3.0.2" + extglob "^2.0.4" + fragment-cache "^0.2.1" + kind-of "^6.0.2" + nanomatch "^1.2.9" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.2" + +micromatch@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.2.tgz#4fcb0999bf9fbc2fcbdd212f6d629b9a56c39259" + integrity sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q== + dependencies: + braces "^3.0.1" + picomatch "^2.0.5" + +mime-db@1.44.0: + version "1.44.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.44.0.tgz#fa11c5eb0aca1334b4233cb4d52f10c5a6272f92" + integrity sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg== + +mime-types@^2.1.12, mime-types@~2.1.19, mime-types@~2.1.24: + version "2.1.27" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.27.tgz#47949f98e279ea53119f5722e0f34e529bec009f" + integrity sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w== + dependencies: + mime-db "1.44.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity 
sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mime@^2.4.6: + version "2.4.6" + resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.6.tgz#e5b407c90db442f2beb5b162373d07b69affa4d1" + integrity sha512-RZKhC3EmpBchfTGBVb8fb+RL2cWyw/32lshnsETttkBAyAUXSGHxbEJWWRXc751DrIxG1q04b8QwMbAwkRPpUA== + +mimic-fn@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +mimic-response@^1.0.0, mimic-response@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" + integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== + +min-indent@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== + +"minimatch@2 || 3", minimatch@^3.0.0, minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimist-options@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" + integrity 
sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A== + dependencies: + arrify "^1.0.1" + is-plain-obj "^1.1.0" + kind-of "^6.0.3" + +minimist-options@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-3.0.2.tgz#fba4c8191339e13ecf4d61beb03f070103f3d954" + integrity sha512-FyBrT/d0d4+uiZRbqznPXqw3IpZZG3gl3wKWiX784FycUKVwBt0uLBFkQrtE4tZOrgo78nZp2jnKz3L65T5LdQ== + dependencies: + arrify "^1.0.1" + is-plain-obj "^1.1.0" + +minimist@^1.1.0, minimist@^1.1.1, minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" + integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== + +minipass@^2.3.5, minipass@^2.6.0, minipass@^2.8.6, minipass@^2.9.0: + version "2.9.0" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" + integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== + dependencies: + safe-buffer "^5.1.2" + yallist "^3.0.0" + +minizlib@^1.2.1: + version "1.3.3" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" + integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== + dependencies: + minipass "^2.9.0" + +mississippi@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" + integrity sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA== + dependencies: + concat-stream "^1.5.0" + duplexify "^3.4.2" + end-of-stream "^1.1.0" + flush-write-stream "^1.0.0" + from2 "^2.1.0" + parallel-transform "^1.1.0" + pump "^3.0.0" + pumpify "^1.3.3" + stream-each "^1.1.0" + 
through2 "^2.0.0" + +mixin-deep@^1.2.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" + integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== + dependencies: + for-in "^1.0.2" + is-extendable "^1.0.1" + +mkdirp-classic@^0.5.2: + version "0.5.3" + resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" + integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== + +mkdirp-promise@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/mkdirp-promise/-/mkdirp-promise-5.0.1.tgz#e9b8f68e552c68a9c1713b84883f7a1dd039b8a1" + integrity sha1-6bj2jlUsaKnBcTuEiD96HdA5uKE= + dependencies: + mkdirp "*" + +mkdirp@*, mkdirp@1.x, mkdirp@^1.0.0, mkdirp@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== + +mkdirp@^0.5.0, mkdirp@^0.5.1: + version "0.5.5" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" + integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== + dependencies: + minimist "^1.2.5" + +mock-stdin@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/mock-stdin/-/mock-stdin-1.0.0.tgz#efcfaf4b18077e14541742fd758b9cae4e5365ea" + integrity sha512-tukRdb9Beu27t6dN+XztSRHq9J0B/CoAOySGzHfn8UTfmqipA5yNT/sDUEyYdAV3Hpka6Wx6kOMxuObdOex60Q== + +modify-values@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/modify-values/-/modify-values-1.0.1.tgz#b3939fa605546474e3e3e3c63d64bd43b4ee6022" + integrity sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw== + 
+moment@^2.15.1, moment@^2.24.0: + version "2.29.1" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3" + integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== + +move-concurrently@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/move-concurrently/-/move-concurrently-1.0.1.tgz#be2c005fda32e0b29af1f05d7c4b33214c701f92" + integrity sha1-viwAX9oy4LKa8fBdfEszIUxwH5I= + dependencies: + aproba "^1.1.1" + copy-concurrently "^1.0.0" + fs-write-stream-atomic "^1.0.8" + mkdirp "^0.5.1" + rimraf "^2.5.4" + run-queue "^1.0.3" + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" + integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== + +ms@2.1.2, ms@^2.0.0, ms@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +multer@^1.4.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/multer/-/multer-1.4.2.tgz#2f1f4d12dbaeeba74cb37e623f234bf4d3d2057a" + integrity sha512-xY8pX7V+ybyUpbYMxtjM9KAiD9ixtg5/JkeKUTD6xilfDv0vzzOFcCp4Ljb1UU3tSOM3VTZtKo63OmzOrGi3Cg== + dependencies: + append-field "^1.0.0" + busboy "^0.2.11" + concat-stream "^1.5.2" + mkdirp "^0.5.1" + object-assign "^4.1.1" + on-finished "^2.3.0" + type-is "^1.6.4" + xtend "^4.0.0" + +multimatch@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/multimatch/-/multimatch-2.1.0.tgz#9c7906a22fb4c02919e2f5f75161b4cdbd4b2a2b" + integrity sha1-nHkGoi+0wCkZ4vX3UWG0zb1LKis= + dependencies: + 
array-differ "^1.0.0" + array-union "^1.0.1" + arrify "^1.0.0" + minimatch "^3.0.0" + +multimatch@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/multimatch/-/multimatch-3.0.0.tgz#0e2534cc6bc238d9ab67e1b9cd5fcd85a6dbf70b" + integrity sha512-22foS/gqQfANZ3o+W7ST2x25ueHDVNWl/b9OlGcLpy/iKxjCpvcNCM51YCenUi7Mt/jAjjqv8JwZRs8YP5sRjA== + dependencies: + array-differ "^2.0.3" + array-union "^1.0.2" + arrify "^1.0.1" + minimatch "^3.0.4" + +multimatch@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/multimatch/-/multimatch-4.0.0.tgz#8c3c0f6e3e8449ada0af3dd29efb491a375191b3" + integrity sha512-lDmx79y1z6i7RNx0ZGCPq1bzJ6ZoDDKbvh7jxr9SJcWLkShMzXrHbYVpTdnhNM5MXpDUxCQ4DgqVttVXlBgiBQ== + dependencies: + "@types/minimatch" "^3.0.3" + array-differ "^3.0.0" + array-union "^2.1.0" + arrify "^2.0.1" + minimatch "^3.0.4" + +mute-stream@0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" + integrity sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s= + +mute-stream@0.0.8, mute-stream@~0.0.4: + version "0.0.8" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" + integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== + +mz@^2.4.0, mz@^2.5.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + thenify-all "^1.0.0" + +nan@^2.14.0: + version "2.14.2" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.2.tgz#f5376400695168f4cc694ac9393d0c9585eeea19" + integrity sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ== + +nanomatch@^1.2.9: + version "1.2.13" + resolved 
"https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" + integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + define-property "^2.0.2" + extend-shallow "^3.0.2" + fragment-cache "^0.2.1" + is-windows "^1.0.2" + kind-of "^6.0.2" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +napi-macros@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b" + integrity sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= + +natural-orderby@^2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/natural-orderby/-/natural-orderby-2.0.3.tgz#8623bc518ba162f8ff1cdb8941d74deb0fdcc016" + integrity sha512-p7KTHxU0CUrcOXe62Zfrb5Z13nLvPhSWR/so3kFulUQU0sgUll2Z0LwpsLN351eOOD+hRGu/F1g+6xDfPeD++Q== + +ncp@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ncp/-/ncp-2.0.0.tgz#195a21d6c46e361d2fb1281ba38b91e9df7bdbb3" + integrity sha1-GVoh1sRuNh0vsSgbo4uR6d9727M= + +needle@^2.2.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/needle/-/needle-2.5.2.tgz#cf1a8fce382b5a280108bba90a14993c00e4010a" + integrity sha512-LbRIwS9BfkPvNwNHlsA41Q29kL2L/6VaOJ0qisM5lLWsTV3nP15abO5ITL6L81zqFhzjRKDAYjpcBcwM0AVvLQ== + dependencies: + debug "^3.2.6" + iconv-lite "^0.4.4" + sax "^1.2.4" + +negotiator@0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" + integrity 
sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== + +neo-async@^2.6.0: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +nested-error-stacks@^1.0.0, nested-error-stacks@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/nested-error-stacks/-/nested-error-stacks-1.0.2.tgz#19f619591519f096769a5ba9a86e6eeec823c3cf" + integrity sha1-GfYZWRUZ8JZ2mlupqG5u7sgjw88= + dependencies: + inherits "~2.0.1" + +nice-napi@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/nice-napi/-/nice-napi-1.0.2.tgz#dc0ab5a1eac20ce548802fc5686eaa6bc654927b" + integrity sha512-px/KnJAJZf5RuBGcfD+Sp2pAKq0ytz8j+1NehvgIGFkvtvFrDM3T8E4x/JJODXK9WZow8RRGrbA9QQ3hs+pDhA== + dependencies: + node-addon-api "^3.0.0" + node-gyp-build "^4.2.2" + +nice-try@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" + integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== + +nock@^13.0.0: + version "13.0.5" + resolved "https://registry.yarnpkg.com/nock/-/nock-13.0.5.tgz#a618c6f86372cb79fac04ca9a2d1e4baccdb2414" + integrity sha512-1ILZl0zfFm2G4TIeJFW0iHknxr2NyA+aGCMTjDVUsBY4CkMRispF1pfIYkTRdAR/3Bg+UzdEuK0B6HczMQZcCg== + dependencies: + debug "^4.1.0" + json-stringify-safe "^5.0.1" + lodash.set "^4.3.2" + propagate "^2.0.0" + +node-addon-api@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.0.2.tgz#04bc7b83fd845ba785bb6eae25bc857e1ef75681" + integrity sha512-+D4s2HCnxPd5PjjI0STKwncjXTUKKqm74MDMz9OPXavjsGmjkvwgLtA5yoxJUdmpj52+2u+RrXgPipahKczMKg== + +node-cleanup@^2.1.2: + version "2.1.2" + resolved 
"https://registry.yarnpkg.com/node-cleanup/-/node-cleanup-2.1.2.tgz#7ac19abd297e09a7f72a71545d951b517e4dde2c" + integrity sha1-esGavSl+Caf3KnFUXZUbUX5N3iw= + +node-fetch-npm@^2.0.2: + version "2.0.4" + resolved "https://registry.yarnpkg.com/node-fetch-npm/-/node-fetch-npm-2.0.4.tgz#6507d0e17a9ec0be3bec516958a497cec54bf5a4" + integrity sha512-iOuIQDWDyjhv9qSDrj9aq/klt6F9z1p2otB3AV7v3zBDcL/x+OfGsvGQZZCcMZbUf4Ujw1xGNQkjvGnVT22cKg== + dependencies: + encoding "^0.1.11" + json-parse-better-errors "^1.0.0" + safe-buffer "^5.1.1" + +node-fetch@2.6.1, node-fetch@^2.2.0, node-fetch@^2.5.0, node-fetch@^2.6.1: + version "2.6.1" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" + integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== + +node-forge@0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3" + integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA== + +node-gyp-build@^4.2.2: + version "4.2.3" + resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.2.3.tgz#ce6277f853835f718829efb47db20f3e4d9c4739" + integrity sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg== + +node-gyp-build@~4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb" + integrity sha512-dSq1xmcPDKPZ2EED2S6zw/b9NKsqzXRE6dVr8TVQnI3FJOTteUMuqF3Qqs6LZg+mLGYJWqQzMbIjMtJqTv87nQ== + +node-gyp@^5.0.2: + version "5.1.1" + resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-5.1.1.tgz#eb915f7b631c937d282e33aed44cb7a025f62a3e" + integrity sha512-WH0WKGi+a4i4DUt2mHnvocex/xPLp9pYt5R6M2JdFB7pJ7Z34hveZ4nDTGTiLXCkitA9T8HFZjhinBCiVHYcWw== + dependencies: + env-paths "^2.2.0" + glob "^7.1.4" + graceful-fs 
"^4.2.2" + mkdirp "^0.5.1" + nopt "^4.0.1" + npmlog "^4.1.2" + request "^2.88.0" + rimraf "^2.6.3" + semver "^5.7.1" + tar "^4.4.12" + which "^1.3.1" + +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= + +node-ipc@9.1.3: + version "9.1.3" + resolved "https://registry.yarnpkg.com/node-ipc/-/node-ipc-9.1.3.tgz#1df3f069d103184ae9127fa885dbdaea56a4436f" + integrity sha512-8RS4RZyS/KMKKYG8mrje+cLxwATe9dBCuOiqKFSWND4oOuKytfuKCiR9yinvhoXF/nGdX/WnbywaUee+9U87zA== + dependencies: + event-pubsub "4.3.0" + js-message "1.0.7" + js-queue "2.0.2" + +node-modules-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40" + integrity sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA= + +node-notifier@^8.0.0: + version "8.0.1" + resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-8.0.1.tgz#f86e89bbc925f2b068784b31f382afdc6ca56be1" + integrity sha512-BvEXF+UmsnAfYfoapKM9nGxnP+Wn7P91YfXmrKnfcYCx6VBeoN5Ez5Ogck6I8Bi5k4RlpqRYaw75pAwzX9OphA== + dependencies: + growly "^1.3.0" + is-wsl "^2.2.0" + semver "^7.3.2" + shellwords "^0.1.1" + uuid "^8.3.0" + which "^2.0.2" + +node-pre-gyp@^0.13.0: + version "0.13.0" + resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.13.0.tgz#df9ab7b68dd6498137717838e4f92a33fc9daa42" + integrity sha512-Md1D3xnEne8b/HGVQkZZwV27WUi1ZRuZBij24TNaZwUPU3ZAFtvT6xxJGaUVillfmMKnn5oD1HoGsp2Ftik7SQ== + dependencies: + detect-libc "^1.0.2" + mkdirp "^0.5.1" + needle "^2.2.1" + nopt "^4.0.1" + npm-packlist "^1.1.6" + npmlog "^4.0.2" + rc "^1.2.7" + rimraf "^2.6.1" + semver "^5.3.0" + tar "^4" + +nodemon@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/nodemon/-/nodemon-2.0.6.tgz#1abe1937b463aaf62f0d52e2b7eaadf28cc2240d" + integrity 
sha512-4I3YDSKXg6ltYpcnZeHompqac4E6JeAMpGm8tJnB9Y3T0ehasLa4139dJOcCrB93HHrUMsCrKtoAlXTqT5n4AQ== + dependencies: + chokidar "^3.2.2" + debug "^3.2.6" + ignore-by-default "^1.0.1" + minimatch "^3.0.4" + pstree.remy "^1.1.7" + semver "^5.7.1" + supports-color "^5.5.0" + touch "^3.1.0" + undefsafe "^2.0.3" + update-notifier "^4.1.0" + +nopt@^4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.3.tgz#a375cad9d02fd921278d954c2254d5aa57e15e48" + integrity sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg== + dependencies: + abbrev "1" + osenv "^0.1.4" + +nopt@~1.0.10: + version "1.0.10" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee" + integrity sha1-bd0hvSoxQXuScn3Vhfim83YI6+4= + dependencies: + abbrev "1" + +normalize-package-data@^2.0.0, normalize-package-data@^2.3.0, normalize-package-data@^2.3.2, normalize-package-data@^2.3.4, normalize-package-data@^2.3.5, normalize-package-data@^2.4.0, normalize-package-data@^2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" + integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== + dependencies: + hosted-git-info "^2.1.4" + resolve "^1.10.0" + semver "2 || 3 || 4 || 5" + validate-npm-package-license "^3.0.1" + +normalize-package-data@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-3.0.0.tgz#1f8a7c423b3d2e85eb36985eaf81de381d01301a" + integrity sha512-6lUjEI0d3v6kFrtgA/lOx4zHCWULXsFNIjHolnZCKCTLA6m/G625cdn3O7eNmT0iD3jfo6HZ9cdImGZwf21prw== + dependencies: + hosted-git-info "^3.0.6" + resolve "^1.17.0" + semver "^7.3.2" + validate-npm-package-license "^3.0.1" + +normalize-path@^2.1.1: + version "2.1.1" + resolved 
"https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" + integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= + dependencies: + remove-trailing-separator "^1.0.1" + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-url@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-3.3.0.tgz#b2e1c4dc4f7c6d57743df733a4f5978d18650559" + integrity sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg== + +normalize-url@^4.1.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.0.tgz#453354087e6ca96957bd8f5baf753f5982142129" + integrity sha512-2s47yzUxdexf1OhyRi4Em83iQk0aPvwTddtFz4hnSSw9dCEsLEGf6SwIO8ss/19S9iBb5sJaOuTvTGDeZI00BQ== + +npm-api@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/npm-api/-/npm-api-1.0.0.tgz#6033c283bb04ddb0185344c1ad07ed4f67c77989" + integrity sha512-gtJhIhGq07g9H5sIAB9TZzTySW8MYtcYqg+e+J+5q1GmDsDLLVfyvVBL1VklzjtRsElph11GUtLBS191RDOJxQ== + dependencies: + JSONStream "^1.3.5" + clone-deep "^4.0.1" + download-stats "^0.3.4" + moment "^2.24.0" + paged-request "^2.0.1" + request "^2.88.0" + +npm-bundled@^1.0.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.1.1.tgz#1edd570865a94cdb1bc8220775e29466c9fb234b" + integrity sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA== + dependencies: + npm-normalize-package-bin "^1.0.1" + +npm-lifecycle@^3.1.2: + version "3.1.5" + resolved "https://registry.yarnpkg.com/npm-lifecycle/-/npm-lifecycle-3.1.5.tgz#9882d3642b8c82c815782a12e6a1bfeed0026309" + integrity 
sha512-lDLVkjfZmvmfvpvBzA4vzee9cn+Me4orq0QF8glbswJVEbIcSNWib7qGOffolysc3teCqbbPZZkzbr3GQZTL1g== + dependencies: + byline "^5.0.0" + graceful-fs "^4.1.15" + node-gyp "^5.0.2" + resolve-from "^4.0.0" + slide "^1.1.6" + uid-number "0.0.6" + umask "^1.1.0" + which "^1.3.1" + +npm-normalize-package-bin@^1.0.0, npm-normalize-package-bin@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz#6e79a41f23fd235c0623218228da7d9c23b8f6e2" + integrity sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA== + +"npm-package-arg@^4.0.0 || ^5.0.0 || ^6.0.0", npm-package-arg@^6.0.0, npm-package-arg@^6.1.0: + version "6.1.1" + resolved "https://registry.yarnpkg.com/npm-package-arg/-/npm-package-arg-6.1.1.tgz#02168cb0a49a2b75bf988a28698de7b529df5cb7" + integrity sha512-qBpssaL3IOZWi5vEKUKW0cO7kzLeT+EQO9W8RsLOZf76KF9E/K9+wH0C7t06HXPpaH8WH5xF1MExLuCwbTqRUg== + dependencies: + hosted-git-info "^2.7.1" + osenv "^0.1.5" + semver "^5.6.0" + validate-npm-package-name "^3.0.0" + +npm-packlist@^1.1.6, npm-packlist@^1.4.4: + version "1.4.8" + resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.8.tgz#56ee6cc135b9f98ad3d51c1c95da22bbb9b2ef3e" + integrity sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A== + dependencies: + ignore-walk "^3.0.1" + npm-bundled "^1.0.1" + npm-normalize-package-bin "^1.0.1" + +npm-pick-manifest@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/npm-pick-manifest/-/npm-pick-manifest-3.0.2.tgz#f4d9e5fd4be2153e5f4e5f9b7be8dc419a99abb7" + integrity sha512-wNprTNg+X5nf+tDi+hbjdHhM4bX+mKqv6XmPh7B5eG+QY9VARfQPfCEH013H5GqfNj6ee8Ij2fg8yk0mzps1Vw== + dependencies: + figgy-pudding "^3.5.1" + npm-package-arg "^6.0.0" + semver "^5.4.1" + +npm-run-path@^2.0.0: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= + dependencies: + path-key "^2.0.0" + +npm-run-path@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +npmlog@^4.0.2, npmlog@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" + integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== + dependencies: + are-we-there-yet "~1.1.2" + console-control-strings "~1.1.0" + gauge "~2.7.3" + set-blocking "~2.0.0" + +nps-utils@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/nps-utils/-/nps-utils-1.7.0.tgz#88ef27925ee7cd916f15a25f1b1a6648e9b21a7d" + integrity sha512-wq/gUxTR3sgLYTLHlUKwMG2paUV3QdfwMj5aifWpQ60Vv9RQzA6pXDkwrAlMA8SNqoz8FwaGPc1X/7I+qjuYwg== + dependencies: + any-shell-escape "^0.1.1" + common-tags "^1.4.0" + concurrently "^3.4.0" + cpy-cli "^1.0.1" + cross-env "^3.1.4" + is-windows "^1.0.0" + mkdirp "^0.5.1" + ncp "2.0.0" + opn-cli "^3.1.0" + rimraf "^2.6.1" + +number-is-nan@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= + +nwsapi@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7" + integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== + +oauth-sign@~0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" + 
integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== + +object-assign@^4, object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + +object-copy@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= + dependencies: + copy-descriptor "^0.1.0" + define-property "^0.2.5" + kind-of "^3.0.3" + +object-hash@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-2.0.3.tgz#d12db044e03cd2ca3d77c0570d87225b02e1e6ea" + integrity sha512-JPKn0GMu+Fa3zt3Bmr66JhokJU5BaNBIh4ZeTlaCBzrBsOeXzwcKKAK1tbLiPKgvwmPXsDvvLHoWh5Bm7ofIYg== + +object-inspect@^1.8.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.9.0.tgz#c90521d74e1127b67266ded3394ad6116986533a" + integrity sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw== + +object-keys@^1.0.12, object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object-path@^0.11.4: + version "0.11.5" + resolved "https://registry.yarnpkg.com/object-path/-/object-path-0.11.5.tgz#d4e3cf19601a5140a55a16ad712019a9c50b577a" + integrity sha512-jgSbThcoR/s+XumvGMTMf81QVBmah+/Q7K7YduKeKVWL7N111unR2d6pZZarSk6kY/caeNxUDyxOvMWyzoU2eg== + +object-treeify@^1.1.4: + version "1.1.30" + resolved "https://registry.yarnpkg.com/object-treeify/-/object-treeify-1.1.30.tgz#59559fefc53af923720ac257fd5ea6aef326d84d" + integrity 
sha512-BhsTZj8kbeCnyBKWuAgAakbGgrcVV/IJhUAGF25lOSwDZoHoDmnynUtXfyrrDn8A1Xy3G9k5uLP+V5onOOq3WA== + +object-visit@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= + dependencies: + isobject "^3.0.0" + +object.assign@^4.1.1: + version "4.1.2" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" + integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + has-symbols "^1.0.1" + object-keys "^1.1.1" + +object.entries@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.3.tgz#c601c7f168b62374541a07ddbd3e2d5e4f7711a6" + integrity sha512-ym7h7OZebNS96hn5IJeyUmaWhaSM4SVtAPPfNLQEI2MYWCO2egsITb9nab2+i/Pwibx+R0mtn+ltKJXRSeTMGg== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.1" + has "^1.0.3" + +object.fromentries@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.3.tgz#13cefcffa702dc67750314a3305e8cb3fad1d072" + integrity sha512-IDUSMXs6LOSJBWE++L0lzIbSqHl9KDCfff2x/JSEIDtEUavUnyMYC2ZGay/04Zq4UT8lvd4xNhU4/YHKibAOlw== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.1" + has "^1.0.3" + +object.getownpropertydescriptors@^2.0.3: + version "2.1.1" + resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.1.tgz#0dfda8d108074d9c563e80490c883b6661091544" + integrity sha512-6DtXgZ/lIZ9hqx4GtZETobXLR/ZLaa0aqV0kzbn80Rf8Z2e/XFnhA0I7p07N2wH8bBBltr2xQPi6sbKWAY2Eng== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.1" + +object.pick@^1.3.0: + version "1.3.0" + resolved 
"https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= + dependencies: + isobject "^3.0.1" + +object.values@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.2.tgz#7a2015e06fcb0f546bd652486ce8583a4731c731" + integrity sha512-MYC0jvJopr8EK6dPBiO8Nb9mvjdypOachO5REGk6MXzujbBrAisKo3HmdEI6kZDL6fC31Mwee/5YbtMebixeag== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.1" + has "^1.0.3" + +oclif@1.16.1: + version "1.16.1" + resolved "https://registry.yarnpkg.com/oclif/-/oclif-1.16.1.tgz#24131db5ed2f3a4ae538111bd8b75aa994dd6090" + integrity sha512-jCHGL6lH2tFWi41fUYSO4KSMvJLX/bEBetK3/w0H/LlxtFKdo2JAKSYuu5QwTn/HLnWkxzjBvdgbkNZGg8NBkA== + dependencies: + "@oclif/command" "^1.6" + "@oclif/config" "^1.12.6" + "@oclif/errors" "^1.2.2" + "@oclif/fixpack" "^2.3.0" + "@oclif/plugin-help" "^3" + "@oclif/plugin-not-found" "^1.2.2" + "@oclif/plugin-warn-if-update-available" "^1.5.4" + debug "^4.1.1" + lodash "^4.17.11" + nps-utils "^1.7.0" + sort-pjson "^1.0.3" + tslib "^1.9.3" + yeoman-environment "^2.3.4" + yeoman-generator "^3.2.0" + yosay "^2.0.2" + +octokit-pagination-methods@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/octokit-pagination-methods/-/octokit-pagination-methods-1.1.0.tgz#cf472edc9d551055f9ef73f6e42b4dbb4c80bea4" + integrity sha512-fZ4qZdQ2nxJvtcasX7Ghl+WlWS/d9IgnBIwFZXVNNZUmzpno91SX5bc5vuxiuKoCtK78XxGGNuSCrDC7xYB3OQ== + +on-finished@^2.3.0, on-finished@~2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" + integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= + dependencies: + ee-first "1.1.1" + +once@^1.3.0, once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity 
sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +one-time@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/one-time/-/one-time-1.0.0.tgz#e06bc174aed214ed58edede573b433bbf827cb45" + integrity sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g== + dependencies: + fn.name "1.x.x" + +onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= + dependencies: + mimic-fn "^1.0.0" + +onetime@^5.1.0: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +ono@^7.1.3: + version "7.1.3" + resolved "https://registry.yarnpkg.com/ono/-/ono-7.1.3.tgz#a054e96a388f566a6c4c95e1e92b9b253722d286" + integrity sha512-9jnfVriq7uJM4o5ganUY54ntUm+5EK21EGaQ5NWnkWg3zz5ywbbonlBguRcnmF1/HDiIe3zxNxXcO1YPBmPcQQ== + dependencies: + "@jsdevtools/ono" "7.1.3" + +openapi-default-setter@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/openapi-default-setter/-/openapi-default-setter-7.0.1.tgz#08679bbd6bcf74a13407965cef78de128e8046ae" + integrity sha512-O9jhaZPEEJzI1HSG3Yw5rOIC0EpZ9PjRJgtksXKuSMyEoxUDnl7zQ27LuFRR1ykSMVhMt8vHMrQBQIwLW8S0yQ== + dependencies: + openapi-types "^7.0.1" + +openapi-framework@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/openapi-framework/-/openapi-framework-7.2.0.tgz#633a381fa2bcfcf7c82becacc27fb617c4b87c81" + integrity sha512-rC4U+SIBVxoTujSIrk84PMquBwkNJfhYC7KTTDUUc7yfIyMVKRat5TOuMDyc49Ovsv+7bdkx1stf7d0N9LbtLg== + dependencies: + difunc "0.0.4" + fs-routes "^7.0.1" + glob "*" + is-dir "^1.0.0" + js-yaml "^3.10.0" + openapi-default-setter "^7.0.1" + openapi-request-coercer "^7.1.0" + openapi-request-validator 
"^7.2.0" + openapi-response-validator "^7.0.1" + openapi-schema-validator "^7.0.1" + openapi-security-handler "^7.0.1" + openapi-types "^7.0.1" + ts-log "^2.1.4" + +openapi-jsonschema-parameters@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/openapi-jsonschema-parameters/-/openapi-jsonschema-parameters-7.0.2.tgz#165bcec0103e282223c5a5e85c9d8517e0ee79f5" + integrity sha512-hCC8wsWu9qU/pWCUClAYmUyXRhAeXSZUCRV7NVlj/8+3fWrtTBwk8GKI2dRa5Up0yZ3pstGi3Ewzzuixbmh8sw== + dependencies: + openapi-types "^7.0.1" + +openapi-request-coercer@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/openapi-request-coercer/-/openapi-request-coercer-7.1.0.tgz#6890d7d962d993d1183afe4cdaa80e364ad8525d" + integrity sha512-6nvSgvOvLYMkUBu3NbHQU6Lcol1WxDr0DsOe3oYHb2tZhokrNEuOF20QYPV+CGZYyEzc0f+Hdas774n5B0euLg== + dependencies: + openapi-types "^7.0.1" + ts-log "^2.1.4" + +openapi-request-validator@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/openapi-request-validator/-/openapi-request-validator-7.2.0.tgz#1e8f2177b5a60802def702d1a311608dea55b0d9" + integrity sha512-LgXvKco6XR5SKr8QBaM6v0++QXY5MP2yvvKv0Ckutef3css9MAyIcokDsBj6DYYzNnjmFxPx4ntuY7CZTC2ZFA== + dependencies: + ajv "^6.5.4" + content-type "^1.0.4" + openapi-jsonschema-parameters "^7.0.2" + openapi-types "^7.0.1" + ts-log "^2.1.4" + +openapi-response-validator@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/openapi-response-validator/-/openapi-response-validator-7.0.1.tgz#9c3526594c27cdfedbb31b0b81abb1333555a223" + integrity sha512-Fxr9YdQ6s7/SIvvM888iWnc1GUn/fFxTaMFqHkUv0/eNCYoBfOwAKj9aptaRfL+BJXlsVdXWCJd3GWkwn8sIJA== + dependencies: + ajv "^6.5.4" + openapi-types "^7.0.1" + +openapi-schema-validator@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/openapi-schema-validator/-/openapi-schema-validator-7.0.1.tgz#0dd333419f55ef882183294bce5db6ee980fab59" + integrity 
sha512-P/dmF14xWbyaFVcoS1Fs2tUP4AhJO+eEnZV+jbApeo3569/Z2fiki6Mb6Rs7cfi0ewNnV4L4HiYH+HPZaKWnjQ== + dependencies: + ajv "^6.5.2" + lodash.merge "^4.6.1" + openapi-types "^7.0.1" + swagger-schema-official "2.0.0-bab6bed" + +openapi-security-handler@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/openapi-security-handler/-/openapi-security-handler-7.0.1.tgz#52a05795de7ad36395ee555a73d74a298c309115" + integrity sha512-fiRJE2Z5F0tY9QBssBX9g8Txtr0oj1BOU0nOZ6QHHXQdCYxebszGgcXD63uy0UJQwzwVOMs/AlCnKNVS/yMSEg== + dependencies: + openapi-types "^7.0.1" + +openapi-types@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/openapi-types/-/openapi-types-7.0.1.tgz#966bcacfd14119fa12000dbc9d588bfd8df2e4d1" + integrity sha512-6pi4/Fw+JIW1HHda2Ij7LRJ5QJ8f6YzaXnsRA6m44BJz8nLq/j5gVFzPBKJo+uOFhAeHqZC/3uzhTpYPga3Q/A== + +opn-cli@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/opn-cli/-/opn-cli-3.1.0.tgz#f819ae6cae0b411bd0149b8560fe6c88adad20f8" + integrity sha1-+BmubK4LQRvQFJuFYP5siK2tIPg= + dependencies: + file-type "^3.6.0" + get-stdin "^5.0.1" + meow "^3.7.0" + opn "^4.0.0" + temp-write "^2.1.0" + +opn@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/opn/-/opn-4.0.2.tgz#7abc22e644dff63b0a96d5ab7f2790c0f01abc95" + integrity sha1-erwi5kTf9jsKltWrfyeQwPAavJU= + dependencies: + object-assign "^4.0.1" + pinkie-promise "^2.0.0" + +optionator@^0.8.1: + version "0.8.3" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity 
sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +os-homedir@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= + +os-name@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/os-name/-/os-name-3.1.0.tgz#dec19d966296e1cd62d701a5a66ee1ddeae70801" + integrity sha512-h8L+8aNjNcMpo/mAIBPn5PXCM16iyPGjHNWo6U1YO8sJTMHtEtyczI6QJnLoplswm6goopQkqc7OAnjhWcugVg== + dependencies: + macos-release "^2.2.0" + windows-release "^3.1.0" + +os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= + +osenv@^0.1.4, osenv@^0.1.5: + version "0.1.5" + resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" + integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== + dependencies: + os-homedir "^1.0.0" + os-tmpdir "^1.0.0" + +p-cancelable@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.3.0.tgz#b9e123800bcebb7ac13a479be195b507b98d30fa" + integrity sha512-RVbZPLso8+jFeq1MfNvgXtCRED2raz/dKpacfTNxsx6pLEpEomM7gah6VeHSYV3+vo0OAi4MkArtQcWWXuQoyw== + +p-cancelable@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" + integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== + +p-each-series@^2.1.0: + version "2.2.0" + resolved 
"https://registry.yarnpkg.com/p-each-series/-/p-each-series-2.2.0.tgz#105ab0357ce72b202a8a8b94933672657b5e2a9a" + integrity sha512-ycIL2+1V32th+8scbpTvyHNaHe02z0sjgh91XXjAk+ZeXoPN4Z46DVUnzdso0aX4KckKw0FNNFHdjZ2UsZvxiA== + +p-finally@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= + +p-limit@^1.1.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" + integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== + dependencies: + p-try "^1.0.0" + +p-limit@^2.0.0, p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= + dependencies: + p-limit "^1.1.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-map-series@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-map-series/-/p-map-series-1.0.0.tgz#bf98fe575705658a9e1351befb85ae4c1f07bdca" + 
integrity sha1-v5j+V1cFZYqeE1G++4WuTB8Hvco= + dependencies: + p-reduce "^1.0.0" + +p-map@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" + integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== + +p-pipe@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/p-pipe/-/p-pipe-1.2.0.tgz#4b1a11399a11520a67790ee5a0c1d5881d6befe9" + integrity sha1-SxoROZoRUgpneQ7loMHViB1r7+k= + +p-queue@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-4.0.0.tgz#ed0eee8798927ed6f2c2f5f5b77fdb2061a5d346" + integrity sha512-3cRXXn3/O0o3+eVmUroJPSj/esxoEFIm0ZOno/T+NzG/VZgPOqQ8WKmlNqubSEpZmCIngEy34unkHGg83ZIBmg== + dependencies: + eventemitter3 "^3.1.0" + +p-reduce@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-reduce/-/p-reduce-1.0.0.tgz#18c2b0dd936a4690a529f8231f58a0fdb6a47dfa" + integrity sha1-GMKw3ZNqRpClKfgjH1ig/bakffo= + +p-timeout@^1.1.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-1.2.1.tgz#5eb3b353b7fce99f101a1038880bb054ebbea386" + integrity sha1-XrOzU7f86Z8QGhA4iAuwVOu+o4Y= + dependencies: + p-finally "^1.0.0" + +p-try@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= + +p-try@^2.0.0, p-try@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +p-waterfall@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-waterfall/-/p-waterfall-1.0.0.tgz#7ed94b3ceb3332782353af6aae11aa9fc235bb00" + integrity sha1-ftlLPOszMngjU69qrhGqn8I1uwA= + dependencies: + p-reduce "^1.0.0" + +package-json@^6.3.0: + version "6.5.0" + resolved 
"https://registry.yarnpkg.com/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0" + integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ== + dependencies: + got "^9.6.0" + registry-auth-token "^4.0.0" + registry-url "^5.0.0" + semver "^6.2.0" + +packet-reader@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/packet-reader/-/packet-reader-1.0.0.tgz#9238e5480dedabacfe1fe3f2771063f164157d74" + integrity sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ== + +pad-component@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/pad-component/-/pad-component-0.0.1.tgz#ad1f22ce1bf0fdc0d6ddd908af17f351a404b8ac" + integrity sha1-rR8izhvw/cDW3dkIrxfzUaQEuKw= + +paged-request@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/paged-request/-/paged-request-2.0.1.tgz#91164f042231feb68643542d2530476a518ff4de" + integrity sha512-C0bB/PFk9rQskD1YEiz7uuchzqKDQGgdsEHN1ahify0UUWzgmMK4NDG9fhlQg2waogmNFwEvEeHfMRvJySpdVw== + dependencies: + axios "^0.18.0" + +pako@^1.0.3: + version "1.0.11" + resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" + integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== + +parallel-transform@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/parallel-transform/-/parallel-transform-1.2.0.tgz#9049ca37d6cb2182c3b1d2c720be94d14a5814fc" + integrity sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg== + dependencies: + cyclist "^1.0.1" + inherits "^2.0.3" + readable-stream "^2.1.5" + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity 
sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parent-require@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/parent-require/-/parent-require-1.0.0.tgz#746a167638083a860b0eef6732cb27ed46c32977" + integrity sha1-dGoWdjgIOoYLDu9nMssn7UbDKXc= + +parse-github-repo-url@^1.3.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/parse-github-repo-url/-/parse-github-repo-url-1.4.1.tgz#9e7d8bb252a6cb6ba42595060b7bf6df3dbc1f50" + integrity sha1-nn2LslKmy2ukJZUGC3v23z28H1A= + +parse-json@5.1.0, parse-json@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.1.0.tgz#f96088cdf24a8faa9aea9a009f2d9d942c999646" + integrity sha512-+mi/lmVVNKFNVyLXV31ERiy2CY5E1/F6QtJFEzoChPRwwngMNXRDQ9GJ5WdE2Z2P4AujsOi0/+2qHID68KwfIQ== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +parse-json@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" + integrity sha1-9ID0BDTvgHQfhGkJn43qGPVaTck= + dependencies: + error-ex "^1.2.0" + +parse-json@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" + integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= + dependencies: + error-ex "^1.3.1" + json-parse-better-errors "^1.0.1" + +parse-path@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/parse-path/-/parse-path-4.0.2.tgz#ef14f0d3d77bae8dd4bc66563a4c151aac9e65aa" + integrity sha512-HSqVz6iuXSiL8C1ku5Gl1Z5cwDd9Wo0q8CoffdAghP6bz8pJa1tcMC+m4N+z6VAS8QdksnIGq1TB6EgR4vPR6w== + dependencies: + is-ssh "^1.3.0" + protocols "^1.4.0" + +parse-url@^5.0.0: + version "5.0.2" + resolved "https://registry.yarnpkg.com/parse-url/-/parse-url-5.0.2.tgz#856a3be1fcdf78dc93fc8b3791f169072d898b59" + 
integrity sha512-Czj+GIit4cdWtxo3ISZCvLiUjErSo0iI3wJ+q9Oi3QuMYTI6OZu+7cewMWZ+C1YAnKhYTk6/TLuhIgCypLthPA== + dependencies: + is-ssh "^1.3.0" + normalize-url "^3.3.0" + parse-path "^4.0.0" + protocols "^1.4.0" + +parse5-htmlparser2-tree-adapter@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-6.0.1.tgz#2cdf9ad823321140370d4dbf5d3e92c7c8ddc6e6" + integrity sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA== + dependencies: + parse5 "^6.0.1" + +parse5@5.1.1, parse5@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-5.1.1.tgz#f68e4e5ba1852ac2cadc00f4555fff6c2abb6178" + integrity sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug== + +parse5@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascalcase@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= + +password-prompt@^1.0.7, password-prompt@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/password-prompt/-/password-prompt-1.1.2.tgz#85b2f93896c5bd9e9f2d6ff0627fa5af3dc00923" + integrity sha512-bpuBhROdrhuN3E7G/koAju0WjVw9/uQOG5Co5mokNj0MiOSBVZS1JTwM4zl55hu0WFmIEFvO9cU9sJQiBIYeIA== + dependencies: + ansi-escapes "^3.1.0" + cross-spawn "^6.0.5" + +path-dirname@^1.0.0: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" + integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= + +path-exists@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" + integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= + dependencies: + pinkie-promise "^2.0.0" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-key@^2.0.0, path-key@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" + integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved 
"https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= + +path-to-regexp@3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-3.2.0.tgz#fa7877ecbc495c601907562222453c43cc204a5f" + integrity sha512-jczvQbCUS7XmS7o+y1aEO9OBVFeZBQ1MDSEqmO7xSoPgOPoowY/SxLpZ6Vh97/8qHZOteiCKb7gkG9gA2ZUxJA== + +path-to-regexp@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.0.tgz#f7b3803336104c346889adece614669230645f38" + integrity sha512-f66KywYG6+43afgE/8j/GoiNyygk/bnoCbps++3ErRKsIYkGGupyv07R2Ok5m9i67Iqc+T2g1eAUGUPzWhYTyg== + +path-type@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" + integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= + dependencies: + graceful-fs "^4.1.2" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +path-type@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" + integrity sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM= + dependencies: + pify "^2.0.0" + +path-type@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" + integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg== + dependencies: + pify "^3.0.0" + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +pathval@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.0.tgz#b942e6d4bde653005ef6b71361def8727d0645e0" + integrity sha1-uULm1L3mUwBe9rcTYd74cn0GReA= + +pause-stream@0.0.11: 
+ version "0.0.11" + resolved "https://registry.yarnpkg.com/pause-stream/-/pause-stream-0.0.11.tgz#fe5a34b0cbce12b5aa6a2b403ee2e73b602f1445" + integrity sha1-/lo0sMvOErWqaitAPuLnO2AvFEU= + dependencies: + through "~2.3" + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= + +pg-connection-string@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.4.0.tgz#c979922eb47832999a204da5dbe1ebf2341b6a10" + integrity sha512-3iBXuv7XKvxeMrIgym7njT+HlZkwZqqGX4Bu9cci8xHZNT+Um1gWKqCsAzcC0d95rcKMU5WBg6YRUcHyV0HZKQ== + +pg-int8@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c" + integrity sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw== + +pg-pool@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.2.2.tgz#a560e433443ed4ad946b84d774b3f22452694dff" + integrity sha512-ORJoFxAlmmros8igi608iVEbQNNZlp89diFVx6yV5v+ehmpMY9sK6QgpmgoXbmkNaBAx8cOOZh9g80kJv1ooyA== + +pg-protocol@^1.2.0, pg-protocol@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.4.0.tgz#43a71a92f6fe3ac559952555aa3335c8cb4908be" + integrity sha512-El+aXWcwG/8wuFICMQjM5ZSAm6OWiJicFdNYo+VY3QP+8vI4SvLIWVe51PppTzMhikUJR+PsyIFKqfdXPz/yxA== + +pg-types@^2.1.0, pg-types@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3" + integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA== + dependencies: + pg-int8 "1.0.1" + postgres-array "~2.0.0" + postgres-bytea "~1.0.0" + postgres-date "~1.0.4" + postgres-interval "^1.1.0" + +pg@8.5.1, "pg@>=6.5 <9": + version "8.5.1" + 
resolved "https://registry.yarnpkg.com/pg/-/pg-8.5.1.tgz#34dcb15f6db4a29c702bf5031ef2e1e25a06a120" + integrity sha512-9wm3yX9lCfjvA98ybCyw2pADUivyNWT/yIP4ZcDVpMN0og70BUWYEGXPCTAQdGTAqnytfRADb7NERrY1qxhIqw== + dependencies: + buffer-writer "2.0.0" + packet-reader "1.0.0" + pg-connection-string "^2.4.0" + pg-pool "^3.2.2" + pg-protocol "^1.4.0" + pg-types "^2.1.0" + pgpass "1.x" + +pgpass@1.x: + version "1.0.4" + resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.4.tgz#85eb93a83800b20f8057a2b029bf05abaf94ea9c" + integrity sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w== + dependencies: + split2 "^3.1.1" + +picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1: + version "2.2.2" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad" + integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg== + +pify@^2.0.0, pify@^2.2.0, pify@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= + +pify@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" + integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= + +pify@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" + integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== + +pinkie-promise@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= + dependencies: + pinkie "^2.0.0" + +pinkie@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" 
+ integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= + +pirates@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.1.tgz#643a92caf894566f91b2b986d2c66950a8e2fb87" + integrity sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA== + dependencies: + node-modules-regexp "^1.0.0" + +piscina@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/piscina/-/piscina-2.1.0.tgz#947d937e0efd0de7a0e8bf10363c2e053c619413" + integrity sha512-3FgX36QyZcU4prKuNKl7/lWlOF3HAv9n7JpCjw09Zbql2KkzXXQ7E5xUS+RV5wV24Rn0r6Lr8jLdtU/cNZHAnA== + dependencies: + eventemitter-asyncresource "^1.0.0" + hdr-histogram-js "^2.0.1" + hdr-histogram-percentiles-obj "^3.0.0" + optionalDependencies: + nice-napi "^1.0.2" + +pkg-dir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" + integrity sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s= + dependencies: + find-up "^2.1.0" + +pkg-dir@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" + integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== + dependencies: + find-up "^3.0.0" + +pkg-dir@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +posix-character-classes@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= + +postgres-array@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-2.0.0.tgz#48f8fce054fbc69671999329b8834b772652d82e" + 
integrity sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA== + +postgres-bytea@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-1.0.0.tgz#027b533c0aa890e26d172d47cf9ccecc521acd35" + integrity sha1-AntTPAqokOJtFy1Hz5zOzFIazTU= + +postgres-date@~1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-1.0.7.tgz#51bc086006005e5061c591cee727f2531bf641a8" + integrity sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q== + +postgres-interval@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-1.2.0.tgz#b460c82cb1587507788819a06aa0fffdb3544695" + integrity sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ== + dependencies: + xtend "^4.0.0" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= + +prepend-http@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" + integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw= + +prepend-http@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" + integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= + +prettier-linter-helpers@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b" + 
integrity sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w== + dependencies: + fast-diff "^1.1.2" + +prettier@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.1.2.tgz#3050700dae2e4c8b67c4c3f666cdb8af405e1ce5" + integrity sha512-16c7K+x4qVlJg9rEbXl7HEGmQyZlG4R9AgP+oHKRMsMsuk8s+ATStlf1NpDqyBI1HpVyfjLOeMhH2LvuNvV5Vg== + +prettier@2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.2.1.tgz#795a1a78dd52f073da0cd42b21f9c91381923ff5" + integrity sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q== + +pretty-bytes@^5.1.0, pretty-bytes@^5.2.0: + version "5.4.1" + resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.4.1.tgz#cd89f79bbcef21e3d21eb0da68ffe93f803e884b" + integrity sha512-s1Iam6Gwz3JI5Hweaz4GoCD1WUNUIyzePFy5+Js2hjwGVt2Z79wNN+ZKOZ2vB6C+Xs6njyB84Z1IthQg8d9LxA== + +pretty-format@^26.0.0, pretty-format@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-26.6.2.tgz#e35c2705f14cb7fe2fe94fa078345b444120fc93" + integrity sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg== + dependencies: + "@jest/types" "^26.6.2" + ansi-regex "^5.0.0" + ansi-styles "^4.0.0" + react-is "^17.0.1" + +process-nextick-args@^2.0.0, process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +progress@^2.0.0, progress@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + +promise-inflight@^1.0.1: + version 
"1.0.1" + resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" + integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= + +promise-retry@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/promise-retry/-/promise-retry-1.1.1.tgz#6739e968e3051da20ce6497fb2b50f6911df3d6d" + integrity sha1-ZznpaOMFHaIM5kl/srUPaRHfPW0= + dependencies: + err-code "^1.0.0" + retry "^0.10.0" + +prompts@^2.0.1: + version "2.4.0" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.0.tgz#4aa5de0723a231d1ee9121c40fdf663df73f61d7" + integrity sha512-awZAKrk3vN6CroQukBL+R9051a4R3zCZBlJm/HBfrSZ8iTpYix3VX1vU4mveiLpiwmOJT4wokTF9m6HUk4KqWQ== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +promzard@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/promzard/-/promzard-0.3.0.tgz#26a5d6ee8c7dee4cb12208305acfb93ba382a9ee" + integrity sha1-JqXW7ox97kyxIggwWs+5O6OCqe4= + dependencies: + read "1" + +prop-types@^15.7.2: + version "15.7.2" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5" + integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.8.1" + +propagate@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/propagate/-/propagate-2.0.1.tgz#40cdedab18085c792334e64f0ac17256d38f9a45" + integrity sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag== + +property-expr@^2.0.2: + version "2.0.4" + resolved "https://registry.yarnpkg.com/property-expr/-/property-expr-2.0.4.tgz#37b925478e58965031bb612ec5b3260f8241e910" + integrity sha512-sFPkHQjVKheDNnPvotjQmm3KD3uk1fWKUN7CrpdbwmUx3CrG3QiM8QpTSimvig5vTXmTvjz7+TDvXOI9+4rkcg== + +proto-list@~1.2.1: + version "1.2.4" + resolved 
"https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" + integrity sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk= + +protocols@^1.1.0, protocols@^1.4.0: + version "1.4.8" + resolved "https://registry.yarnpkg.com/protocols/-/protocols-1.4.8.tgz#48eea2d8f58d9644a4a32caae5d5db290a075ce8" + integrity sha512-IgjKyaUSjsROSO8/D49Ab7hP8mJgTYcqApOqdPhLoPxAplXmkp+zRvsrSQjFn5by0rhm4VH0GAUELIPpx7B1yg== + +protoduck@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/protoduck/-/protoduck-5.0.1.tgz#03c3659ca18007b69a50fd82a7ebcc516261151f" + integrity sha512-WxoCeDCoCBY55BMvj4cAEjdVUFGRWed9ZxPlqTKYyw1nDDTQ4pqmnIMAGfJlg7Dx35uB/M+PHJPTmGOvaCaPTg== + dependencies: + genfun "^5.0.0" + +proxy-addr@~2.0.5: + version "2.0.6" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.6.tgz#fdc2336505447d3f2f2c638ed272caf614bbb2bf" + integrity sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw== + dependencies: + forwarded "~0.1.2" + ipaddr.js "1.9.1" + +prr@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" + integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= + +ps-tree@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/ps-tree/-/ps-tree-1.2.0.tgz#5e7425b89508736cdd4f2224d028f7bb3f722ebd" + integrity sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA== + dependencies: + event-stream "=3.3.4" + +pseudomap@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= + +psl@^1.1.28: + version "1.8.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" + integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== + +pstree.remy@^1.1.7: + 
version "1.1.8" + resolved "https://registry.yarnpkg.com/pstree.remy/-/pstree.remy-1.1.8.tgz#c242224f4a67c21f686839bbdb4ac282b8373d3a" + integrity sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w== + +pump@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" + integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +pump@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" + integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +pumpify@^1.3.3: + version "1.5.1" + resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" + integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== + dependencies: + duplexify "^3.6.0" + inherits "^2.0.3" + pump "^2.0.0" + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +pupa@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.1.1.tgz#f5e8fd4afc2c5d97828faa523549ed8744a20d62" + integrity sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A== + dependencies: + escape-goat "^2.0.0" + +q@^1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" + integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= + +qqjs@^0.3.10: + version "0.3.11" + resolved 
"https://registry.yarnpkg.com/qqjs/-/qqjs-0.3.11.tgz#795b9f7d00807d75c391b1241b5be3077143d9ea" + integrity sha512-pB2X5AduTl78J+xRSxQiEmga1jQV0j43jOPs/MTgTLApGFEOn6NgdE2dEjp7nvDtjkIOZbvFIojAiYUx6ep3zg== + dependencies: + chalk "^2.4.1" + debug "^4.1.1" + execa "^0.10.0" + fs-extra "^6.0.1" + get-stream "^5.1.0" + glob "^7.1.2" + globby "^10.0.1" + http-call "^5.1.2" + load-json-file "^6.2.0" + pkg-dir "^4.2.0" + tar-fs "^2.0.0" + tmp "^0.1.0" + write-json-file "^4.1.1" + +qs@6.7.0: + version "6.7.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" + integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== + +qs@^6.9.4: + version "6.9.4" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.9.4.tgz#9090b290d1f91728d3c22e54843ca44aea5ab687" + integrity sha512-A1kFqHekCTM7cz0udomYUoYNWjBebHm/5wzU/XqrBRBNWectVH0QIiN+NEcZ0Dte5hvzHwbr8+XQmguPhJ6WdQ== + +qs@~6.5.2: + version "6.5.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" + integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== + +queue-microtask@^1.2.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.2.tgz#abf64491e6ecf0f38a6502403d4cda04f372dfd3" + integrity sha512-dB15eXv3p2jDlbOiNLyMabYg1/sXvppd8DP2J3EOCQ0AkuSXCW2tP7mnVouVLJKgUMY6yP0kcQDVpLCN13h4Xg== + +quick-lru@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-1.1.0.tgz#4360b17c61136ad38078397ff11416e186dcfbb8" + integrity sha1-Q2CxfGETatOAeDl/8RQW4Ybc+7g= + +quick-lru@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-4.0.1.tgz#5b8878f113a58217848c6482026c73e1ba57727f" + integrity sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g== + +randombytes@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" + integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== + dependencies: + bytes "3.1.0" + http-errors "1.7.2" + iconv-lite "0.4.24" + unpipe "1.0.0" + +rc@^1.2.7, rc@^1.2.8: + version "1.2.8" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== + dependencies: + deep-extend "^0.6.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +react-is@^16.8.1: + version "16.13.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-is@^17.0.1: + version "17.0.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.1.tgz#5b3531bd76a645a4c9fb6e693ed36419e3301339" + integrity sha512-NAnt2iGDXohE5LI7uBnLnqvLQMtzhkiAOLXTmv+qnF9Ky7xAPcX8Up/xWIhxvLVGJvuLiNc4xQLtuqDRzb4fSA== + +read-chunk@^3.0.0, read-chunk@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/read-chunk/-/read-chunk-3.2.0.tgz#2984afe78ca9bfbbdb74b19387bf9e86289c16ca" + integrity 
sha512-CEjy9LCzhmD7nUpJ1oVOE6s/hBkejlcJEgLQHVnQznOSilOPb+kpKktlLfFDK3/WP43+F80xkUTM2VOkYoSYvQ== + dependencies: + pify "^4.0.1" + with-open-file "^0.1.6" + +read-cmd-shim@^1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/read-cmd-shim/-/read-cmd-shim-1.0.5.tgz#87e43eba50098ba5a32d0ceb583ab8e43b961c16" + integrity sha512-v5yCqQ/7okKoZZkBQUAfTsQ3sVJtXdNfbPnI5cceppoxEVLYA3k+VtV2omkeo8MS94JCy4fSiUwlRBAwCVRPUA== + dependencies: + graceful-fs "^4.1.2" + +"read-package-json@1 || 2", read-package-json@^2.0.0, read-package-json@^2.0.13: + version "2.1.2" + resolved "https://registry.yarnpkg.com/read-package-json/-/read-package-json-2.1.2.tgz#6992b2b66c7177259feb8eaac73c3acd28b9222a" + integrity sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA== + dependencies: + glob "^7.1.1" + json-parse-even-better-errors "^2.3.0" + normalize-package-data "^2.0.0" + npm-normalize-package-bin "^1.0.0" + +read-package-tree@^5.1.6: + version "5.3.1" + resolved "https://registry.yarnpkg.com/read-package-tree/-/read-package-tree-5.3.1.tgz#a32cb64c7f31eb8a6f31ef06f9cedf74068fe636" + integrity sha512-mLUDsD5JVtlZxjSlPPx1RETkNjjvQYuweKwNVt1Sn8kP5Jh44pvYuUHCp6xSVDZWbNxVxG5lyZJ921aJH61sTw== + dependencies: + read-package-json "^2.0.0" + readdir-scoped-modules "^1.0.0" + util-promisify "^2.1.0" + +read-pkg-up@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" + integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= + dependencies: + find-up "^1.0.0" + read-pkg "^1.0.0" + +read-pkg-up@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be" + integrity sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4= + dependencies: + find-up "^2.0.0" + read-pkg "^2.0.0" + +read-pkg-up@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-3.0.0.tgz#3ed496685dba0f8fe118d0691dc51f4a1ff96f07" + integrity sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc= + dependencies: + find-up "^2.0.0" + read-pkg "^3.0.0" + +read-pkg-up@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-4.0.0.tgz#1b221c6088ba7799601c808f91161c66e58f8978" + integrity sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA== + dependencies: + find-up "^3.0.0" + read-pkg "^3.0.0" + +read-pkg-up@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-5.0.0.tgz#b6a6741cb144ed3610554f40162aa07a6db621b8" + integrity sha512-XBQjqOBtTzyol2CpsQOw8LHV0XbDZVG7xMMjmXAJomlVY03WOBRmYgDJETlvcg0H63AJvPRwT7GFi5rvOzUOKg== + dependencies: + find-up "^3.0.0" + read-pkg "^5.0.0" + +read-pkg-up@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" + integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== + dependencies: + find-up "^4.1.0" + read-pkg "^5.2.0" + type-fest "^0.8.1" + +read-pkg@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" + integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= + dependencies: + load-json-file "^1.0.0" + normalize-package-data "^2.3.2" + path-type "^1.0.0" + +read-pkg@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" + integrity sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg= + dependencies: + load-json-file "^2.0.0" + normalize-package-data "^2.3.2" + path-type "^2.0.0" + +read-pkg@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-3.0.0.tgz#9cbc686978fee65d16c00e2b19c237fcf6e38389" + integrity sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k= + dependencies: 
+ load-json-file "^4.0.0" + normalize-package-data "^2.3.2" + path-type "^3.0.0" + +read-pkg@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-4.0.1.tgz#963625378f3e1c4d48c85872b5a6ec7d5d093237" + integrity sha1-ljYlN48+HE1IyFhytabsfV0JMjc= + dependencies: + normalize-package-data "^2.3.2" + parse-json "^4.0.0" + pify "^3.0.0" + +read-pkg@^5.0.0, read-pkg@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" + integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== + dependencies: + "@types/normalize-package-data" "^2.4.0" + normalize-package-data "^2.5.0" + parse-json "^5.0.0" + type-fest "^0.6.0" + +read@1, read@~1.0.1: + version "1.0.7" + resolved "https://registry.yarnpkg.com/read/-/read-1.0.7.tgz#b3da19bd052431a97671d44a42634adf710b40c4" + integrity sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ= + dependencies: + mute-stream "~0.0.4" + +"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.4, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@^2.3.7, readable-stream@~2.3.6: + version "2.3.7" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@1.1.x: + version "1.1.14" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" + integrity sha1-fPTFTvZI44EwhMY23SB54WbAgdk= + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + 
isarray "0.0.1" + string_decoder "~0.10.x" + +"readable-stream@2 || 3", readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.0.2, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdir-scoped-modules@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz#8d45407b4f870a0dcaebc0e28670d18e74514309" + integrity sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw== + dependencies: + debuglog "^1.0.1" + dezalgo "^1.0.0" + graceful-fs "^4.1.2" + once "^1.3.0" + +readdirp@~3.5.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.5.0.tgz#9ba74c019b15d365278d2e91bb8c48d7b4d42c9e" + integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ== + dependencies: + picomatch "^2.2.1" + +rechoir@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" + integrity sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q= + dependencies: + resolve "^1.1.6" + +redent@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" + integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= + dependencies: + indent-string "^2.1.0" + strip-indent "^1.0.1" + +redent@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/redent/-/redent-2.0.0.tgz#c1b2007b42d57eb1389079b3c8333639d5e1ccaa" + integrity sha1-wbIAe0LVfrE4kHmzyDM2OdXhzKo= + dependencies: + indent-string "^3.0.0" + strip-indent "^2.0.0" + 
+redent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" + integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== + dependencies: + indent-string "^4.0.0" + strip-indent "^3.0.0" + +redeyed@~2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/redeyed/-/redeyed-2.1.1.tgz#8984b5815d99cb220469c99eeeffe38913e6cc0b" + integrity sha1-iYS1gV2ZyyIEacme7v/jiRPmzAs= + dependencies: + esprima "~4.0.0" + +reflect-metadata@0.1.13, reflect-metadata@^0.1.13: + version "0.1.13" + resolved "https://registry.yarnpkg.com/reflect-metadata/-/reflect-metadata-0.1.13.tgz#67ae3ca57c972a2aa1642b10fe363fe32d49dc08" + integrity sha512-Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg== + +regenerator-runtime@^0.13.4: + version "0.13.7" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55" + integrity sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew== + +regex-not@^1.0.0, regex-not@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" + integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== + dependencies: + extend-shallow "^3.0.2" + safe-regex "^1.1.0" + +regexp.prototype.flags@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.3.0.tgz#7aba89b3c13a64509dabcf3ca8d9fbb9bdf5cb75" + integrity sha512-2+Q0C5g951OlYlJz6yu5/M33IcsESLlLfsyIaLJaG4FA2r4yP8MvVMJUUP/fVBkSpbbbZlS5gynbEWLipiiXiQ== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.0-next.1" + +regexpp@^3.0.0, regexpp@^3.1.0: + version "3.1.0" + resolved 
"https://registry.yarnpkg.com/regexpp/-/regexpp-3.1.0.tgz#206d0ad0a5648cffbdb8ae46438f3dc51c9f78e2" + integrity sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q== + +registry-auth-token@^4.0.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.1.tgz#6d7b4006441918972ccd5fedcd41dc322c79b250" + integrity sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw== + dependencies: + rc "^1.2.8" + +registry-url@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-5.1.0.tgz#e98334b50d5434b81136b44ec638d9c2009c5009" + integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw== + dependencies: + rc "^1.2.8" + +remove-trailing-separator@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= + +repeat-element@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" + integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== + +repeat-string@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= + +repeating@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" + integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= + dependencies: + is-finite "^1.0.0" + +replace-ext@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.1.tgz#2d6d996d04a15855d967443631dd5f77825b016a" + integrity 
sha512-yD5BHCe7quCgBph4rMQ+0KkIRKwWCrHDOX1p1Gp6HwjPM5kVoCdKGNhN7ydqqsX6lJEnQDKZ/tFMiEdQ1dvPEw== + +request-promise-core@1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.4.tgz#3eedd4223208d419867b78ce815167d10593a22f" + integrity sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw== + dependencies: + lodash "^4.17.19" + +request-promise-native@^1.0.8: + version "1.0.9" + resolved "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.9.tgz#e407120526a5efdc9a39b28a5679bf47b9d9dc28" + integrity sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g== + dependencies: + request-promise-core "1.1.4" + stealthy-require "^1.1.1" + tough-cookie "^2.3.3" + +request@^2.88.0, request@^2.88.2: + version "2.88.2" + resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" + integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.8.0" + caseless "~0.12.0" + combined-stream "~1.0.6" + extend "~3.0.2" + forever-agent "~0.6.1" + form-data "~2.3.2" + har-validator "~5.1.3" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.5.0" + tunnel-agent "^0.6.0" + uuid "^3.3.2" + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + +require-main-filename@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" + integrity 
sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== + +resolve-cwd@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" + integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= + dependencies: + resolve-from "^3.0.0" + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" + integrity sha1-six699nWiBvItuZTM17rywoYh0g= + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve-url@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= + +resolve@^1.1.6, resolve@^1.10.0, resolve@^1.13.1, resolve@^1.17.0, resolve@^1.18.1, resolve@^1.3.2: + version "1.19.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.19.0.tgz#1af5bf630409734a067cae29318aac7fa29a267c" + integrity sha512-rArEXAgsBG4UgRGcynxWIWKFvh/XZCcS8UJdHhwy91zwAvCZIbcs+vAbflgBnNjYMs/i/i+/Ux6IZhML1yPvxg== + dependencies: + is-core-module "^2.1.0" + 
path-parse "^1.0.6" + +responselike@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" + integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec= + dependencies: + lowercase-keys "^1.0.0" + +restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + +restore-cursor@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" + integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== + dependencies: + onetime "^5.1.0" + signal-exit "^3.0.2" + +ret@~0.1.10: + version "0.1.15" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" + integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== + +retry@^0.10.0: + version "0.10.1" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.10.1.tgz#e76388d217992c252750241d3d3956fed98d8ff4" + integrity sha1-52OI0heZLCUnUCQdPTlW/tmNj/Q= + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@2.6.3: + version "2.6.3" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" + integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== + dependencies: + glob "^7.1.3" + +rimraf@^2.2.8, rimraf@^2.5.4, rimraf@^2.6.1, rimraf@^2.6.2, rimraf@^2.6.3: + version "2.7.1" + resolved 
"https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== + dependencies: + glob "^7.1.3" + +rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +"rosetta-specifications@https://github.com/coinbase/rosetta-specifications.git": + version "0.0.0" + resolved "https://github.com/coinbase/rosetta-specifications.git#b76365b7a76ff6fd81a18479f0a603d9eb5e8db7" + +rsvp@^4.8.4: + version "4.8.5" + resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.5.tgz#c8f155311d167f68f21e168df71ec5b083113734" + integrity sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA== + +run-async@^2.0.0, run-async@^2.2.0, run-async@^2.4.0: + version "2.4.1" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" + integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== + +run-parallel@^1.1.9: + version "1.1.10" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.1.10.tgz#60a51b2ae836636c81377df16cb107351bcd13ef" + integrity sha512-zb/1OuZ6flOlH6tQyMPUrE3x3Ulxjlo9WIVXR4yVYi4H9UXQaeIsPbLn2R3O3vQCnDKkAl2qHiuocKKX4Tz/Sw== + +run-queue@^1.0.0, run-queue@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/run-queue/-/run-queue-1.0.3.tgz#e848396f057d223f24386924618e25694161ec47" + integrity sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec= + dependencies: + aproba "^1.1.1" + +rx@2.3.24: + version "2.3.24" + resolved "https://registry.yarnpkg.com/rx/-/rx-2.3.24.tgz#14f950a4217d7e35daa71bbcbe58eff68ea4b2b7" + integrity sha1-FPlQpCF9fjXapxu8vljv9o6ksrc= 
+ +rxjs@6.6.3, rxjs@^6.4.0, rxjs@^6.5.2, rxjs@^6.6.0: + version "6.6.3" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.3.tgz#8ca84635c4daa900c0d3967a6ee7ac60271ee552" + integrity sha512-trsQc+xYYXZ3urjOiJOuCOa5N3jAZ3eiSpQB5hIT8zGlL2QfnHLJ2r7GMkBGuIausdJN1OneaI6gQlsqNHHmZQ== + dependencies: + tslib "^1.9.0" + +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" + integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= + dependencies: + ret "~0.1.10" + +"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sane@^4.0.3: + version "4.1.0" + resolved "https://registry.yarnpkg.com/sane/-/sane-4.1.0.tgz#ed881fd922733a6c461bc189dc2b6c006f3ffded" + integrity sha512-hhbzAgTIX8O7SHfp2c8/kREfEn4qO/9q8C9beyY6+tvZ87EpoZ3i1RIEvp27YBswnNbY9mWd6paKVmKbAgLfZA== + dependencies: + "@cnakazawa/watch" "^1.0.3" + anymatch "^2.0.0" + capture-exit "^2.0.0" + exec-sh "^0.3.2" + execa "^1.0.0" + fb-watchman "^2.0.0" 
+ micromatch "^3.1.4" + minimist "^1.1.1" + walker "~1.0.5" + +sax@>=0.6.0, sax@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +saxes@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + dependencies: + xmlchars "^2.2.0" + +scoped-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/scoped-regex/-/scoped-regex-1.0.0.tgz#a346bb1acd4207ae70bd7c0c7ca9e566b6baddb8" + integrity sha1-o0a7Gs1CB65wvXwMfKnlZra63bg= + +segfault-handler@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/segfault-handler/-/segfault-handler-1.3.0.tgz#054bc847832fa14f218ba6a79e42877501c8870e" + integrity sha512-p7kVHo+4uoYkr0jmIiTBthwV5L2qmWtben/KDunDZ834mbos+tY+iO0//HpAJpOFSQZZ+wxKWuRo4DxV02B7Lg== + dependencies: + bindings "^1.2.1" + nan "^2.14.0" + +semver-diff@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b" + integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg== + dependencies: + semver "^6.3.0" + +"semver@2 || 3 || 4 || 5", "semver@2.x || 3.x || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0, semver@^5.7.0, semver@^5.7.1: + version "5.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +semver@7.x, semver@^7.1.3, semver@^7.2.1, semver@^7.3.2: + version "7.3.4" + resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.3.4.tgz#27aaa7d2e4ca76452f98d3add093a72c943edc97" + integrity sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw== + dependencies: + lru-cache "^6.0.0" + +semver@^6.0.0, semver@^6.2.0, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +send@0.17.1: + version "0.17.1" + resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" + integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== + dependencies: + debug "2.6.9" + depd "~1.1.2" + destroy "~1.0.4" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "~1.7.2" + mime "1.6.0" + ms "2.1.1" + on-finished "~2.3.0" + range-parser "~1.2.1" + statuses "~1.5.0" + +serve-static@1.14.1: + version "1.14.1" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" + integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.17.1" + +set-blocking@^2.0.0, set-blocking@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= + +set-getter@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/set-getter/-/set-getter-0.1.0.tgz#d769c182c9d5a51f409145f2fba82e5e86e80376" + integrity sha1-12nBgsnVpR9AkUXy+6guXoboA3Y= + dependencies: + to-object-path "^0.3.0" + +set-value@^2.0.0, set-value@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" + integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.3" + split-string "^3.0.1" + +setprototypeof@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" + integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== + +sha.js@^2.4.11: + version "2.4.11" + resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" + integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== + dependencies: + inherits "^2.0.1" + safe-buffer "^5.0.1" + +shallow-clone@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" + integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== + dependencies: + kind-of "^6.0.2" + +shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= + dependencies: + shebang-regex "^1.0.0" + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + integrity 
sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shelljs@^0.8.0, shelljs@^0.8.3, shelljs@^0.8.4: + version "0.8.4" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.4.tgz#de7684feeb767f8716b326078a8a00875890e3c2" + integrity sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ== + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + +shellwords@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b" + integrity sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww== + +side-channel@^1.0.2, side-channel@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.3.tgz#cdc46b057550bbab63706210838df5d4c19519c3" + integrity sha512-A6+ByhlLkksFoUepsGxfj5x1gTSrs+OydsRptUxeNCabQpCFUvcwIczgOigI8vhY/OJCnPnyE9rGiwgvr9cS1g== + dependencies: + es-abstract "^1.18.0-next.0" + object-inspect "^1.8.0" + +signal-exit@^3.0.0, signal-exit@^3.0.2: + version "3.0.3" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" + integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== + +simple-peer@9.9.3: + version "9.9.3" + resolved "https://registry.yarnpkg.com/simple-peer/-/simple-peer-9.9.3.tgz#b52c39d1173620d06c8b29ada7ee2ad3384bb469" + integrity sha512-T3wuv0UqBpDTV0x0pJPPsz4thy0tC0fTOHE4g9+AF43RUxxT+MWeXVtdQcK5Xuzv/XTVrB2NrGzdfO1IFBqOkw== + dependencies: + buffer "^6.0.2" + debug "^4.2.0" + err-code "^2.0.3" + get-browser-rtc "^1.0.2" + queue-microtask "^1.2.0" + randombytes 
"^2.1.0" + readable-stream "^3.6.0" + +simple-swizzle@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a" + integrity sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo= + dependencies: + is-arrayish "^0.3.1" + +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" + integrity sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU= + +slash@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-2.0.0.tgz#de552851a1759df3a8f206535442f5ec4ddeab44" + integrity sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slice-ansi@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-2.1.0.tgz#cacd7693461a637a5788d92a7dd4fba068e81636" + integrity sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ== + dependencies: + ansi-styles "^3.2.0" + astral-regex "^1.0.0" + is-fullwidth-code-point "^2.0.0" + +slide@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/slide/-/slide-1.1.6.tgz#56eb027d65b4d2dce6cb2e2d32c4d4afc9e1d707" + integrity sha1-VusCfWW00tzmyy4tMsTUr8nh1wc= + +smart-buffer@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.1.0.tgz#91605c25d91652f4661ea69ccf45f1b331ca21ba" + integrity 
sha512-iVICrxOzCynf/SNaBQCw34eM9jROU/s5rzIhpOvzhzuYHfJR/DhZfDkXiZSgKXfgv26HT3Yni3AV/DGw0cGnnw== + +snapdragon-node@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== + dependencies: + define-property "^1.0.0" + isobject "^3.0.0" + snapdragon-util "^3.0.1" + +snapdragon-util@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== + dependencies: + kind-of "^3.2.0" + +snapdragon@^0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" + integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== + dependencies: + base "^0.11.1" + debug "^2.2.0" + define-property "^0.2.5" + extend-shallow "^2.0.1" + map-cache "^0.2.2" + source-map "^0.5.6" + source-map-resolve "^0.5.0" + use "^3.1.0" + +socks-proxy-agent@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-4.0.2.tgz#3c8991f3145b2799e70e11bd5fbc8b1963116386" + integrity sha512-NT6syHhI9LmuEMSK6Kd2V7gNv5KFZoLE7V5udWmn0de+3Mkj3UMA/AJPLyeNUVmElCurSHtUdM3ETpR3z770Wg== + dependencies: + agent-base "~4.2.1" + socks "~2.3.2" + +socks@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/socks/-/socks-2.3.3.tgz#01129f0a5d534d2b897712ed8aceab7ee65d78e3" + integrity sha512-o5t52PCNtVdiOvzMry7wU4aOqYWL0PeCXRWBEiJow4/i/wr+wpsJQ9awEu1EonLIqsfGd5qSgDdxEOvCdmBEpA== + dependencies: + ip "1.1.5" + smart-buffer "^4.1.0" + +sort-keys@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" + integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= + dependencies: + is-plain-obj "^1.0.0" + +sort-keys@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-4.1.0.tgz#727edc12fee49ce482848db07369ec44e0f3e9f2" + integrity sha512-/sRdxzkkPFUYiCrTr/2t+104nDc9AgDmEpeVYuvOWYQe3Djk1GWO6lVw3Vx2jfh1SsR0eehhd1nvFYlzt5e99w== + dependencies: + is-plain-obj "^2.0.0" + +sort-pjson@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sort-pjson/-/sort-pjson-1.0.3.tgz#79435ab838db3700cc255278175c111b407b6bb5" + integrity sha512-h/xRX+8zuV9tbnbkRwmdNNyyJbjzVTh8YFpMgEgGU2umFDFg2EDfWKtA5YOfnBwT4YoZfJf6hrc0yuXLUvUDFA== + dependencies: + "@oclif/fixpack" "^2.3.0" + +source-map-resolve@^0.5.0: + version "0.5.3" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" + integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== + dependencies: + atob "^2.1.2" + decode-uri-component "^0.2.0" + resolve-url "^0.2.1" + source-map-url "^0.4.0" + urix "^0.1.0" + +source-map-support@^0.5.17, source-map-support@^0.5.6: + version "0.5.19" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61" + integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-url@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" + integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= + +source-map@^0.5.0, source-map@^0.5.6: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + 
integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +source-map@^0.7.3: + version "0.7.3" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" + integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== + +spawn-command@^0.0.2-1: + version "0.0.2-1" + resolved "https://registry.yarnpkg.com/spawn-command/-/spawn-command-0.0.2-1.tgz#62f5e9466981c1b796dc5929937e11c9c6921bd0" + integrity sha1-YvXpRmmBwbeW3Fkpk34RycaSG9A= + +spdx-correct@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" + integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== + dependencies: + spdx-expression-parse "^3.0.0" + spdx-license-ids "^3.0.0" + +spdx-exceptions@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" + integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== + +spdx-expression-parse@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" + integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== + dependencies: + spdx-exceptions "^2.1.0" + spdx-license-ids "^3.0.0" + +spdx-license-ids@^3.0.0: + version "3.0.7" + resolved 
"https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz#e9c18a410e5ed7e12442a549fbd8afa767038d65" + integrity sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ== + +split-string@^3.0.1, split-string@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== + dependencies: + extend-shallow "^3.0.0" + +split2@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/split2/-/split2-2.2.0.tgz#186b2575bcf83e85b7d18465756238ee4ee42493" + integrity sha512-RAb22TG39LhI31MbreBgIuKiIKhVsawfTgEGqKHTK87aG+ul/PB8Sqoi3I7kVdRWiCfrKxK3uo4/YUkpNvhPbw== + dependencies: + through2 "^2.0.2" + +split2@^3.1.1: + version "3.2.2" + resolved "https://registry.yarnpkg.com/split2/-/split2-3.2.2.tgz#bf2cf2a37d838312c249c89206fd7a17dd12365f" + integrity sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg== + dependencies: + readable-stream "^3.0.0" + +split@0.3: + version "0.3.3" + resolved "https://registry.yarnpkg.com/split/-/split-0.3.3.tgz#cd0eea5e63a211dfff7eb0f091c4133e2d0dd28f" + integrity sha1-zQ7qXmOiEd//frDwkcQTPi0N0o8= + dependencies: + through "2" + +split@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/split/-/split-1.0.1.tgz#605bd9be303aa59fb35f9229fbea0ddec9ea07d9" + integrity sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg== + dependencies: + through "2" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + +sshpk@^1.7.0: + version "1.16.1" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" + integrity 
sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + bcrypt-pbkdf "^1.0.0" + dashdash "^1.12.0" + ecc-jsbn "~0.1.1" + getpass "^0.1.1" + jsbn "~0.1.0" + safer-buffer "^2.0.2" + tweetnacl "~0.14.0" + +ssri@^6.0.0, ssri@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.1.tgz#2a3c41b28dd45b62b63676ecb74001265ae9edd8" + integrity sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA== + dependencies: + figgy-pudding "^3.5.1" + +stack-trace@0.0.x: + version "0.0.10" + resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0" + integrity sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA= + +stack-utils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.3.tgz#cd5f030126ff116b78ccb3c027fe302713b61277" + integrity sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw== + dependencies: + escape-string-regexp "^2.0.0" + +static-extend@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= + dependencies: + define-property "^0.2.5" + object-copy "^0.1.0" + +"statuses@>= 1.5.0 < 2", statuses@~1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= + +stdout-stderr@^0.1.9: + version "0.1.13" + resolved "https://registry.yarnpkg.com/stdout-stderr/-/stdout-stderr-0.1.13.tgz#54e3450f3d4c54086a49c0c7f8786a44d1844b6f" + integrity sha512-Xnt9/HHHYfjZ7NeQLvuQDyL1LnbsbddgMFKCuaQKwGCdJm8LnstZIXop+uOY36UR1UXXoHXfMbC1KlVdVd2JLA== + dependencies: + debug "^4.1.1" + strip-ansi "^6.0.0" + +stealthy-require@^1.1.1: + version "1.1.1" + 
resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" + integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks= + +stream-combiner@~0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/stream-combiner/-/stream-combiner-0.0.4.tgz#4d5e433c185261dde623ca3f44c586bcf5c4ad14" + integrity sha1-TV5DPBhSYd3mI8o/RMWGvPXErRQ= + dependencies: + duplexer "~0.1.1" + +stream-each@^1.1.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/stream-each/-/stream-each-1.2.3.tgz#ebe27a0c389b04fbcc233642952e10731afa9bae" + integrity sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw== + dependencies: + end-of-stream "^1.1.0" + stream-shift "^1.0.0" + +stream-events@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/stream-events/-/stream-events-1.0.5.tgz#bbc898ec4df33a4902d892333d47da9bf1c406d5" + integrity sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg== + dependencies: + stubs "^3.0.0" + +stream-shift@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" + integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== + +streamsearch@0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-0.1.2.tgz#808b9d0e56fc273d809ba57338e929919a1a9f1a" + integrity sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo= + +string-argv@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/string-argv/-/string-argv-0.1.2.tgz#c5b7bc03fb2b11983ba3a72333dd0559e77e4738" + integrity sha512-mBqPGEOMNJKXRo7z0keX0wlAhbBAjilUdPW13nN0PecVryZxdHIeM7TqbsSUA7VYuS00HGC6mojP7DlQzfa9ZA== + +string-length@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.1.tgz#4a973bf31ef77c4edbceadd6af2611996985f8a1" + integrity 
sha512-PKyXUd0LK0ePjSOnWn34V2uD6acUWev9uy0Ft05k0E8xRW+SKcA0F7eMr7h5xlzfn+4O3N+55rduYyet3Jk+jw== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-template@~0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/string-template/-/string-template-0.2.1.tgz#42932e598a352d01fc22ec3367d9d84eec6c9add" + integrity sha1-QpMuWYo1LQH8IuwzZ9nYTuxsmt0= + +string-width@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + strip-ansi "^3.0.0" + +"string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.0, string-width@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + +string-width@^3.0.0, string-width@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" + integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== + dependencies: + emoji-regex "^7.0.1" + is-fullwidth-code-point "^2.0.0" + strip-ansi "^5.1.0" + +string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5" + integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.0" + +string.prototype.matchall@^4.0.2: + version "4.0.3" + resolved 
"https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.3.tgz#24243399bc31b0a49d19e2b74171a15653ec996a" + integrity sha512-OBxYDA2ifZQ2e13cP82dWFMaCV9CGF8GzmN4fljBVw5O5wep0lu4gacm1OL6MjROoUnB8VbkWRThqkV2YFLNxw== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + es-abstract "^1.18.0-next.1" + has-symbols "^1.0.1" + internal-slot "^1.0.2" + regexp.prototype.flags "^1.3.0" + side-channel "^1.0.3" + +string.prototype.trimend@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz#a22bd53cca5c7cf44d7c9d5c732118873d6cd18b" + integrity sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + +string.prototype.trimstart@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz#9b4cb590e123bb36564401d59824298de50fd5aa" + integrity sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~0.10.x: + version "0.10.31" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" + integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ= + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity 
sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^3.0.0, strip-ansi@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= + dependencies: + ansi-regex "^3.0.0" + +strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" + integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== + dependencies: + ansi-regex "^4.1.0" + +strip-ansi@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" + integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== + dependencies: + ansi-regex "^5.0.0" + +strip-bom-buf@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-bom-buf/-/strip-bom-buf-1.0.0.tgz#1cb45aaf57530f4caf86c7f75179d2c9a51dd572" + integrity sha1-HLRar1dTD0yvhsf3UXnSyaUd1XI= + dependencies: + is-utf8 "^0.2.1" + +strip-bom-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-bom-stream/-/strip-bom-stream-2.0.0.tgz#f87db5ef2613f6968aa545abfe1ec728b6a829ca" + integrity sha1-+H217yYT9paKpUWr/h7HKLaoKco= + dependencies: + first-chunk-stream "^2.0.0" + strip-bom "^2.0.0" + +strip-bom@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" + integrity 
sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4= + dependencies: + is-utf8 "^0.2.0" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= + +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-eof@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-indent@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" + integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= + dependencies: + get-stdin "^4.0.1" + +strip-indent@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-2.0.0.tgz#5ef8db295d01e6ed6cbf7aab96998d7822527b68" + integrity sha1-XvjbKV0B5u1sv3qrlpmNeCJSe2g= + +strip-indent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== + dependencies: + min-indent "^1.0.0" + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" 
+ integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= + +strong-log-transformer@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/strong-log-transformer/-/strong-log-transformer-2.1.0.tgz#0f5ed78d325e0421ac6f90f7f10e691d6ae3ae10" + integrity sha512-B3Hgul+z0L9a236FAUC9iZsL+nVHgoCJnqCbN588DjYxvGXaXaaFbfmQ/JhvKjZwsOukuR72XbHv71Qkug0HxA== + dependencies: + duplexer "^0.1.1" + minimist "^1.2.0" + through "^2.3.4" + +stubs@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/stubs/-/stubs-3.0.0.tgz#e8d2ba1fa9c90570303c030b6900f7d5f89abe5b" + integrity sha1-6NK6H6nJBXAwPAMLaQD31fiavls= + +superagent@6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/superagent/-/superagent-6.1.0.tgz#09f08807bc41108ef164cfb4be293cebd480f4a6" + integrity sha512-OUDHEssirmplo3F+1HWKUrUjvnQuA+nZI6i/JJBdXb5eq9IyEQwPyPpqND+SSsxf6TygpBEkUjISVRN4/VOpeg== + dependencies: + component-emitter "^1.3.0" + cookiejar "^2.1.2" + debug "^4.1.1" + fast-safe-stringify "^2.0.7" + form-data "^3.0.0" + formidable "^1.2.2" + methods "^1.1.2" + mime "^2.4.6" + qs "^6.9.4" + readable-stream "^3.6.0" + semver "^7.3.2" + +supertest@6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/supertest/-/supertest-6.0.1.tgz#f6b54370de85c45d6557192c8d7df604ca2c9e18" + integrity sha512-8yDNdm+bbAN/jeDdXsRipbq9qMpVF7wRsbwLgsANHqdjPsCoecmlTuqEcLQMGpmojFBhxayZ0ckXmLXYq7e+0g== + dependencies: + methods "1.1.2" + superagent "6.1.0" + +supports-color@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= + +supports-color@^3.2.3: + version "3.2.3" + 
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" + integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= + dependencies: + has-flag "^1.0.0" + +supports-color@^5.0.0, supports-color@^5.3.0, supports-color@^5.4.0, supports-color@^5.5.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" + integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-1.0.1.tgz#71daedf36cc1060ac5100c351bb3da48c29c0ef7" + integrity sha512-HHi5kVSefKaJkGYXbDuKbUGRVxqnWGn3J2e39CYcNJEfWciGq2zYtOhXLTlvrOZW1QU7VX67w7fMmWafHX9Pfw== + dependencies: + has-flag "^2.0.0" + supports-color "^5.0.0" + +supports-hyperlinks@^2.0.0, supports-hyperlinks@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.1.0.tgz#f663df252af5f37c5d49bbd7eeefa9e0b9e59e47" + integrity sha512-zoE5/e+dnEijk6ASB6/qrK+oYdm2do1hjoLWrqUC/8WEIW1gbxFcKuBof7sW8ArN6e+AYvsE8HBGiVRWL/F5CA== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + 
+swagger-routes-express@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/swagger-routes-express/-/swagger-routes-express-3.2.1.tgz#c929ee186177e0b07cb1c0664d86d7374003f575" + integrity sha512-HqDDFPiuD8A3x+1dCcAaXnElUNWWNnkIkqOZ8KVC6CzoJpbM3NXRDMniuSXXmVS27jmSxylKanhJyERs/FYtaA== + dependencies: + semver "^7.3.2" + +swagger-schema-official@2.0.0-bab6bed: + version "2.0.0-bab6bed" + resolved "https://registry.yarnpkg.com/swagger-schema-official/-/swagger-schema-official-2.0.0-bab6bed.tgz#70070468d6d2977ca5237b2e519ca7d06a2ea3fd" + integrity sha1-cAcEaNbSl3ylI3suUZyn0Gouo/0= + +swagger-ui-dist@^3.18.1: + version "3.37.2" + resolved "https://registry.yarnpkg.com/swagger-ui-dist/-/swagger-ui-dist-3.37.2.tgz#b250ae8d0b3b5ee15b6eb7a53aa9b69cd5888ad3" + integrity sha512-XIT4asxgeL4GUNPPsqpEqLt20M/u6OhFYqTh42IoEAvAyv5e9EGw5uhP9dLAD10opcMYqdkJ5qU+MpN2HZ5xyA== + +swagger-ui-express@^4.1.5: + version "4.1.5" + resolved "https://registry.yarnpkg.com/swagger-ui-express/-/swagger-ui-express-4.1.5.tgz#114f3ce017ca6d6069d960ea8ffef677784b6f2f" + integrity sha512-hs9OqBu2jwmhYyFUhdTiwurvbZC+bq2XnWmmbYymVdwhgJCcGkLdnqymX24ZYUve2nkYSvKPEDCo20ZF+vyw9A== + dependencies: + swagger-ui-dist "^3.18.1" + +symbol-tree@^3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +synchronous-promise@^2.0.13: + version "2.0.15" + resolved "https://registry.yarnpkg.com/synchronous-promise/-/synchronous-promise-2.0.15.tgz#07ca1822b9de0001f5ff73595f3d08c4f720eb8e" + integrity sha512-k8uzYIkIVwmT+TcglpdN50pS2y1BDcUnBPK9iJeGu0Pl1lOI8pD6wtzgw91Pjpe+RxtTncw32tLxs/R0yNL2Mg== + +table@^5.2.3: + version "5.4.6" + resolved "https://registry.yarnpkg.com/table/-/table-5.4.6.tgz#1292d19500ce3f86053b05f0e8e7e4a3bb21079e" + integrity 
sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug== + dependencies: + ajv "^6.10.2" + lodash "^4.17.14" + slice-ansi "^2.1.0" + string-width "^3.0.0" + +taketalk@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/taketalk/-/taketalk-1.0.0.tgz#b4d4f0deed206ae7df775b129ea2ca6de52f26dd" + integrity sha1-tNTw3u0gauffd1sSnqLKbeUvJt0= + dependencies: + get-stdin "^4.0.1" + minimist "^1.1.0" + +tar-fs@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" + integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== + dependencies: + chownr "^1.1.1" + mkdirp-classic "^0.5.2" + pump "^3.0.0" + tar-stream "^2.1.4" + +tar-stream@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.1.4.tgz#c4fb1a11eb0da29b893a5b25476397ba2d053bfa" + integrity sha512-o3pS2zlG4gxr67GmFYBLlq+dM8gyRGUOvsrHclSkvtVtQbjV0s/+ZE8OpICbaj8clrX3tjeHngYGP7rweaBnuw== + dependencies: + bl "^4.0.3" + end-of-stream "^1.4.1" + fs-constants "^1.0.0" + inherits "^2.0.3" + readable-stream "^3.1.1" + +tar@^4, tar@^4.4.10, tar@^4.4.12, tar@^4.4.8: + version "4.4.13" + resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.13.tgz#43b364bc52888d555298637b10d60790254ab525" + integrity sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA== + dependencies: + chownr "^1.1.1" + fs-minipass "^1.2.5" + minipass "^2.8.6" + minizlib "^1.2.1" + mkdirp "^0.5.0" + safe-buffer "^5.1.2" + yallist "^3.0.3" + +teeny-request@6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/teeny-request/-/teeny-request-6.0.1.tgz#9b1f512cef152945827ba7e34f62523a4ce2c5b0" + integrity sha512-TAK0c9a00ELOqLrZ49cFxvPVogMUFaWY8dUsQc/0CuQPGF+BOxOQzXfE413BAk2kLomwNplvdtMpeaeGWmoc2g== + dependencies: + http-proxy-agent "^4.0.0" + https-proxy-agent "^4.0.0" + node-fetch "^2.2.0" + 
stream-events "^1.0.5" + uuid "^3.3.2" + +temp-dir@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d" + integrity sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0= + +temp-write@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/temp-write/-/temp-write-2.1.0.tgz#59890918e0ef09d548aaa342f4bd3409d8404e96" + integrity sha1-WYkJGODvCdVIqqNC9L00CdhATpY= + dependencies: + graceful-fs "^4.1.2" + mkdirp "^0.5.0" + os-tmpdir "^1.0.0" + pify "^2.2.0" + pinkie-promise "^2.0.0" + uuid "^2.0.1" + +temp-write@^3.4.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/temp-write/-/temp-write-3.4.0.tgz#8cff630fb7e9da05f047c74ce4ce4d685457d492" + integrity sha1-jP9jD7fp2gXwR8dM5M5NaFRX1JI= + dependencies: + graceful-fs "^4.1.2" + is-stream "^1.1.0" + make-dir "^1.0.0" + pify "^3.0.0" + temp-dir "^1.0.0" + uuid "^3.0.1" + +term-size@^2.1.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/term-size/-/term-size-2.2.1.tgz#2a6a54840432c2fb6320fea0f415531e90189f54" + integrity sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg== + +terminal-link@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes "^4.2.1" + supports-hyperlinks "^2.0.0" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-extensions@^1.0.0: + version "1.9.0" + resolved 
"https://registry.yarnpkg.com/text-extensions/-/text-extensions-1.9.0.tgz#1853e45fee39c945ce6f6c36b2d659b5aabc2a26" + integrity sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ== + +text-hex@1.0.x: + version "1.0.0" + resolved "https://registry.yarnpkg.com/text-hex/-/text-hex-1.0.0.tgz#69dc9c1b17446ee79a92bf5b884bb4b9127506f5" + integrity sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg== + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + +textextensions@^2.5.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/textextensions/-/textextensions-2.6.0.tgz#d7e4ab13fe54e32e08873be40d51b74229b00fc4" + integrity sha512-49WtAWS+tcsy93dRt6P0P3AMD2m5PvXRhuEA0kaXos5ZLlujtYmpmFsB+QvWUSxE1ZsstmYXfQ7L40+EcQgpAQ== + +thenify-all@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha1-GhkY1ALY/D+Y+/I02wvMjMEOlyY= + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.1" + resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== + dependencies: + any-promise "^1.0.0" + +throat@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/throat/-/throat-5.0.0.tgz#c5199235803aad18754a667d659b5e72ce16764b" + integrity sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA== + +through2@^2.0.0, through2@^2.0.2: + version "2.0.5" + resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" + integrity 
sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== + dependencies: + readable-stream "~2.3.6" + xtend "~4.0.1" + +through2@^3.0.0, through2@^3.0.1, through2@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/through2/-/through2-3.0.2.tgz#99f88931cfc761ec7678b41d5d7336b5b6a07bf4" + integrity sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ== + dependencies: + inherits "^2.0.4" + readable-stream "2 || 3" + +through2@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/through2/-/through2-4.0.2.tgz#a7ce3ac2a7a8b0b966c80e7c49f0484c3b239764" + integrity sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw== + dependencies: + readable-stream "3" + +through@2, "through@>=2.2.7 <3", through@^2.3.4, through@^2.3.6, through@~2.3, through@~2.3.1: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= + +timed-out@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" + integrity sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8= + +tmp@^0.0.33: + version "0.0.33" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== + dependencies: + os-tmpdir "~1.0.2" + +tmp@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.1.0.tgz#ee434a4e22543082e294ba6201dcc6eafefa2877" + integrity sha512-J7Z2K08jbGcdA1kkQpJSqLF6T0tdQqpR2pnSUXsIchbPdTI9v3e85cLW0d6WDhwuAleOV71j2xWs8qMPfK7nKw== + dependencies: + rimraf "^2.6.3" + +tmpl@1.0.x: + version "1.0.4" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" + integrity 
sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= + +to-object-path@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= + dependencies: + kind-of "^3.0.2" + +to-readable-stream@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771" + integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== + +to-regex-range@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= + dependencies: + is-number "^3.0.0" + repeat-string "^1.6.1" + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +to-regex@^3.0.1, to-regex@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" + integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== + dependencies: + define-property "^2.0.2" + extend-shallow "^3.0.2" + regex-not "^1.0.2" + safe-regex "^1.1.0" + +toidentifier@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" + integrity 
sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== + +toposort@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/toposort/-/toposort-2.0.2.tgz#ae21768175d1559d48bef35420b2f4962f09c330" + integrity sha1-riF2gXXRVZ1IvvNUILL0li8JwzA= + +touch@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b" + integrity sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA== + dependencies: + nopt "~1.0.10" + +tough-cookie@^2.3.3, tough-cookie@~2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" + integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== + dependencies: + psl "^1.1.28" + punycode "^2.1.1" + +tough-cookie@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-3.0.1.tgz#9df4f57e739c26930a018184887f4adb7dca73b2" + integrity sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg== + dependencies: + ip-regex "^2.1.0" + psl "^1.1.28" + punycode "^2.1.1" + +tr46@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk= + dependencies: + punycode "^2.1.0" + +tr46@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.0.2.tgz#03273586def1595ae08fedb38d7733cee91d2479" + integrity sha512-3n1qG+/5kg+jrbTzwAykB5yRYtQCTqOGKq5U5PE3b0a1/mzo6snDhjGS0zJVJunO0NrT3Dg1MLy5TjWP/UJppg== + dependencies: + punycode "^2.1.1" + +tree-kill@^1.1.0, tree-kill@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.2.tgz#4ca09a9092c88b73a7cdc5e8a01b507b0790a0cc" + integrity 
sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A== + +treeify@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/treeify/-/treeify-1.1.0.tgz#4e31c6a463accd0943879f30667c4fdaff411bb8" + integrity sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A== + +trim-newlines@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" + integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= + +trim-newlines@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-2.0.0.tgz#b403d0b91be50c331dfc4b82eeceb22c3de16d20" + integrity sha1-tAPQuRvlDDMd/EuC7s6yLD3hbSA= + +trim-newlines@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.0.tgz#79726304a6a898aa8373427298d54c2ee8b1cb30" + integrity sha512-C4+gOpvmxaSMKuEf9Qc134F1ZuOHVXKRbtEflf4NTtuuJDEIJ9p5PXsalL8SkeRw+qit1Mo+yuvMPAKwWg/1hA== + +trim-off-newlines@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/trim-off-newlines/-/trim-off-newlines-1.0.1.tgz#9f9ba9d9efa8764c387698bcbfeb2c848f11adb3" + integrity sha1-n5up2e+odkw4dpi8v+sshI8RrbM= + +triple-beam@^1.2.0, triple-beam@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.3.0.tgz#a595214c7298db8339eeeee083e4d10bd8cb8dd9" + integrity sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw== + +ts-jest@26.4.4, ts-jest@^26.4.0: + version "26.4.4" + resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-26.4.4.tgz#61f13fb21ab400853c532270e52cc0ed7e502c49" + integrity sha512-3lFWKbLxJm34QxyVNNCgXX1u4o/RV0myvA2y2Bxm46iGIjKlaY0own9gIckbjZJPn+WaJEnfPPJ20HHGpoq4yg== + dependencies: + "@types/jest" "26.x" + bs-logger "0.x" + buffer-from "1.x" + fast-json-stable-stringify "2.x" + jest-util "^26.1.0" + json5 "2.x" + 
lodash.memoize "4.x" + make-error "1.x" + mkdirp "1.x" + semver "7.x" + yargs-parser "20.x" + +ts-log@^2.1.4: + version "2.2.3" + resolved "https://registry.yarnpkg.com/ts-log/-/ts-log-2.2.3.tgz#4da5640fe25a9fb52642cd32391c886721318efb" + integrity sha512-XvB+OdKSJ708Dmf9ore4Uf/q62AYDTzFcAdxc8KNML1mmAWywRFVt/dn1KYJH8Agt5UJNujfM3znU5PxgAzA2w== + +ts-node@9.1.1: + version "9.1.1" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-9.1.1.tgz#51a9a450a3e959401bda5f004a72d54b936d376d" + integrity sha512-hPlt7ZACERQGf03M253ytLY3dHbGNGrAq9qIHWUY9XHYl1z7wYngSr3OQ5xmui8o2AaxsONxIzjafLUiWBo1Fg== + dependencies: + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + source-map-support "^0.5.17" + yn "3.1.1" + +tsc-watch@4.2.9: + version "4.2.9" + resolved "https://registry.yarnpkg.com/tsc-watch/-/tsc-watch-4.2.9.tgz#d93fc74233ca4ef7ee6b12d08c0fe6aca3e19044" + integrity sha512-DlTaoDs74+KUpyWr7dCGhuscAUKCz6CiFduBN7R9RbLJSSN1moWdwoCLASE7+zLgGvV5AwXfYDiEMAsPGaO+Vw== + dependencies: + cross-spawn "^7.0.3" + node-cleanup "^2.1.2" + ps-tree "^1.2.0" + string-argv "^0.1.1" + strip-ansi "^6.0.0" + +tsconfig-paths@^3.9.0: + version "3.9.0" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz#098547a6c4448807e8fcb8eae081064ee9a3c90b" + integrity sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.0" + strip-bom "^3.0.0" + +tslib@2.0.3, tslib@^2.0.0, tslib@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.0.3.tgz#8e0741ac45fc0c226e58a17bfc3e64b9bc6ca61c" + integrity sha512-uZtkfKblCEQtZKBF6EBXVZeQNl82yqtDQdv+eck8u7tdPxjLu2/lp5/uPW+um2tpuxINHWy3GhiccY7QgEaVHQ== + +tslib@^1, tslib@^1.13.0, tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity 
sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a" + integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== + +tsutils@^3.17.1: + version "3.17.1" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.17.1.tgz#ed719917f11ca0dee586272b2ac49e015a2dd759" + integrity sha512-kzeQ5B8H3w60nFY2g8cJIuH7JDpsALXySGtwGJ0p2LSjLgay3NdIpqq5SoOBe46bKDW2iq25irHCr8wjomUS2g== + dependencies: + tslib "^1.8.1" + +tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= + dependencies: + safe-buffer "^5.0.1" + +tweetnacl@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-1.0.3.tgz#ac0af71680458d8a6378d0d0d050ab1407d35596" + integrity sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw== + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= + dependencies: + prelude-ls "~1.1.2" + +type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5: + 
version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.11.0.tgz#97abf0872310fed88a5c466b25681576145e33f1" + integrity sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ== + +type-fest@^0.18.0: + version "0.18.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.18.1.tgz#db4bc151a4a2cf4eebf9add5db75508db6cc841f" + integrity sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw== + +type-fest@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1" + integrity sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ== + +type-fest@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" + integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== + +type-fest@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" + integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== + +type-is@^1.6.4, type-is@~1.6.17, type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved 
"https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +typedarray@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= + +typedoc-default-themes@^0.11.4: + version "0.11.4" + resolved "https://registry.yarnpkg.com/typedoc-default-themes/-/typedoc-default-themes-0.11.4.tgz#1bc55b7c8d1132844616ff6f570e1e2cd0eb7343" + integrity sha512-Y4Lf+qIb9NTydrexlazAM46SSLrmrQRqWiD52593g53SsmUFioAsMWt8m834J6qsp+7wHRjxCXSZeiiW5cMUdw== + +typedoc@0.19.2: + version "0.19.2" + resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.19.2.tgz#842a63a581f4920f76b0346bb80eb2a49afc2c28" + integrity sha512-oDEg1BLEzi1qvgdQXc658EYgJ5qJLVSeZ0hQ57Eq4JXy6Vj2VX4RVo18qYxRWz75ifAaYuYNBUCnbhjd37TfOg== + dependencies: + fs-extra "^9.0.1" + handlebars "^4.7.6" + highlight.js "^10.2.0" + lodash "^4.17.20" + lunr "^2.3.9" + marked "^1.1.1" + minimatch "^3.0.0" + progress "^2.0.3" + semver "^7.3.2" + shelljs "^0.8.4" + typedoc-default-themes "^0.11.4" + +typeorm@0.2.29: + version "0.2.29" + resolved "https://registry.yarnpkg.com/typeorm/-/typeorm-0.2.29.tgz#401289dc91900d72eccb26e31cdb7f0591a2272e" + integrity sha512-ih1vrTe3gEAGKRcWlcsTRxTL7gNjacQE498wVGuJ3ZRujtMqPZlbAWuC7xDzWCRjQnkZYNwZQeG9UgKfxSHB5g== + dependencies: + "@sqltools/formatter" "1.2.2" + app-root-path "^3.0.0" + buffer "^5.5.0" + chalk "^4.1.0" + cli-highlight "^2.1.4" + debug "^4.1.1" + dotenv "^8.2.0" + glob "^7.1.6" + js-yaml "^3.14.0" + mkdirp "^1.0.4" + reflect-metadata "^0.1.13" + sha.js "^2.4.11" + tslib "^1.13.0" + xml2js "^0.4.23" + yargonaut "^1.1.2" + yargs "^16.0.3" + +typescript@4.1.2, typescript@^4.1.2: + version "4.1.2" + resolved 
"https://registry.yarnpkg.com/typescript/-/typescript-4.1.2.tgz#6369ef22516fe5e10304aae5a5c4862db55380e9" + integrity sha512-thGloWsGH3SOxv1SoY7QojKi0tc+8FnOmiarEGMbd/lar7QOEd3hvlx3Fp5y6FlDUGl9L+pd4n2e+oToGMmhRQ== + +typescript@^3.9.7: + version "3.9.7" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.9.7.tgz#98d600a5ebdc38f40cb277522f12dc800e9e25fa" + integrity sha512-BLbiRkiBzAwsjut4x/dsibSTB6yWpwT5qWmC2OfuCg3GgVQCSgMs4vEctYPhsaGtd0AeuuHMkjZ2h2WG8MSzRw== + +uglify-js@^3.1.4: + version "3.12.1" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.12.1.tgz#78307f539f7b9ca5557babb186ea78ad30cc0375" + integrity sha512-o8lHP20KjIiQe5b/67Rh68xEGRrc2SRsCuuoYclXXoC74AfSRGblU1HKzJWH3HxPZ+Ort85fWHpSX7KwBUC9CQ== + +uid-number@0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81" + integrity sha1-DqEOgDXo61uOREnwbaHHMGY7qoE= + +umask@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/umask/-/umask-1.1.0.tgz#f29cebf01df517912bb58ff9c4e50fde8e33320d" + integrity sha1-8pzr8B31F5ErtY/5xOUP3o4zMg0= + +undefsafe@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/undefsafe/-/undefsafe-2.0.3.tgz#6b166e7094ad46313b2202da7ecc2cd7cc6e7aae" + integrity sha512-nrXZwwXrD/T/JXeygJqdCO6NZZ1L66HrxM/Z7mIq2oPanoN0F1nLx3lwJMu6AwJY69hdixaFQOuoYsMjE5/C2A== + dependencies: + debug "^2.2.0" + +union-value@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" + integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== + dependencies: + arr-union "^3.1.0" + get-value "^2.0.6" + is-extendable "^0.1.1" + set-value "^2.0.1" + +unique-filename@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" + integrity 
sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== + dependencies: + unique-slug "^2.0.0" + +unique-slug@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" + integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== + dependencies: + imurmurhash "^0.1.4" + +unique-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== + dependencies: + crypto-random-string "^2.0.0" + +universal-user-agent@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-4.0.1.tgz#fd8d6cb773a679a709e967ef8288a31fcc03e557" + integrity sha512-LnST3ebHwVL2aNe4mejI9IQh2HfZ1RLo8Io2HugSif8ekzD1TlWpHpColOB/eh8JHMLkGH3Akqf040I+4ylNxg== + dependencies: + os-name "^3.1.0" + +universal-user-agent@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.0.tgz#3381f8503b251c0d9cd21bc1de939ec9df5480ee" + integrity sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w== + +universalify@^0.1.0: + version "0.1.2" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" + integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== + +universalify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-1.0.0.tgz#b61a1da173e8435b2fe3c67d29b9adf8594bd16d" + integrity sha512-rb6X1W158d7pRQBg5gkR8uPaSfiids68LTJQYOtEUhoJUWBdaQHsuT/EUduxXYxcrt4r5PJ4fuHW1MHT6p0qug== + +universalify@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= + +unset-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= + dependencies: + has-value "^0.3.1" + isobject "^3.0.0" + +untildify@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/untildify/-/untildify-3.0.3.tgz#1e7b42b140bcfd922b22e70ca1265bfe3634c7c9" + integrity sha512-iSk/J8efr8uPT/Z4eSUywnqyrQU7DSdMfdqK4iWEaUVVmcP5JcnpRqmVMwcwcnmI1ATFNgC5V90u09tBynNFKA== + +unzip-response@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/unzip-response/-/unzip-response-2.0.1.tgz#d2f0f737d16b0615e72a6935ed04214572d56f97" + integrity sha1-0vD3N9FrBhXnKmk17QQhRXLVb5c= + +upath@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== + +update-notifier@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-4.1.3.tgz#be86ee13e8ce48fb50043ff72057b5bd598e1ea3" + integrity sha512-Yld6Z0RyCYGB6ckIjffGOSOmHXj1gMeE7aROz4MG+XMkmixBX4jUngrGXNYz7wPKBmtoD4MnBa2Anu7RSKht/A== + dependencies: + boxen "^4.2.0" + chalk "^3.0.0" + configstore "^5.0.1" + has-yarn "^2.1.0" + import-lazy "^2.1.0" + is-ci "^2.0.0" + is-installed-globally "^0.3.1" + is-npm "^4.0.0" + is-yarn-global "^0.3.0" + latest-version "^5.0.0" + pupa "^2.0.1" + semver-diff "^3.1.1" + xdg-basedir "^4.0.0" + 
+uri-js@^4.2.2: + version "4.4.0" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.0.tgz#aa714261de793e8a82347a7bcc9ce74e86f28602" + integrity sha512-B0yRTzYdUCCn9n+F4+Gh4yIDtMQcaJsmYBDsTSG8g/OejKBodLQ2IHfN3bM7jUsRXndopT7OIXWdYqc1fjmV6g== + dependencies: + punycode "^2.1.0" + +urix@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= + +url-parse-lax@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" + integrity sha1-evjzA2Rem9eaJy56FKxovAYJ2nM= + dependencies: + prepend-http "^1.0.1" + +url-parse-lax@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" + integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww= + dependencies: + prepend-http "^2.0.0" + +url-to-options@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/url-to-options/-/url-to-options-1.0.1.tgz#1505a03a289a48cbd7a434efbaeec5055f5633a9" + integrity sha1-FQWgOiiaSMvXpDTvuu7FBV9WM6k= + +urlgrey@0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/urlgrey/-/urlgrey-0.4.4.tgz#892fe95960805e85519f1cd4389f2cb4cbb7652f" + integrity sha1-iS/pWWCAXoVRnxzUOJ8stMu3ZS8= + +use@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" + integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== + +util-deprecate@^1.0.1, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +util-promisify@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/util-promisify/-/util-promisify-2.1.0.tgz#3c2236476c4d32c5ff3c47002add7c13b9a82a53" + integrity sha1-PCI2R2xNMsX/PEcAKt18E7moKlM= + dependencies: + object.getownpropertydescriptors "^2.0.3" + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= + +uuid@8.3.2: + version "8.3.2" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +uuid@^2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-2.0.3.tgz#67e2e863797215530dff318e5bf9dcebfd47b21a" + integrity sha1-Z+LoY3lyFVMN/zGOW/nc6/1Hsho= + +uuid@^3.0.1, uuid@^3.3.2: + version "3.4.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" + integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== + +uuid@^8.3.0: + version "8.3.1" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.1.tgz#2ba2e6ca000da60fce5a196954ab241131e05a31" + integrity sha512-FOmRr+FmWEIG8uhZv6C2bTgEVXsHk08kE7mPlrBbEe+c3r9pjceVPgupIfNIhc4yx55H69OXANrUaSuu9eInKg== + +v8-compile-cache@^2.0.3: + version "2.2.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.2.0.tgz#9471efa3ef9128d2f7c6a7ca39c4dd6b5055b132" + integrity sha512-gTpR5XQNKFwOd4clxfnhaqvfqMpqEwr4tOtCyz4MtYZX2JYhfr1JvBFKdS+7K/9rfpZR3VLX+YWBbKoxCgS43Q== + +v8-to-istanbul@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-7.0.0.tgz#b4fe00e35649ef7785a9b7fcebcea05f37c332fc" + integrity sha512-fLL2rFuQpMtm9r8hrAV2apXX/WqHJ6+IC4/eQVdMDGBUgH/YMV4Gv3duk3kjmyg6uiQWBAA9nJwue4iJUOkHeA== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.1" + 
convert-source-map "^1.6.0" + source-map "^0.7.3" + +validate-npm-package-license@^3.0.1, validate-npm-package-license@^3.0.3: + version "3.0.4" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== + dependencies: + spdx-correct "^3.0.0" + spdx-expression-parse "^3.0.0" + +validate-npm-package-name@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/validate-npm-package-name/-/validate-npm-package-name-3.0.0.tgz#5fa912d81eb7d0c74afc140de7317f0ca7df437e" + integrity sha1-X6kS2B630MdK/BQN5zF/DKffQ34= + dependencies: + builtins "^1.0.3" + +validator@^13.5.1: + version "13.5.1" + resolved "https://registry.yarnpkg.com/validator/-/validator-13.5.1.tgz#3926dfdd07a7e7325ab657fe46f2143eac55f076" + integrity sha512-s+7LW1Xi0OzPNfGN7Hb2vk0YB/epp9KFHHGC5JtqZOE1dUkN4ULPFZAQ1inCu7ceAsWmOJu6sn9cnwm3R+ghWQ== + +vary@^1, vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= + +verror@1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + +vinyl-file@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/vinyl-file/-/vinyl-file-3.0.0.tgz#b104d9e4409ffa325faadd520642d0a3b488b365" + integrity sha1-sQTZ5ECf+jJfqt1SBkLQo7SIs2U= + dependencies: + graceful-fs "^4.1.2" + pify "^2.3.0" + strip-bom-buf "^1.0.0" + strip-bom-stream "^2.0.0" + vinyl "^2.0.1" + +vinyl@^2.0.1, vinyl@^2.2.0, vinyl@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-2.2.1.tgz#23cfb8bbab5ece3803aa2c0a1eb28af7cbba1974" + 
integrity sha512-LII3bXRFBZLlezoG5FfZVcXflZgWP/4dCwKtxd5ky9+LOtM4CS3bIRQsmR1KMnMW07jpE8fqR2lcxPZ+8sJIcw== + dependencies: + clone "^2.1.1" + clone-buffer "^1.0.0" + clone-stats "^1.0.0" + cloneable-readable "^1.0.0" + remove-trailing-separator "^1.0.1" + replace-ext "^1.0.0" + +w3c-hr-time@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + dependencies: + xml-name-validator "^3.0.0" + +walker@^1.0.7, walker@~1.0.5: + version "1.0.7" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb" + integrity sha1-L3+bj9ENZ3JisYqITijRlhjgKPs= + dependencies: + makeerror "1.0.x" + +wcwidth@>=1.0.1, wcwidth@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g= + dependencies: + defaults "^1.0.3" + +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + +webidl-conversions@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + 
+webidl-conversions@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + +whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + +whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + +whatwg-url@^8.0.0: + version "8.4.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.4.0.tgz#50fb9615b05469591d2b2bd6dfaed2942ed72837" + integrity sha512-vwTUFf6V4zhcPkWp/4CQPr1TW9Ml6SF4lVyaIMBdJw5i6qUUJ1QWM4Z6YYVkfka0OUIzVo/0aNtGVGk256IKWw== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^2.0.2" + webidl-conversions "^6.1.0" + +which-module@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= + +which@^1.2.9, which@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity 
sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +which@^2.0.1, which@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +wide-align@^1.1.0: + version "1.1.3" + resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" + integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== + dependencies: + string-width "^1.0.2 || 2" + +widest-line@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca" + integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg== + dependencies: + string-width "^4.0.0" + +windows-release@^3.1.0: + version "3.3.3" + resolved "https://registry.yarnpkg.com/windows-release/-/windows-release-3.3.3.tgz#1c10027c7225743eec6b89df160d64c2e0293999" + integrity sha512-OSOGH1QYiW5yVor9TtmXKQvt2vjQqbYS+DqmsZw+r7xDwLXEeT3JGW0ZppFmHx4diyXmxt238KFR3N9jzevBRg== + dependencies: + execa "^1.0.0" + +winston-transport@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.4.0.tgz#17af518daa690d5b2ecccaa7acf7b20ca7925e59" + integrity sha512-Lc7/p3GtqtqPBYYtS6KCN3c77/2QCev51DvcJKbkFPQNoj1sinkGwLGFDxkXY9J6p9+EPnYs+D90uwbnaiURTw== + dependencies: + readable-stream "^2.3.7" + triple-beam "^1.2.0" + +winston@*, winston@^3.3.3: + version "3.3.3" + resolved "https://registry.yarnpkg.com/winston/-/winston-3.3.3.tgz#ae6172042cafb29786afa3d09c8ff833ab7c9170" + integrity sha512-oEXTISQnC8VlSAKf1KYSSd7J6IWuRPQqDdo8eoRNaYKLvwSb5+79Z3Yi1lrl6KDpU6/VWaxpakDAtb1oQ4n9aw== + dependencies: + 
"@dabh/diagnostics" "^2.0.2" + async "^3.1.0" + is-stream "^2.0.0" + logform "^2.2.0" + one-time "^1.0.0" + readable-stream "^3.4.0" + stack-trace "0.0.x" + triple-beam "^1.3.0" + winston-transport "^4.4.0" + +with-open-file@^0.1.6: + version "0.1.7" + resolved "https://registry.yarnpkg.com/with-open-file/-/with-open-file-0.1.7.tgz#e2de8d974e8a8ae6e58886be4fe8e7465b58a729" + integrity sha512-ecJS2/oHtESJ1t3ZfMI3B7KIDKyfN0O16miWxdn30zdh66Yd3LsRFebXZXq6GU4xfxLf6nVxp9kIqElb5fqczA== + dependencies: + p-finally "^1.0.0" + p-try "^2.1.0" + pify "^4.0.1" + +word-wrap@^1.2.3, word-wrap@~1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wordwrap@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= + +wrap-ansi@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + +wrap-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-4.0.0.tgz#b3570d7c70156159a2d42be5cc942e957f7b1131" + integrity sha512-uMTsj9rDb0/7kk1PbcbCcwvHUxp60fGDB/NNXpVa0Q+ic/e7y5+BwTxKfQ33VYgDppSwi/FBzpetYzo8s6tfbg== + dependencies: + ansi-styles "^3.2.0" + string-width "^2.1.1" + strip-ansi "^4.0.0" + +wrap-ansi@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" + integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== + dependencies: + ansi-styles "^3.2.0" + string-width "^3.0.0" + strip-ansi "^5.0.0" + 
+wrap-ansi@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" + integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +write-file-atomic@^2.0.0, write-file-atomic@^2.3.0, write-file-atomic@^2.4.2: + version "2.4.3" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.4.3.tgz#1fd2e9ae1df3e75b8d8c367443c692d4ca81f481" + integrity sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ== + dependencies: + graceful-fs "^4.1.11" + imurmurhash "^0.1.4" + signal-exit "^3.0.2" + +write-file-atomic@^3.0.0: + version "3.0.3" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +write-json-file@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/write-json-file/-/write-json-file-2.3.0.tgz#2b64c8a33004d54b8698c76d585a77ceb61da32f" + integrity sha1-K2TIozAE1UuGmMdtWFp3zrYdoy8= + dependencies: + detect-indent "^5.0.0" + graceful-fs "^4.1.2" + make-dir 
"^1.0.0" + pify "^3.0.0" + sort-keys "^2.0.0" + write-file-atomic "^2.0.0" + +write-json-file@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/write-json-file/-/write-json-file-3.2.0.tgz#65bbdc9ecd8a1458e15952770ccbadfcff5fe62a" + integrity sha512-3xZqT7Byc2uORAatYiP3DHUUAVEkNOswEWNs9H5KXiicRTvzYzYqKjYc4G7p+8pltvAw641lVByKVtMpf+4sYQ== + dependencies: + detect-indent "^5.0.0" + graceful-fs "^4.1.15" + make-dir "^2.1.0" + pify "^4.0.1" + sort-keys "^2.0.0" + write-file-atomic "^2.4.2" + +write-json-file@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/write-json-file/-/write-json-file-4.3.0.tgz#908493d6fd23225344af324016e4ca8f702dd12d" + integrity sha512-PxiShnxf0IlnQuMYOPPhPkhExoCQuTUNPOa/2JWCYTmBquU9njyyDuwRKN26IZBlp4yn1nt+Agh2HOOBl+55HQ== + dependencies: + detect-indent "^6.0.0" + graceful-fs "^4.1.15" + is-plain-obj "^2.0.0" + make-dir "^3.0.0" + sort-keys "^4.0.0" + write-file-atomic "^3.0.0" + +write-pkg@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/write-pkg/-/write-pkg-3.2.0.tgz#0e178fe97820d389a8928bc79535dbe68c2cff21" + integrity sha512-tX2ifZ0YqEFOF1wjRW2Pk93NLsj02+n1UP5RvO6rCs0K6R2g1padvf006cY74PQJKMGS2r42NK7FD0dG6Y6paw== + dependencies: + sort-keys "^2.0.0" + write-json-file "^2.2.0" + +write@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/write/-/write-1.0.3.tgz#0800e14523b923a387e415123c865616aae0f5c3" + integrity sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig== + dependencies: + mkdirp "^0.5.1" + +wrtc@^0.4.6: + version "0.4.6" + resolved "https://registry.yarnpkg.com/wrtc/-/wrtc-0.4.6.tgz#909ace2cffd34e11633d4e1c20527510a12e7b5b" + integrity sha512-4uD+oFoY2yuo3AV9fum3cXUXR6v8YQHZlqBrKkCRGjW1BvKrVHtLNH4UaNLBLiJu9DL89WqUWmbzsQ9RxMzANw== + dependencies: + node-pre-gyp "^0.13.0" + optionalDependencies: + domexception "^1.0.1" + +ws@7.4.1, ws@^7.3.1: + version "7.4.1" + resolved 
"https://registry.yarnpkg.com/ws/-/ws-7.4.1.tgz#a333be02696bd0e54cea0434e21dcc8a9ac294bb" + integrity sha512-pTsP8UAfhy3sk1lSk/O/s4tjD0CRwvMnzvwr4OKGX7ZvqZtUyx4KIJB5JWbkykPoc55tixMGgTNoh3k4FkNGFQ== + +ws@^7.2.3: + version "7.4.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.0.tgz#a5dd76a24197940d4a8bb9e0e152bb4503764da7" + integrity sha512-kyFwXuV/5ymf+IXhS6f0+eAFvydbaBW3zjpT6hUdAh/hbVjTIB5EHBGi0bPoCLSK2wcuz3BrEkB9LrYv1Nm4NQ== + +xdg-basedir@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13" + integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q== + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xml2js@^0.4.23: + version "0.4.23" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66" + integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug== + dependencies: + sax ">=0.6.0" + xmlbuilder "~11.0.0" + +xmlbuilder@~11.0.0: + version "11.0.1" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" + integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== + +xmlchars@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +xtend@^4.0.0, xtend@^4.0.2, xtend@~4.0.0, xtend@~4.0.1: + version "4.0.2" + resolved 
"https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +y18n@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4" + integrity sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ== + +y18n@^5.0.5: + version "5.0.5" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.5.tgz#8769ec08d03b1ea2df2500acef561743bbb9ab18" + integrity sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg== + +yallist@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" + integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= + +yallist@^3.0.0, yallist@^3.0.2, yallist@^3.0.3: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml@^1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.0.tgz#3b593add944876077d4d683fee01081bd9fff31e" + integrity sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg== + +yargonaut@^1.1.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/yargonaut/-/yargonaut-1.1.4.tgz#c64f56432c7465271221f53f5cc517890c3d6e0c" + integrity sha512-rHgFmbgXAAzl+1nngqOcwEljqHGG9uUZoPjsdZEs1w5JW9RXYzrSvH/u70C1JE5qFi0qjsdhnUX/dJRpWqitSA== + dependencies: + chalk "^1.1.1" + figlet "^1.1.1" 
+ parent-require "^1.0.0" + +yargs-parser@20.x, yargs-parser@^20.2.2, yargs-parser@^20.2.3: + version "20.2.4" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54" + integrity sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA== + +yargs-parser@^13.1.2: + version "13.1.2" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" + integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs-parser@^15.0.1: + version "15.0.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-15.0.1.tgz#54786af40b820dcb2fb8025b11b4d659d76323b3" + integrity sha512-0OAMV2mAZQrs3FkNpDQcBk1x5HXb8X4twADss4S0Iuk+2dGnLOE/fRHrsYm542GduMveyA77OF4wrNJuanRCWw== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs-parser@^18.1.2: + version "18.1.3" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" + integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs@^13.3.0: + version "13.3.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd" + integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw== + dependencies: + cliui "^5.0.0" + find-up "^3.0.0" + get-caller-file "^2.0.1" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^3.0.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^13.1.2" + +yargs@^14.2.2: + version "14.2.3" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-14.2.3.tgz#1a1c3edced1afb2a2fea33604bc6d1d8d688a414" + 
integrity sha512-ZbotRWhF+lkjijC/VhmOT9wSgyBQ7+zr13+YLkhfsSiTriYsMzkTUFP18pFhWwBeMa5gUc1MzbhrO6/VB7c9Xg== + dependencies: + cliui "^5.0.0" + decamelize "^1.2.0" + find-up "^3.0.0" + get-caller-file "^2.0.1" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^3.0.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^15.0.1" + +yargs@^15.4.1: + version "15.4.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" + integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== + dependencies: + cliui "^6.0.0" + decamelize "^1.2.0" + find-up "^4.1.0" + get-caller-file "^2.0.1" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^4.2.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^18.1.2" + +yargs@^16.0.0, yargs@^16.0.3, yargs@^16.2.0: + version "16.2.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2" + +yarn@^1.22.10: + version "1.22.10" + resolved "https://registry.yarnpkg.com/yarn/-/yarn-1.22.10.tgz#c99daa06257c80f8fa2c3f1490724e394c26b18c" + integrity sha512-IanQGI9RRPAN87VGTF7zs2uxkSyQSrSPsju0COgbsKQOOXr5LtcVPeyXWgwVa0ywG3d8dg6kSYKGBuYK021qeA== + +yeoman-environment@^2.0.5, yeoman-environment@^2.3.4, yeoman-environment@^2.9.5: + version "2.10.3" + resolved "https://registry.yarnpkg.com/yeoman-environment/-/yeoman-environment-2.10.3.tgz#9d8f42b77317414434cc0e51fb006a4bdd54688e" + integrity sha512-pLIhhU9z/G+kjOXmJ2bPFm3nejfbH+f1fjYRSOteEXDBrv1EoJE/e+kuHixSXfCYfTkxjYsvRaDX+1QykLCnpQ== + dependencies: + chalk "^2.4.1" 
+ debug "^3.1.0" + diff "^3.5.0" + escape-string-regexp "^1.0.2" + execa "^4.0.0" + globby "^8.0.1" + grouped-queue "^1.1.0" + inquirer "^7.1.0" + is-scoped "^1.0.0" + lodash "^4.17.10" + log-symbols "^2.2.0" + mem-fs "^1.1.0" + mem-fs-editor "^6.0.0" + npm-api "^1.0.0" + semver "^7.1.3" + strip-ansi "^4.0.0" + text-table "^0.2.0" + untildify "^3.0.3" + yeoman-generator "^4.8.2" + +yeoman-generator@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/yeoman-generator/-/yeoman-generator-3.2.0.tgz#02077d2d7ff28fedc1ed7dad7f9967fd7c3604cc" + integrity sha512-iR/qb2je3GdXtSfxgvOXxUW0Cp8+C6LaZaNlK2BAICzFNzwHtM10t/QBwz5Ea9nk6xVDQNj4Q889TjCXGuIv8w== + dependencies: + async "^2.6.0" + chalk "^2.3.0" + cli-table "^0.3.1" + cross-spawn "^6.0.5" + dargs "^6.0.0" + dateformat "^3.0.3" + debug "^4.1.0" + detect-conflict "^1.0.0" + error "^7.0.2" + find-up "^3.0.0" + github-username "^4.0.0" + istextorbinary "^2.2.1" + lodash "^4.17.10" + make-dir "^1.1.0" + mem-fs-editor "^5.0.0" + minimist "^1.2.0" + pretty-bytes "^5.1.0" + read-chunk "^3.0.0" + read-pkg-up "^4.0.0" + rimraf "^2.6.2" + run-async "^2.0.0" + shelljs "^0.8.0" + text-table "^0.2.0" + through2 "^3.0.0" + yeoman-environment "^2.0.5" + +yeoman-generator@^4.8.2: + version "4.12.0" + resolved "https://registry.yarnpkg.com/yeoman-generator/-/yeoman-generator-4.12.0.tgz#512e783a38b004c49265e71826a09ff7f1939f4b" + integrity sha512-lozwklVQHwUXMM1o8BgxEB8F5BB7vkHW4pjAo1Zt5sJ7FOlWhd6DJ4ZxJ2OK0w+gNYkY/ocPMkUV7DTz/uqEEg== + dependencies: + async "^2.6.2" + chalk "^2.4.2" + cli-table "^0.3.1" + cross-spawn "^6.0.5" + dargs "^6.1.0" + dateformat "^3.0.3" + debug "^4.1.1" + diff "^4.0.1" + error "^7.0.2" + find-up "^3.0.0" + github-username "^3.0.0" + istextorbinary "^2.5.1" + lodash "^4.17.11" + make-dir "^3.0.0" + mem-fs-editor "^7.0.1" + minimist "^1.2.5" + pretty-bytes "^5.2.0" + read-chunk "^3.2.0" + read-pkg-up "^5.0.0" + rimraf "^2.6.3" + run-async "^2.0.0" + semver "^7.2.1" + shelljs "^0.8.3" + text-table 
"^0.2.0" + through2 "^3.0.1" + optionalDependencies: + grouped-queue "^1.1.0" + yeoman-environment "^2.9.5" + +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== + +yosay@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/yosay/-/yosay-2.0.2.tgz#a7017e764cd88d64a1ae64812201de5b157adf6d" + integrity sha512-avX6nz2esp7IMXGag4gu6OyQBsMh/SEn+ZybGu3yKPlOTE6z9qJrzG/0X5vCq/e0rPFy0CUYCze0G5hL310ibA== + dependencies: + ansi-regex "^2.0.0" + ansi-styles "^3.0.0" + chalk "^1.0.0" + cli-boxes "^1.0.0" + pad-component "0.0.1" + string-width "^2.0.0" + strip-ansi "^3.0.0" + taketalk "^1.0.0" + wrap-ansi "^2.0.0" + +yup@0.29.3: + version "0.29.3" + resolved "https://registry.yarnpkg.com/yup/-/yup-0.29.3.tgz#69a30fd3f1c19f5d9e31b1cf1c2b851ce8045fea" + integrity sha512-RNUGiZ/sQ37CkhzKFoedkeMfJM0vNQyaz+wRZJzxdKE7VfDeVKH8bb4rr7XhRLbHJz5hSjoDNwMEIaKhuMZ8gQ== + dependencies: + "@babel/runtime" "^7.10.5" + fn-name "~3.0.0" + lodash "^4.17.15" + lodash-es "^4.17.11" + property-expr "^2.0.2" + synchronous-promise "^2.0.13" + toposort "^2.0.2"